Diffstat (limited to 'src/test/java/org/openecomp/sa')
-rw-r--r--  src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java | 46
-rw-r--r--  src/test/java/org/openecomp/sa/rest/BulkApiTest.java | 109
-rw-r--r--  src/test/java/org/openecomp/sa/rest/DocumentApiTest.java | 206
-rw-r--r--  src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java | 106
-rw-r--r--  src/test/java/org/openecomp/sa/rest/IndexApiTest.java | 228
-rw-r--r--  src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java | 200
-rw-r--r--  src/test/java/org/openecomp/sa/rest/StubEsController.java | 237
-rw-r--r--  src/test/java/org/openecomp/sa/rest/TestUtils.java | 67
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java | 103
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java | 235
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java | 147
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java | 52
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java | 87
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java | 71
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java | 78
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java | 53
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java | 66
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java | 356
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java | 270
-rw-r--r--  src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java | 54
20 files changed, 2771 insertions(+), 0 deletions(-)
diff --git a/src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java b/src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java
new file mode 100644
index 0000000..b5bc314
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/ApiUtilsTest.java
@@ -0,0 +1,46 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+
+public class ApiUtilsTest {
+
+ @Test
+ public void testHTTPStatusConversion() {
+ String statusString = ApiUtils.getHttpStatusString(201);
+ assertEquals(0, statusString.compareToIgnoreCase("Created"));
+
+ statusString = ApiUtils.getHttpStatusString(207);
+ assertEquals(0, statusString.compareToIgnoreCase("Multi Status"));
+
+ statusString = ApiUtils.getHttpStatusString(9999);
+ assertEquals(0, statusString.compareToIgnoreCase("Unknown"));
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/rest/BulkApiTest.java b/src/test/java/org/openecomp/sa/rest/BulkApiTest.java
new file mode 100644
index 0000000..483ef33
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/BulkApiTest.java
@@ -0,0 +1,109 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.junit.Test;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.Response;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+
+/**
+ * This suite of tests validates the behaviour of the bulk operations REST
+ * end point.
+ */
+public class BulkApiTest extends JerseyTest {
+
+ private final String TOP_URI = "/test/bulk/";
+
+
+ @Override
+ protected Application configure() {
+
+ // Make sure that our test endpoint is on the resource path
+ // for Jersey Test.
+ return new ResourceConfig(SearchServiceApiHarness.class);
+ }
+
+
+ /**
+ * This test validates that the expected response codes are returned
+ * to the client in the event of an authentication failure.
+ */
+ @Test
+ public void authenticationFailureTest() {
+
+ // Send a request to the end point, with a special trigger in the
+ // payload that tells our test harness to force the authentication
+ // to fail.
+ Response result = target(TOP_URI).request().post(Entity.json(SearchServiceApiHarness.FAIL_AUTHENTICATION_TRIGGER), Response.class);
+
+ // Validate that a failure to authenticate results in the expected
+ // response code returned to the client.
+ assertEquals(Response.Status.FORBIDDEN.getStatusCode(), result.getStatus());
+ }
+
+
+ /**
+ * This test validates that properly constructed json payloads are
+ * correctly validated and that improperly constructed payloads will
+ * be rejected with the appropriate response code returned to the
+ * client.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void payloadValidationTest() throws IOException {
+
+ // Post a request to the bulk operations endpoint with a valid
+ // operations list payload.
+ File validBulkOpsFile = new File("src/test/resources/json/bulk-ops-valid.json");
+ String validPayloadStr = TestUtils.readFileToString(validBulkOpsFile);
+ Response validResult = target(TOP_URI).request().post(Entity.json(validPayloadStr), Response.class);
+
+ // Validate that the payload is accepted as expected.
+ assertEquals("Valid operations payload was rejected",
+ Response.Status.OK.getStatusCode(), validResult.getStatus());
+
+ // Post a request to the bulk operations endpoint with an invalid
+ // operations list payload.
+ File inValidBulkOpsFile = new File("src/test/resources/json/bulk-ops-invalid.json");
+ String inValidPayloadStr = TestUtils.readFileToString(inValidBulkOpsFile);
+ Response invalidResult = target(TOP_URI).request().post(Entity.json(inValidPayloadStr), Response.class);
+
+ // Validate that the payload is rejected as expected.
+ assertEquals("Invalid operations payload was not rejected",
+ Response.Status.BAD_REQUEST.getStatusCode(), invalidResult.getStatus());
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/rest/DocumentApiTest.java b/src/test/java/org/openecomp/sa/rest/DocumentApiTest.java
new file mode 100644
index 0000000..f57bf30
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/DocumentApiTest.java
@@ -0,0 +1,206 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.Invocation.Builder;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.Application;
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class DocumentApiTest extends JerseyTest {
+
+ private static final String INDEXES_URI = "/test/indexes/";
+ private static final String DOCUMENT_URI = "documents/";
+
+ private static final String SEARCH_URI = "query/";
+ private static final String INDEX_NAME = "test-index";
+ private static final String DOC_ID = "test-1";
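+ // Query payloads used by the search tests below: SIMPLE_QUERY is appended
+ // directly to the URL of the GET variant, while COMPLEX_QUERY (an "all"
+ // filter on searchTags plus an optional parsed-query) is POSTed as the
+ // request body.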
+ private static final String SIMPLE_QUERY = "\"parsed-query\": {\"my-field\": \"something\", \"query-string\": \"string\"}";
+ private static final String COMPLEX_QUERY =
+ "{"
+ + "\"filter\": {"
+ + "\"all\": ["
+ + "{\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}}"
+ + "]"
+ + "},"
+ + "\"queries\": ["
+ + "{\"may\": {\"parsed-query\": {\"field\": \"searchTags\", \"query-string\": \"b\"}}}"
+ + "]"
+ + "}";
+
+ private static final String CREATE_JSON_CONTENT = "creation content";
+
+
+ @Override
+ protected Application configure() {
+
+ // Make sure that our test endpoint is on the resource path
+ // for Jersey Test.
+ return new ResourceConfig(SearchServiceApiHarness.class);
+ }
+
+ /**
+ * This test validates the behaviour of the 'Create Document' POST request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Test
+ public void createDocumentTest() throws IOException, ParseException {
+ String result = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI).request().post(Entity.json(CREATE_JSON_CONTENT), String.class);
+
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+
+ JSONParser parser = new JSONParser();
+ JSONObject json = (JSONObject) parser.parse(result);
+
+ assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+ }
+
+ /**
+ * This test validates the behaviour of the 'Update Document' PUT request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Test
+ public void updateDocumentTest() throws IOException, ParseException {
+ WebTarget target = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID);
+ Builder request = target.request().header("If-Match", "1");
+ String result = request.put(Entity.json(CREATE_JSON_CONTENT), String.class);
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ JSONParser parser = new JSONParser();
+ JSONObject json = (JSONObject) parser.parse(result);
+
+ assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+ }
+
+ /**
+ * This test validates the behaviour of the 'Get Document' GET request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Test
+ public void getDocumentTest() throws IOException, ParseException {
+ String result = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID).request().get(String.class);
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ JSONParser parser = new JSONParser();
+ JSONObject json = (JSONObject) parser.parse(result);
+
+ assertTrue("Unexpected Result ", !json.get("etag").toString().isEmpty());
+
+ }
+
+ /**
+ * This test validates the behaviour of the 'Delete Document' DELETE request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Test
+ public void deleteDocumentTest() throws IOException, ParseException {
+ WebTarget target = target(INDEXES_URI + INDEX_NAME + "/" + DOCUMENT_URI + DOC_ID);
+ Builder request = target.request().header("If-Match", "1");
+ String result = request.delete(String.class);
+
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ assertTrue("Unexpected Result ", result.isEmpty());
+
+ }
+
+ /**
+ * This test validates the behaviour of the 'Search Documents' GET request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Ignore
+ @Test
+ public void searchDocumentTest1() throws IOException, ParseException {
+ String result = target(INDEXES_URI + INDEX_NAME + "/" + SEARCH_URI + SIMPLE_QUERY).request().get(String.class);
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ JSONParser parser = new JSONParser();
+ JSONObject json = (JSONObject) parser.parse(result);
+
+ assertTrue("Unexpected Result ", json.get("totalHits").toString().equals("1"));
+
+
+ }
+
+ /**
+ * This test validates the behaviour of the 'Search Documents' GET request
+ * endpoint.
+ *
+ * @throws IOException
+ * @throws ParseException
+ */
+ @Test
+ public void searchDocumentTest2() throws IOException, ParseException {
+ String result = target(INDEXES_URI + INDEX_NAME + "/" + SEARCH_URI).request().post(Entity.json(COMPLEX_QUERY), String.class);
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ JSONParser parser = new JSONParser();
+ JSONObject json = (JSONObject) parser.parse(result);
+ JSONObject resultJson = (JSONObject) json.get("searchResult");
+
+ assertTrue("Unexpected Result ", resultJson.get("totalHits").toString().equals("1"));
+
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java b/src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java
new file mode 100644
index 0000000..fff36b9
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/DocumentSchemaTest.java
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+import org.openecomp.sa.rest.DocumentSchema;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+
+public class DocumentSchemaTest {
+
+ private final String SIMPLE_DOC_SCHEMA_JSON = "src/test/resources/json/simpleDocument.json";
+ private final String NESTED_DOC_SCHEMA_JSON = "src/test/resources/json/nested-document.json";
+
+
+ /**
+ * This test validates that we convert document definitions back and
+ * forth between json strings and POJOs without any loss of data.
+ *
+ * @throws com.fasterxml.jackson.core.JsonParseException
+ * @throws com.fasterxml.jackson.databind.JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void simpleDocSchemaFromJsonFileTest() throws com.fasterxml.jackson.core.JsonParseException, com.fasterxml.jackson.databind.JsonMappingException, IOException {
+
+ // Import our json format document schema from a file.
+ File schemaFile = new File(SIMPLE_DOC_SCHEMA_JSON);
+ String fileString = TestUtils.readFileToString(schemaFile);
+
+ // Unmarshall that to a Java POJO
+ ObjectMapper mapper = new ObjectMapper();
+ DocumentSchema docSchema = mapper.readValue(schemaFile, DocumentSchema.class);
+
+ // Now, for the purposes of comparison, produce a JSON string from
+ // our Java object.
+ String jsonString = mapper.writeValueAsString(docSchema);
+
+ // Assert that the raw JSON that we read from the file matches the marshalled
+ // JSON we generated from our Java object (ie: validate that we didn't lose
+ // anything going in either direction).
+ assertTrue("Marshalled object does not match the original json source that produced it",
+ fileString.equals(jsonString));
+ }
+
+
+ /**
+ * This test validates that we convert document definitions back and
+ * forth between json strings and POJOs without any loss of data in
+ * the case of document schemas which contain nested fields.
+ *
+ * @throws com.fasterxml.jackson.core.JsonParseException
+ * @throws com.fasterxml.jackson.databind.JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void nestedDocSchemaFromJsonFileTest() throws JsonParseException, JsonMappingException, IOException {
+
+ // Import our json format document schema from a file.
+ File schemaFile = new File(NESTED_DOC_SCHEMA_JSON);
+ String fileString = TestUtils.readFileToString(schemaFile);
+
+ // Unmarshall that to a Java POJO
+ ObjectMapper mapper = new ObjectMapper();
+ DocumentSchema docSchema = mapper.readValue(schemaFile, DocumentSchema.class);
+
+ String jsonString = mapper.writeValueAsString(docSchema);
+
+ // Assert that the raw JSON that we read from the file matches the marshalled
+ // JSON we generated from our Java object (ie: validate that we didn't lose
+ // anything going in either direction).
+ assertTrue("Marshalled object does not match the original json source that produced it",
+ fileString.equals(jsonString));
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/openecomp/sa/rest/IndexApiTest.java b/src/test/java/org/openecomp/sa/rest/IndexApiTest.java
new file mode 100644
index 0000000..b969ab6
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/IndexApiTest.java
@@ -0,0 +1,228 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.test.JerseyTest;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.Response;
+import java.io.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * This suite of tests is intended to exercise the set of REST endpoints
+ * associated with manipulating Indexes in the document store.
+ */
+public class IndexApiTest extends JerseyTest {
+
+ private final String TOP_URI = "/test/indexes/";
+ private final String SIMPLE_DOC_SCHEMA_JSON = "src/test/resources/json/simpleDocument.json";
+
+
+ @Override
+ protected Application configure() {
+
+ // Make sure that our test endpoint is on the resource path
+ // for Jersey Test.
+ return new ResourceConfig(SearchServiceApiHarness.class);
+ }
+
+
+ /**
+ * This test validates that the {@link IndexApi} is able to convert {@link OperationResult}
+ * objects to standard REST {@link Response} objects.
+ *
+ * @throws FileNotFoundException
+ * @throws IOException
+ * @throws DocumentStoreOperationException
+ */
+ @Test
+ public void responseFromOperationResultTest() throws FileNotFoundException, IOException, DocumentStoreOperationException {
+
+ int SUCCESS_RESULT_CODE = 200;
+ String SUCCESS_RESULT_STRING = "Everything is ay-okay!";
+ int FAILURE_RESULT_CODE = 500;
+ String FAILURE_CAUSE_STRING = "Something went wrong!";
+
+
+ // Create an instance of the index API endpoint that we will test against.
+ // We will override the init() method because we don't want it to try to
+ // connect to a real document store.
+ IndexApi indexApi = new IndexApi(new SearchServiceApiHarness()) {
+ @Override
+ public void init() { /* do nothing */ }
+ };
+
+ //Construct an OperationResult instance with a success code and string.
+ OperationResult successResult = new OperationResult();
+ successResult.setResultCode(SUCCESS_RESULT_CODE);
+ successResult.setResult(SUCCESS_RESULT_STRING);
+
+ // Convert our success OperationResult to a standard REST Response...
+ Response successResponse = indexApi.responseFromOperationResult(successResult);
+
+ // ...and validate that the Response is correctly populated.
+ assertEquals("Unexpected result code", SUCCESS_RESULT_CODE, successResponse.getStatus());
+ assertTrue("Incorrect result string", ((String) successResponse.getEntity()).equals(SUCCESS_RESULT_STRING));
+
+ // Construct an OperationResult instance with an error code and failure
+ // cause.
+ OperationResult failureResult = new OperationResult();
+ failureResult.setResultCode(FAILURE_RESULT_CODE);
+ failureResult.setFailureCause(FAILURE_CAUSE_STRING);
+
+ // Convert our failure OperationResult to a standard REST Response...
+ Response failureResponse = indexApi.responseFromOperationResult(failureResult);
+
+ // ...and validate that the Response is correctly populated.
+ assertEquals("Unexpected result code", FAILURE_RESULT_CODE, failureResponse.getStatus());
+ assertTrue("Incorrect result string", ((String) failureResponse.getEntity()).equals(FAILURE_CAUSE_STRING));
+ }
+
+
+ /**
+ * This test validates the behaviour of the 'Create Index' POST request
+ * endpoint.
+ *
+ * @throws IOException
+ */
+ @Test
+ public void createIndexTest() throws IOException {
+
+ String INDEX_NAME = "test-index";
+ String EXPECTED_SETTINGS =
+ "{\"analysis\": "
+ + "{\"filter\": "
+ + "{\"nGram_filter\": { "
+ + "\"type\": \"nGram\", "
+ + "\"min_gram\": 1, "
+ + "\"max_gram\": 50, "
+ + "\"token_chars\": [ \"letter\", \"digit\", \"punctuation\", \"symbol\" ]}},"
+ + "\"analyzer\": {"
+ + "\"nGram_analyzer\": "
+ + "{\"type\": \"custom\","
+ + "\"tokenizer\": \"whitespace\","
+ + "\"filter\": [\"lowercase\",\"asciifolding\",\"nGram_filter\"]},"
+ + "\"whitespace_analyzer\": "
+ + "{\"type\": \"custom\","
+ + "\"tokenizer\": \"whitespace\","
+ + "\"filter\": [\"lowercase\",\"asciifolding\"]}}}}";
+ String EXPECTED_MAPPINGS =
+ "{\"properties\": {"
+ + "\"serverName\": {"
+ + "\"type\": \"string\", "
+ + "\"index\": \"analyzed\", "
+ + "\"search_analyzer\": \"whitespace\"}, "
+ + "\"serverComplex\": {"
+ + "\"type\": \"string\", "
+ + "\"search_analyzer\": \"whitespace\"}}}";
+
+ // Read a valid document schema from a json file.
+ File schemaFile = new File(SIMPLE_DOC_SCHEMA_JSON);
+ String documentJson = TestUtils.readFileToString(schemaFile);
+
+ // Send a request to our 'create index' endpoint, using the schema
+ // which we just read.
+ String result = target(TOP_URI + INDEX_NAME).request().put(Entity.json(documentJson), String.class);
+
+
+ // Our stub document store DAO returns the parameters that it was
+ // passed as the result string, so now we can validate that our
+ // endpoint invoked it with the correct parameters.
+ String[] tokenizedResult = result.split("@");
+ assertTrue("Unexpected Index Name '" + tokenizedResult[0] + "' passed to doc store DAO",
+ tokenizedResult[0].equals(INDEX_NAME));
+ assertTrue("Unexpected settings string '" + tokenizedResult[1] + "' passed to doc store DAO",
+ tokenizedResult[1].equals(EXPECTED_SETTINGS));
+ assertTrue("Unexpected mappings string '" + tokenizedResult[2] + "' passed to doc store DAO",
+ tokenizedResult[2].equals(EXPECTED_MAPPINGS));
+ }
+
+
+ /**
+ * This test validates that a 'create index' request with an improperly
+ * formatted document schema as the payload will result in an
+ * appropriate error being returned from the endpoint.
+ */
+ @Test
+ public void createIndexWithMangledSchemaTest() {
+
+ String INDEX_NAME = "test-index";
+ int BAD_REQUEST_CODE = 400;
+
+ String invalidSchemaString = "this is definitely not json!";
+
+ Response result = target(TOP_URI + INDEX_NAME).request().put(Entity.json(invalidSchemaString), Response.class);
+
+ assertEquals("Invalid document schema should result in a 400 error",
+ BAD_REQUEST_CODE, result.getStatus());
+ }
+
+
+ /**
+ * This test validates the behaviour of the 'Delete Index' end point.
+ */
+ @Test
+ public void deleteIndexTest() {
+
+ String INDEX_NAME = "test-index";
+
+ // Send a request to the 'delete index' endpoint.
+ String result = target(TOP_URI + INDEX_NAME).request().delete(String.class);
+
+ // Validate that the expected parameters were passed to the document
+ // store DAO.
+ assertTrue("Unexpected index name '" + result + "' passed to doc store DAO",
+ result.equals(INDEX_NAME));
+ }
+
+
+ /**
+ * This test validates that attempting to delete an index which does not
+ * exist results in a 404 error.
+ */
+ @Test
+ public void deleteIndexDoesNotExistTest() {
+
+ int NOT_FOUND_CODE = 404;
+
+ // Send a request to the 'delete index' endpoint, specifying a
+ // non-existent index.
+ Response result = target(TOP_URI + StubEsController.DOES_NOT_EXIST_INDEX).request().delete(Response.class);
+
+ // Validate that a 404 error code is returned from the end point.
+ assertEquals("Deleting an index which does not exist should result in a 404 error",
+ NOT_FOUND_CODE, result.getStatus());
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java b/src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java
new file mode 100644
index 0000000..1306740
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/SearchServiceApiHarness.java
@@ -0,0 +1,200 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
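+/**
+ * Test-only subclass of {@link SearchServiceApi} that exposes the service
+ * endpoints under the "test/" path, backs them with the {@link StubEsController}
+ * instead of a real document store, and allows authentication to be forced to
+ * fail via the {@link #FAIL_AUTHENTICATION_TRIGGER} payload keyword.
+ */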
+@Path("test/")
+public class SearchServiceApiHarness extends SearchServiceApi {
+
+
+ public static final String FAIL_AUTHENTICATION_TRIGGER = "FAIL AUTHENTICATION";
+
+ private boolean authenticationShouldSucceed = true;
+
+
+ /**
+ * Performs all one-time initialization required for the end point.
+ */
+ @Override
+ public void init() {
+
+ // Instantiate our Document Store DAO.
+ documentStore = new StubEsController();
+ }
+
+
+ @PUT
+ @Path("/indexes/{index}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processCreateIndex(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ return super.processCreateIndex(requestBody, request, headers, index);
+ }
+
+ @DELETE
+ @Path("/indexes/{index}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processDeleteIndex(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ return super.processDeleteIndex(requestBody, request, headers, index);
+ }
+
+ @GET
+ @Path("/indexes/{index}/documents/{id}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processGetDocument(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpServletResponse httpResponse,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index,
+ @PathParam("id") String id) {
+
+ return super.processGetDocument(requestBody, request, httpResponse, headers, index, id);
+ }
+
+ @POST
+ @Path("/indexes/{index}/documents")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processCreateDocWithoutId(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpServletResponse httpResponse,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ return super.processCreateDocWithoutId(requestBody, request, httpResponse, headers, index);
+ }
+
+ @PUT
+ @Path("/indexes/{index}/documents/{id}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processUpsertDoc(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpServletResponse httpResponse,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index,
+ @PathParam("id") String id) {
+
+ return super.processUpsertDoc(requestBody, request, httpResponse, headers, index, id);
+ }
+
+ @DELETE
+ @Path("/indexes/{index}/documents/{id}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processDeleteDoc(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpServletResponse httpResponse,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index,
+ @PathParam("id") String id) {
+
+ return super.processDeleteDoc(requestBody, request, httpResponse, headers, index, id);
+ }
+
+ @GET
+ @Path("/indexes/{index}/query/{queryText}")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processInlineQuery(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index,
+ @PathParam("queryText") String queryText) {
+
+ return super.processInlineQuery(requestBody, request, headers, index, queryText);
+ }
+
+ @GET
+ @Path("/indexes/{index}/query")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processQueryWithGet(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ return super.processQueryWithGet(requestBody, request, headers, index);
+ }
+
+ @POST
+ @Path("/indexes/{index}/query")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processQuery(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ return super.processQuery(requestBody, request, headers, index);
+ }
+
+ @POST
+ @Path("/bulk")
+ @Consumes({MediaType.APPLICATION_JSON})
+ @Override
+ public Response processBulkRequest(String requestBody,
+ @Context HttpServletRequest request,
+ @Context HttpHeaders headers,
+ @PathParam("index") String index) {
+
+ // If the operations string contains a special keyword, set the
+ // harness to fail the authentication validation.
+ if (requestBody.contains(FAIL_AUTHENTICATION_TRIGGER)) {
+ authenticationShouldSucceed = false;
+ }
+
+ // Just pass the request up to the parent, since that is the code
+ // that we really want to test.
+ //return super.processPost(operations, request, headers, index);
+ return super.processBulkRequest(requestBody, request, headers, index);
+ }
+
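+ // Rather than validating real credentials, report whatever outcome the
+ // current test has requested (see processBulkRequest above).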
+ @Override
+ protected boolean validateRequest(HttpHeaders headers,
+ HttpServletRequest req,
+ ApiUtils.Action action,
+ String authPolicyFunctionName) throws Exception {
+
+ return authenticationShouldSucceed;
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/rest/StubEsController.java b/src/test/java/org/openecomp/sa/rest/StubEsController.java
new file mode 100644
index 0000000..f3e5619
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/StubEsController.java
@@ -0,0 +1,237 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import org.json.simple.JSONObject;
+import org.openecomp.sa.rest.DocumentSchema;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreDataEntity;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.dao.DocumentStoreInterface;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.exception.DocumentStoreOperationException;
+import org.openecomp.sa.searchdbabstraction.entity.Document;
+import org.openecomp.sa.searchdbabstraction.entity.*;
+import org.openecomp.sa.searchdbabstraction.util.DocumentSchemaUtil;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * This class implements a stubbed version of the document store DAO so
+ * that we can run unit tests without trying to connect to a real
+ * document store.
+ */
+public class StubEsController implements DocumentStoreInterface {
+
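+ // Index name that the stub treats as non-existent; operations against it
+ // return a 404 result code so that tests can exercise the error paths.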
+ public static final String DOES_NOT_EXIST_INDEX = "index-does-not-exist";
+
+ private AnalysisConfiguration analysisConfig = null;
+
+ /**
+ *
+ */
+ //private IndexAPIHarness indexAPIHarness;
+
+ StubEsController() {
+ analysisConfig = new AnalysisConfiguration();
+ analysisConfig.init("src/test/resources/json/filter-config.json",
+ "src/test/resources/json/analysis-config.json");
+ }
+
+
+ @Override
+ public OperationResult createIndex(String index, DocumentSchema documentSchema) {
+
+ // Just return an OK result, with the parameters that we were passed
+ // bundled in the response string. This allows unit tests to validate
+ // that those parameters match what they expected to be passed.
+ OperationResult opResult = new OperationResult();
+ opResult.setResultCode(200);
+
+ opResult.setResult(index + "@" + analysisConfig.getEsIndexSettings() + "@"
+ + DocumentSchemaUtil.generateDocumentMappings(documentSchema));
+
+ return opResult;
+ }
+
+
+ @Override
+ public OperationResult deleteIndex(String indexName) throws DocumentStoreOperationException {
+
+ OperationResult opResult = new OperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ opResult.setResult(indexName);
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public DocumentOperationResult createDocument(String indexName,
+ DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+ DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ String id = "dummy";
+ if (document.getId() != null) {
+ id = document.getId();
+ }
+ opResult.setResultVersion("1");
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public DocumentOperationResult updateDocument(String indexName,
+ DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+ DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ String version = "1";
+ if (document.getVersion() != null) {
+ version = String.valueOf(Integer.parseInt(document.getVersion()) + 1);
+ }
+ opResult.setResultVersion(version);
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public DocumentOperationResult deleteDocument(String indexName,
+ DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+ DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ if (opResult.getDocument() != null) {
+ opResult.getDocument().setEtag(null);
+ opResult.getDocument().setUrl(null);
+ }
+ opResult.setResultCode(200);
+ opResult.setResult(indexName + "@" + document.getId());
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public DocumentOperationResult getDocument(String indexName,
+ DocumentStoreDataEntity document) throws DocumentStoreOperationException {
+ DocumentOperationResult opResult = buildSampleDocumentOperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public SearchOperationResult search(String indexName,
+ String queryText) throws DocumentStoreOperationException {
+
+ SearchOperationResult opResult = buildSampleSearchOperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ opResult.setResult(indexName + "@" + queryText);
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public SearchOperationResult searchWithPayload(String indexName,
+ String query) throws DocumentStoreOperationException {
+ SearchOperationResult opResult = buildSampleSearchOperationResult();
+
+ if (indexName.equals(DOES_NOT_EXIST_INDEX)) {
+ opResult.setResultCode(404);
+ } else {
+ opResult.setResultCode(200);
+ opResult.setResult(indexName + "@" + query);
+ }
+
+ return opResult;
+ }
+
+ @Override
+ public OperationResult performBulkOperations(BulkRequest[] requests) throws DocumentStoreOperationException {
+
+ OperationResult opResult = new OperationResult();
+ opResult.setResultCode(200);
+
+ return opResult;
+ }
+
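+ // Builds a canned DocumentOperationResult with a populated Document
+ // (an etag and empty content) for the stubbed operations above to return.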
+ private DocumentOperationResult buildSampleDocumentOperationResult() {
+ DocumentOperationResult result = new DocumentOperationResult();
+ Document doc = new Document();
+ doc.setEtag("etag1");
+
+ doc.setContent(new JSONObject());
+ result.setDocument(doc);
+ return result;
+ }
+
+ private SearchOperationResult buildSampleSearchOperationResult() {
+ SearchOperationResult result = new SearchOperationResult();
+
+ SearchHits searchHits = new SearchHits();
+ SearchHit[] searchHitArray = new SearchHit[1];
+ SearchHit searchHit = new SearchHit();
+ Document doc = new Document();
+ doc.setEtag("etag1");
+ Map<String, Object> content = new HashMap<String, Object>();
+ content.put("key1", "value1");
+ doc.setContent(new JSONObject());
+ searchHit.setDocument(doc);
+ searchHitArray[0] = searchHit;
+
+ searchHits.setHits(searchHitArray);
+ searchHits.setTotalHits("1");
+ result.setSearchResult(searchHits);
+
+ return result;
+
+ }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/openecomp/sa/rest/TestUtils.java b/src/test/java/org/openecomp/sa/rest/TestUtils.java
new file mode 100644
index 0000000..dc95d8f
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/rest/TestUtils.java
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.rest;
+
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+
+public class TestUtils {
+
+ /**
+ * This helper method reads the contents of a file into a simple string,
+ * stripping out all whitespace so that the result can be compared
+ * independently of formatting.
+ *
+ * @param aFile - The file to be imported.
+ *
+ * @return - The file contents expressed as a single, whitespace-free string.
+ *
+ * @throws IOException
+ */
+ public static String readFileToString(File aFile) throws IOException {
+
+ BufferedReader br = new BufferedReader(new FileReader(aFile));
+ try {
+ StringBuilder sb = new StringBuilder();
+ String line = br.readLine();
+
+ while (line != null) {
+ sb.append(line);
+ line = br.readLine();
+ }
+
+ return sb.toString().replaceAll("\\s+", "");
+ } finally {
+ try {
+ br.close();
+ } catch (IOException e) {
+ fail("Unexpected IOException: " + e.getMessage());
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java
new file mode 100644
index 0000000..b0ea69b
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/AggregationResponseParsingTest.java
@@ -0,0 +1,103 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResult;
+import org.openecomp.sa.searchdbabstraction.entity.AggregationResults;
+import org.openecomp.sa.searchdbabstraction.util.AggregationParsingUtil;
+
+public class AggregationResponseParsingTest {
+
+ @Test
+ public void testParseAggregationResponse() {
+ JSONParser parser = new JSONParser();
+ JSONObject root;
+
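+ // Sample Elasticsearch response: a "violations" aggregation containing a
+ // nested "by_Timestamp" bucket aggregation with a single date bucket.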
+ String input =
+ "{\r\n \"aggregations\": {\r\n \"violations\": {\r\n \"doc_count\": 2,\r\n \"by_Timestamp\": {\r\n \"doc_count_error_upper_bound\": 0,\r\n \"sum_other_doc_count\": 0,\r\n \"buckets\": [\r\n {\r\n \"key\": 7199992,\r\n \"key_as_string\": \"Jan 1 1970 01:59:59\",\r\n \"doc_count\": 2\r\n }\r\n ]\r\n }\r\n }\r\n }\r\n}";
+
+ try {
+ root = (JSONObject) parser.parse(input);
+ JSONObject aggregations = (JSONObject) root.get("aggregations");
+ AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+ AggregationResults aggs = new AggregationResults();
+ ObjectMapper mapper = new ObjectMapper();
+ aggs.setAggregations(results);
+ System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+ .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testParseAggregationResponse2() {
+ JSONParser parser = new JSONParser();
+ JSONObject root;
+
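+ // Sample response: an "entityType" term bucket aggregation whose bucket
+ // contains a nested "byVersion" bucket aggregation.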
+ String input =
+ "{\r\n \"aggregations\": {\r\n \"entityType\": {\r\n \"doc_count_error_upper_bound\": 0,\r\n \"sum_other_doc_count\": 0,\r\n \"buckets\": [\r\n {\r\n \"key\": \"entity1\",\r\n \"doc_count\": 5,\r\n \"byVersion\": {\r\n \"doc_count_error_upper_bound\": 0,\r\n \"sum_other_doc_count\": 0,\r\n \"buckets\": [\r\n {\r\n \"key\": \"0\",\r\n \"doc_count\": 5\r\n }\r\n ]\r\n }\r\n }\r\n ]\r\n }\r\n }\r\n}";
+
+ try {
+ root = (JSONObject) parser.parse(input);
+ JSONObject aggregations = (JSONObject) root.get("aggregations");
+ AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+ AggregationResults aggs = new AggregationResults();
+ ObjectMapper mapper = new ObjectMapper();
+ aggs.setAggregations(results);
+ System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+ .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Test
+ public void testParseAggregationResponse3() {
+ JSONParser parser = new JSONParser();
+ JSONObject root;
+
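+ // Sample response: a "validateTimes" range aggregation with two buckets,
+ // the second of which is open-ended (no "to" value).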
+ String input =
+ "{\r\n \"aggregations\": {\r\n \"validateTimes\": {\r\n \"buckets\": [\r\n {\r\n \"key\": \"Jan 10 2017 21:6:6-Jan 24 2017 13:43:5\",\r\n \"from\": 1484082366000,\r\n \"from_as_string\": \"Jan 10 2017 21:6:6\",\r\n \"to\": 1485265385000,\r\n \"to_as_string\": \"Jan 24 2017 13:43:5\",\r\n \"doc_count\": 95\r\n },\r\n {\r\n \"key\": \"Feb 3 2017 18:27:39-*\",\r\n \"from\": 1486146459000,\r\n \"from_as_string\": \"Feb 3 2017 18:27:39\",\r\n \"doc_count\": 2\r\n }\r\n ]\r\n }\r\n }\r\n}";
+
+ try {
+ root = (JSONObject) parser.parse(input);
+ JSONObject aggregations = (JSONObject) root.get("aggregations");
+ AggregationResult[] results = AggregationParsingUtil.parseAggregationResults(aggregations);
+ AggregationResults aggs = new AggregationResults();
+ ObjectMapper mapper = new ObjectMapper();
+ aggs.setAggregations(results);
+ System.out.println(mapper.setSerializationInclusion(Include.NON_NULL)
+ .writerWithDefaultPrettyPrinter().writeValueAsString(aggs));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java
new file mode 100644
index 0000000..2439f48
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/elasticsearch/dao/ElasticSearchHttpControllerTest.java
@@ -0,0 +1,235 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.elasticsearch.dao;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.openecomp.sa.searchdbabstraction.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sa.searchdbabstraction.entity.OperationResult;
+
+import java.util.Properties;
+
+@Ignore("All tests in this classes require an Elasticsearch instance to run locally")
+public class ElasticSearchHttpControllerTest {
+
+ private static ElasticSearchHttpController elasticSearch;
+ private static AAIEntityTestObject testDocument;
+
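+ // Index mappings and settings (nGram and whitespace analyzers) passed to
+ // createTable in testCreateTable below.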
+ private static final String indexMappings = "{\r\n \"properties\": {\r\n \"entityType\": {\r\n \"type\": \"string\"\r\n },\r\n \"edgeTagQueryEntityFieldName\": {\r\n \"type\": \"string\",\r\n \"index\": \"no\"\r\n },\r\n \"edgeTagQueryEntityFieldValue\": {\r\n \"type\": \"string\",\r\n \"index\": \"no\"\r\n },\r\n \"searchTagIDs\" : {\r\n \"type\" : \"string\"\r\n },\r\n \"searchTags\": {\r\n \"type\": \"string\",\r\n \"analyzer\": \"nGram_analyzer\",\r\n \"search_analyzer\": \"whitespace_analyzer\"\r\n }\r\n }\r\n}";
+ private static final String indexSettings = "{\r\n \"analysis\": {\r\n \"filter\": {\r\n \"nGram_filter\": {\r\n \"type\": \"nGram\",\r\n \"min_gram\": 1,\r\n \"max_gram\": 50,\r\n \"token_chars\": [\r\n \"letter\",\r\n \"digit\",\r\n \"punctuation\",\r\n \"symbol\"\r\n ]\r\n }\r\n },\r\n \"analyzer\": {\r\n \"nGram_analyzer\": {\r\n \"type\": \"custom\",\r\n \"tokenizer\": \"whitespace\",\r\n \"filter\": [\r\n \"lowercase\",\r\n \"asciifolding\",\r\n \"nGram_filter\"\r\n ]\r\n },\r\n \"whitespace_analyzer\": {\r\n \"type\": \"custom\",\r\n \"tokenizer\": \"whitespace\",\r\n \"filter\": [\r\n \"lowercase\",\r\n \"asciifolding\"\r\n ]\r\n }\r\n }\r\n }\r\n}";
+
+ @Before
+ public void setUp() throws Exception {
+ Properties properties = new Properties();
+ properties.put(ElasticSearchConfig.ES_IP_ADDRESS, "127.0.0.1");
+ properties.put(ElasticSearchConfig.ES_HTTP_PORT, "9200");
+ ElasticSearchConfig config = new ElasticSearchConfig(properties);
+ elasticSearch = new ElasticSearchHttpController(config);
+
+ testDocument = new AAIEntityTestObject();
+ testDocument.setId("test123");
+ testDocument.setEntityType("service-instance");
+ testDocument.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+ testDocument.setEdgeTagQueryEntityFieldValue("123456");
+ testDocument.setSearchTagIDs("0");
+ testDocument.setSearchTags("service-instance-id");
+
+ }
+
+ @Test
+ public void testCreateTable() throws Exception {
+ OperationResult result = elasticSearch.createTable("test", "aai-entities", indexSettings, indexMappings);
+ System.out.println(result);
+ }
+
+ @Test
+ public void testCreateDocument() throws Exception {
+ OperationResult result = elasticSearch.createDocument("test", testDocument);
+ System.out.println(result);
+
+ DocumentStoreDataEntityImpl ds = new DocumentStoreDataEntityImpl();
+ ds.setId(testDocument.getId());
+
+ result = elasticSearch.getDocument("test", ds);
+ System.out.println(result);
+ }
+
+ @Test
+ public void testUpdateDocument() throws Exception {
+ testDocument.setEdgeTagQueryEntityFieldValue("567890");
+
+ OperationResult result = elasticSearch.updateDocument("test", testDocument);
+ System.out.println(result);
+
+ result = elasticSearch.getDocument("test", testDocument);
+ System.out.println(result);
+ }
+
+ @Test
+ public void testDeleteDocument() throws Exception {
+ OperationResult result = elasticSearch.deleteDocument("test", testDocument);
+ System.out.println(result);
+
+ result = elasticSearch.getDocument("test", testDocument);
+ System.out.println(result);
+ }
+
+ @Test
+ public void testBulkCreateDocuments() throws Exception {
+ for (int i = 0; i < 10; i++) {
+ AAIEntityTestObject doc = new AAIEntityTestObject();
+ doc.setId("test-" + i);
+ doc.setEntityType("service-instance");
+ doc.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+ doc.setEdgeTagQueryEntityFieldValue("123456" + i);
+ doc.setSearchTagIDs("" + i);
+ doc.setSearchTags("service-instance-id");
+
+ OperationResult result = elasticSearch.createDocument("test", doc);
+ System.out.println(result);
+ }
+ }
+
+ @Test
+ public void searchByEntityType() throws Exception {
+ OperationResult result = elasticSearch.search("test", "q=instance");
+ System.out.println(result);
+ }
+
+ @Test
+ public void searchByTagIDs() throws Exception {
+ OperationResult result = elasticSearch.search("test", "q=9");
+ System.out.println(result);
+ }
+
+ @Test
+ public void searchByTags() throws Exception {
+ OperationResult result = elasticSearch.search("test", "q=service");
+ System.out.println(result);
+ }
+
+ @Test
+ public void testCreateDocumentWithoutId() throws Exception {
+ AAIEntityTestObject doc = new AAIEntityTestObject();
+ doc.setEntityType("service-instance");
+ doc.setEdgeTagQueryEntityFieldName("service-instance.service-instance-id");
+ doc.setEdgeTagQueryEntityFieldValue("1111111");
+ doc.setSearchTagIDs("321");
+ doc.setSearchTags("service-instance-id");
+
+ OperationResult result = elasticSearch.createDocument("test", doc);
+ System.out.println(result);
+ }
+
+ @Test
+ public void testDeleteIndex() throws Exception {
+ OperationResult result = elasticSearch.deleteIndex("test");
+ System.out.println(result);
+ }
+
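+ /**
+ * Minimal DocumentStoreDataEntity implementation used as the fixture for the
+ * document CRUD tests above.
+ */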
+ class AAIEntityTestObject implements DocumentStoreDataEntity {
+ private String id;
+ private String entityType;
+ private String edgeTagQueryEntityFieldName;
+ private String edgeTagQueryEntityFieldValue;
+ private String searchTagIDs;
+ private String searchTags;
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ @Override
+ public String getId() {
+ return this.id;
+ }
+
+ public String getEntityType() {
+ return entityType;
+ }
+
+ public void setEntityType(String entityType) {
+ this.entityType = entityType;
+ }
+
+ public String getEdgeTagQueryEntityFieldName() {
+ return edgeTagQueryEntityFieldName;
+ }
+
+ public void setEdgeTagQueryEntityFieldName(String edgeTagQueryEntityFieldName) {
+ this.edgeTagQueryEntityFieldName = edgeTagQueryEntityFieldName;
+ }
+
+ public String getEdgeTagQueryEntityFieldValue() {
+ return edgeTagQueryEntityFieldValue;
+ }
+
+ public void setEdgeTagQueryEntityFieldValue(String edgeTagQueryEntityFieldValue) {
+ this.edgeTagQueryEntityFieldValue = edgeTagQueryEntityFieldValue;
+ }
+
+ public String getSearchTagIDs() {
+ return searchTagIDs;
+ }
+
+ public void setSearchTagIDs(String searchTagIDs) {
+ this.searchTagIDs = searchTagIDs;
+ }
+
+ public String getSearchTags() {
+ return searchTags;
+ }
+
+ public void setSearchTags(String searchTags) {
+ this.searchTags = searchTags;
+ }
+
+ @Override
+ public String getVersion() {
+ return "1";
+ }
+
+ @Override
+ public String getContentInJson() {
+ try {
+ return new JSONObject()
+ .put("entityType", entityType)
+ .put("edgeTagQueryEntityFieldName", edgeTagQueryEntityFieldName)
+ .put("edgeTagQueryEntityFieldValue", edgeTagQueryEntityFieldValue)
+ .put("searchTagIDs", searchTagIDs)
+ .put("searchTags", searchTags).toString();
+ } catch (JSONException e) {
+ // JSON construction failed; print the stack trace and return null.
+ e.printStackTrace();
+ return null;
+ }
+ }
+
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java
new file mode 100644
index 0000000..e73b882
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationStatementTest.java
@@ -0,0 +1,147 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class AggregationStatementTest {
+
+ private static ObjectMapper mapper = new ObjectMapper();
+
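+ // Each test deserializes an aggregation statement from the search-API JSON and
+ // checks the Elasticsearch DSL string produced by toElasticSearch().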
+ @Test
+ public void testGroupBy() {
+ String input = "{\r\n \"group-by\": {\r\n \"field\": \"entityType\"\r\n }\r\n }";
+
+ String expected = "{\"terms\": {\"field\": \"entityType\"}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+ @Test
+ public void testDateRange() {
+ String input = "{\r\n \"date-range\": {\r\n \"field\": \"mydate\",\r\n \"ranges\": [\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n }\r\n ],\r\n \"format\": \"MM-yyy\",\r\n \"size\": \"5\"\r\n }\r\n}";
+
+ String expected = "{\"date_range\": {\"field\": \"mydate\", \"format\": \"MM-yyy\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}], \"size\": 5}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+ @Test
+ public void testDateHistogram() {
+ String input = "{\r\n \"date-histogram\": {\r\n \"field\": \"mydate\",\r\n \"interval\": \"day\"\r\n }\r\n}";
+
+ String expected = "{\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\"}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+ @Test
+ public void testSubAggregation1() {
+ String input = "{\r\n \"group-by\": {\r\n \"field\": \"severity\"\r\n },\r\n \"sub-aggregations\": [\r\n {\r\n \"name\": \"byType\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"entityType\"\r\n }\r\n }\r\n }\r\n ]\r\n}";
+ String expected = "{\"terms\": {\"field\": \"severity\"}, \"aggs\": {\"byType\": {\"terms\": {\"field\": \"entityType\"}}}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+ @Test
+ public void testSubAggregation2() {
+ String input = "{\r\n \"group-by\": {\r\n \"field\": \"severity\"\r\n },\r\n \"sub-aggregations\": [\r\n {\r\n \"name\": \"byType\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"violationType\"\r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"byRule\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"validationRule\"\r\n }\r\n }\r\n }\r\n ]\r\n}";
+ String expected = "{\"terms\": {\"field\": \"severity\"}, \"aggs\": {\"byType\": {\"terms\": {\"field\": \"violationType\"}},\"byRule\": {\"terms\": {\"field\": \"validationRule\"}}}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+
+ @Test
+ public void testNestedAggregation1() {
+ String input = "{\r\n \"nested\": [{\r\n \"name\": \"by_severity\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"violations.severity\"\r\n }\r\n }\r\n }]\r\n}";
+ String expected = "{\"nested\": {\"path\": \"violations\"}, \"aggs\": {\"by_severity\": {\"terms\": {\"field\": \"violations.severity\"}}}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+ @Test
+ public void testNestedAggregation2() {
+ String input = "{\r\n \"nested\": [\r\n {\r\n \"name\": \"by_severity\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"violations.severity\"\r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"by_type\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"violations.violationType\"\r\n }\r\n }\r\n }\r\n ]\r\n}";
+ String expected = "{\"nested\": {\"path\": \"violations\"}, \"aggs\": {\"by_severity\": {\"terms\": {\"field\": \"violations.severity\"}},\"by_type\": {\"terms\": {\"field\": \"violations.violationType\"}}}}";
+
+ AggregationStatement actual;
+ try {
+ actual = mapper.readValue(input, AggregationStatement.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java
new file mode 100644
index 0000000..8b77c68
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/AggregationTest.java
@@ -0,0 +1,52 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class AggregationTest {
+ private static ObjectMapper mapper = new ObjectMapper();
+
+ @Test
+ public void test() {
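+ // A named aggregation ("byDate"): a date-range over "mydate" with a single
+ // group-by sub-aggregation ("byTerm" on the "myterm" field).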
+ String input = "{\r\n \"name\": \"byDate\",\r\n \"aggregation\": {\r\n \"date-range\": {\r\n \"field\": \"mydate\",\r\n \"ranges\": [\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n }\r\n ]\r\n },\r\n \"sub-aggregations\": [{\r\n \"name\": \"byTerm\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"myterm\"\r\n }\r\n }\r\n }]\r\n }\r\n}";
+
+ String expected = "\"byDate\": {\"date_range\": {\"field\": \"mydate\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}]}, \"aggs\": {\"byTerm\": {\"terms\": {\"field\": \"myterm\"}}}}";
+
+ Aggregation actual;
+ try {
+ actual = mapper.readValue(input, Aggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java
new file mode 100644
index 0000000..cb93644
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateHistogramAggregationTest.java
@@ -0,0 +1,87 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateHistogramAggregationTest {
+ ObjectMapper mapper = new ObjectMapper();
+
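+ // The three tests below cover: field + interval + time-zone, field + interval
+ // only, and field only.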
+ @Test
+ public void testFullSet() {
+ String input =
+ "{\r\n \"field\": \"mydate\",\r\n \"interval\": \"day\",\r\n \"time-zone\": \"-01:00\"\r\n}";
+
+ String expected =
+ "\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\", \"time_zone\": \"-01:00\"}";
+
+ DateHistogramAggregation actual;
+ try {
+ actual = mapper.readValue(input, DateHistogramAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testNoTimeZone() {
+ String input =
+ "{\r\n \"field\": \"mydate\",\r\n \"interval\": \"day\"\r\n}";
+
+ String expected =
+ "\"date_histogram\": {\"field\": \"mydate\", \"interval\": \"day\"}";
+
+ DateHistogramAggregation actual;
+ try {
+ actual = mapper.readValue(input, DateHistogramAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testFieldOnly() {
+ String input =
+ "{\r\n \"field\": \"mydate\"\r\n}";
+
+ String expected =
+ "\"date_histogram\": {\"field\": \"mydate\"}";
+
+ DateHistogramAggregation actual;
+ try {
+ actual = mapper.readValue(input, DateHistogramAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java
new file mode 100644
index 0000000..cb08c47
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeAggregationTest.java
@@ -0,0 +1,71 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateRangeAggregationTest {
+
+ private static ObjectMapper mapper = new ObjectMapper();
+
+
+ @Test
+ public void test() {
+
+ String input =
+ "{\r\n \"field\": \"mydate\",\r\n \"ranges\": [\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n },\r\n {\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n },\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\"\r\n }\r\n ],\r\n \"format\": \"MM-yyy\",\r\n \"size\": \"5\"\r\n}";
+ String expected = "\"date_range\": {\"field\": \"mydate\", \"format\": \"MM-yyy\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"from\": \"2016-12-19T00:00:00.738-05:00\"}], \"size\": 5}";
+
+ DateRangeAggregation actual;
+ try {
+ actual = mapper.readValue(input, DateRangeAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testNoFormatNoSize() {
+
+ String input =
+ "{\r\n \"field\": \"mydate\",\r\n \"ranges\": [\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n },\r\n {\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n },\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\"\r\n }\r\n ]\r\n}";
+ String expected = "\"date_range\": {\"field\": \"mydate\", \"ranges\": [{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"to\": \"2016-12-23T23:59:59.738-05:00\"},{\"from\": \"2016-12-19T00:00:00.738-05:00\"}]}";
+
+ DateRangeAggregation actual;
+ try {
+ actual = mapper.readValue(input, DateRangeAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java
new file mode 100644
index 0000000..59d5bdd
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/DateRangeTest.java
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class DateRangeTest {
+ private static ObjectMapper mapper = new ObjectMapper();
+
+ @Test
+ public void testBoth() {
+ String input = "{\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n}";
+ String expected = "{\"from\": \"2016-12-19T00:00:00.738-05:00\", \"to\": \"2016-12-23T23:59:59.738-05:00\"}";
+
+ DateRange actual;
+ try {
+ actual = mapper.readValue(input, DateRange.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testFrom() {
+ String input = "{\"from\": \"2016-12-19T00:00:00.738-05:00\"}";
+ String expected = "{\"from\": \"2016-12-19T00:00:00.738-05:00\"}";
+
+ DateRange actual;
+ try {
+ actual = mapper.readValue(input, DateRange.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testTo() {
+ String input = "{\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n}";
+ String expected = "{\"to\": \"2016-12-23T23:59:59.738-05:00\"}";
+
+ DateRange actual;
+ try {
+ actual = mapper.readValue(input, DateRange.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java
new file mode 100644
index 0000000..6c7e5d2
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/FilterTest.java
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertNotNull;
+
+public class FilterTest {
+
+ @Test
+ public void test() throws JsonParseException, JsonMappingException, IOException {
+
+
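+ // A filter with two 'any' (match) clauses on searchTags and one 'all'
+ // (parsed-query) clause on "fieldname".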
+ String json = "{ \"any\": [ "
+ + "{\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}},"
+ + "{\"match\": {\"field\": \"searchTags\", \"value\": \"b\"}}"
+ + "],"
+ + "\"all\": ["
+ + "{\"parsed-query\": {\"field\": \"fieldname\", \"query-string\": \"string\"}}"
+ + "]"
+ + "}";
+
+ ObjectMapper mapper = new ObjectMapper();
+ Filter filter = mapper.readValue(json, Filter.class);
+ System.out.println("GDF: filter = " + filter);
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java
new file mode 100644
index 0000000..a81de6e
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/GroupByAggregationTest.java
@@ -0,0 +1,66 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class GroupByAggregationTest {
+ private static ObjectMapper mapper = new ObjectMapper();
+
+ @Test
+ public void test() {
+ String input = "{\"field\" : \"entityType\", \"size\": 20}\r\n";
+
+ String expected = "\"terms\": {\"field\": \"entityType\", \"size\": 20}";
+
+ GroupByAggregation actual;
+ try {
+ actual = mapper.readValue(input, GroupByAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testNoSize() {
+ String input = "{\"field\" : \"entityType\"}\r\n";
+
+ String expected = "\"terms\": {\"field\": \"entityType\"}";
+
+ GroupByAggregation actual;
+ try {
+ actual = mapper.readValue(input, GroupByAggregation.class);
+ assertEquals(expected, actual.toElasticSearch());
+ } catch (Exception e) {
+ fail("Exception occurred: " + e.getMessage());
+ }
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java
new file mode 100644
index 0000000..e754ce2
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/QueryTest.java
@@ -0,0 +1,356 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+public class QueryTest {
+
+ /**
+ * This test validates that we are able to marshal json structures
+ * representing term queries into POJOs and that we can then
+ * unmarshal those POJOs into ElasticSearch syntax.
+ *
+ * @throws JsonParseException
+ * @throws JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void termQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ Integer intValue = 1;
+ String field = "searchTags";
+ String termQueryWithIntegerValueJson = "{\"field\": \"" + field + "\", \"value\": " + intValue + "}";
+ String termQueryWithIntegerValueExpectedES = "{\"term\": {\"" + field + "\" : " + intValue + "}}";
+
+ Double doubleValue = 5.7;
+ String termQueryWithDoubleValueJson = "{\"field\": \"" + field + "\", \"value\": " + doubleValue + "}";
+ String termQueryWithDoubleValueExpectedES = "{\"term\": {\"" + field + "\" : " + doubleValue + "}}";
+
+ String stringValue = "theValue";
+ String termQueryWithStringValueJson = "{\"field\": \"" + field + "\", \"value\": \"" + stringValue + "\"}";
+ String termQueryWithStringValueExpectedES = "{\"term\": {\"" + field + "\" : \"" + stringValue + "\"}}";
+
+ ObjectMapper mapper = new ObjectMapper();
+
+
+ // Validate that we can marshal a term query where the supplied value
+ // is an Integer.
+ TermQuery integerTermQuery = mapper.readValue(termQueryWithIntegerValueJson, TermQuery.class);
+ assertTrue("Expected value to be of type Integer, but was type " + integerTermQuery.getValue().getClass().getName(),
+ integerTermQuery.getValue() instanceof Integer);
+ assertEquals(intValue, integerTermQuery.getValue());
+
+ assertTrue("ElasticSearch term query translation does not match the expected result",
+ termQueryWithIntegerValueExpectedES.equals(integerTermQuery.toElasticSearch()));
+
+ // Validate that we can marshal a term query where the supplied value
+ // is a Double.
+ TermQuery doubleTermQuery = mapper.readValue(termQueryWithDoubleValueJson, TermQuery.class);
+ assertTrue("Expected value to be of type Double, but was type " + doubleTermQuery.getValue().getClass().getName(),
+ doubleTermQuery.getValue() instanceof Double);
+ assertEquals(doubleValue, doubleTermQuery.getValue());
+ assertTrue("ElasticSearch term query translation does not match the expected result",
+ termQueryWithDoubleValueExpectedES.equals(doubleTermQuery.toElasticSearch()));
+
+ // Validate that we can marshal a term query where the supplied value
+ // is a String literal.
+ TermQuery stringTermQuery = mapper.readValue(termQueryWithStringValueJson, TermQuery.class);
+ assertTrue("Expected value to be of type String, but was type " + stringTermQuery.getValue().getClass().getName(),
+ stringTermQuery.getValue() instanceof String);
+ assertEquals(stringValue, stringTermQuery.getValue());
+ assertTrue("ElasticSearch term query translation does not match the expected result",
+ termQueryWithStringValueExpectedES.equals(stringTermQuery.toElasticSearch()));
+
+
+ }
+
+
+ /**
+ * This test validates that we are able to marshal json structures
+ * representing parsed queries into POJOs and that we can then
+ * unmarshal those POJOs into ElasticSearch syntax.
+ *
+ * @throws JsonParseException
+ * @throws JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void parsedQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "fieldname";
+ String queryString = "The query string";
+
+ String queryJson = "{\"field\": \"" + field + "\", \"query-string\": \"" + queryString + "\"}";
+ String queryExpectedES = "{\"query_string\": {\"default_field\": \"" + field + "\", \"query\": \"" + queryString + "\"}}";
+
+ ObjectMapper mapper = new ObjectMapper();
+ ParsedQuery pq = mapper.readValue(queryJson, ParsedQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field' - expected: " + field + " actual: " + pq.getField(),
+ field.equals(pq.getField()));
+ assertTrue("Unexpected marshalled value for 'query-string' - expected: " + queryString + " actual: " + pq.getQueryString(),
+ queryString.equals(pq.getQueryString()));
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + queryExpectedES + " Actual: " + pq.toElasticSearch(),
+ queryExpectedES.equals(pq.toElasticSearch()));
+ }
+
+
+ /**
+ * This test validates that a ranged query cannot be parsed with values
+ * for both the 'gte' and 'gt' fields or the 'lte' and 'lt' fields, and
+ * that we do not allow mixing of numeric and date types in the same
+ * query.
+ *
+ * @throws JsonParseException
+ * @throws IOException
+ */
+ @Test
+ public void rangeQueryConflictingBoundsTest() throws JsonParseException, IOException {
+
+ String invalidGTAndGTE = "{ \"field\": \"timestamp\", \"gte\": \"2016-10-06T00:00:00.558+03:00\", \"gt\": \"2016-10-06T23:59:59.558+03:00\"}";
+ String invalidLTAndLTE = "{ \"field\": \"timestamp\", \"lte\": \"2016-10-06T00:00:00.558+03:00\", \"lt\": \"2016-10-06T23:59:59.558+03:00\"}";
+ String invalidTypes = "{ \"field\": \"timestamp\", \"lte\": 5, \"gte\": \"2016-10-06T23:59:59.558+03:00\"}";
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ // Attempt to parse a query where we are setting values for both the
+ // 'greater than' and 'greater than and equal to' operators.
+ boolean gotExpectedException = false;
+ try {
+ RangeQuery badRangeQuery = mapper.readValue(invalidGTAndGTE, RangeQuery.class);
+ } catch (JsonMappingException e) {
+ gotExpectedException = true;
+ }
+ assertTrue("Attempting to set both a 'gt' and 'gte' value on the same query should not have been allowed",
+ gotExpectedException);
+
+ // Attempt to parse a query where we are setting values for both the
+ // 'less than' and 'less than and equal to' operators.
+ gotExpectedException = false;
+ try {
+ RangeQuery badRangeQuery = mapper.readValue(invalidLTAndLTE, RangeQuery.class);
+ } catch (JsonMappingException e) {
+ gotExpectedException = true;
+ }
+ assertTrue("Attempting to set both a 'lt' and 'lte' value on the same query should not have been allowed",
+ gotExpectedException);
+
+ // Attempt to parse a query where we are mixing numeric and date values
+ // in the same query.
+ gotExpectedException = false;
+ try {
+ RangeQuery badRangeQuery = mapper.readValue(invalidTypes, RangeQuery.class);
+ } catch (JsonMappingException e) {
+ gotExpectedException = true;
+ }
+ assertTrue("Attempting to mix numeric and date values in the same query should not have been allowed",
+ gotExpectedException);
+
+
+ }
+
+
+ /**
+ * This test validates that date range queries can be marshalled to a Java
+ * POJO and unmarshalled to ElasticSearch syntax.
+ *
+ * @throws JsonParseException
+ * @throws JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void dateRangeQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "timestamp";
+ String greaterThanDate = "2016-10-06T00:00:00.558+03:00";
+ String lessThanDate = "2016-10-06T23:59:59.558+03:00";
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ // Generate a date range query using 'greater than or equal' and 'less
+ // than or equal' operations.
+ String dateRangeJson =
+ "{ \"field\": \"" + field + "\", \"gte\": \"" + greaterThanDate + "\", \"lte\": \"" + lessThanDate + "\"}";
+ String dateRangeExpectedES =
+ "{\"range\": {\"timestamp\": {\"gte\": \"2016-10-06T00:00:00.558+03:00\", \"lte\": \"2016-10-06T23:59:59.558+03:00\"}}}";
+
+ // Validate that the query is marshalled correctly to the POJO and that
+ // the generated ElasticSearch syntax looks as expected.
+ RangeQuery dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field'. Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+ field.equals(dateRangeQuery.getField()));
+ assertTrue("Unexpected type for 'gte' value. Expected: String Actual: " + dateRangeQuery.getGte().getClass().getName(),
+ dateRangeQuery.getGte() instanceof String);
+ assertTrue("Unexpected type for 'lte' value. Expected: String Actual: " + dateRangeQuery.getLte().getClass().getName(),
+ dateRangeQuery.getLte() instanceof String);
+ assertTrue("Unexpected marshalled value for 'gte'. Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGte(),
+ greaterThanDate.equals(dateRangeQuery.getGte()));
+ assertTrue("Unexpected marshalled value for 'lte'. Expected: " + lessThanDate + " Actual: " + dateRangeQuery.getLte(),
+ lessThanDate.equals(dateRangeQuery.getLte()));
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+ dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+
+ // Generate a date range query using 'greater than' and 'less than or
+ // equal' operations.
+ dateRangeJson =
+ "{ \"field\": \"" + field + "\", \"gt\": \"" + greaterThanDate + "\", \"lte\": \"" + lessThanDate + "\"}";
+ dateRangeExpectedES =
+ "{\"range\": {\"timestamp\": {\"gt\": \"2016-10-06T00:00:00.558+03:00\", \"lte\": \"2016-10-06T23:59:59.558+03:00\"}}}";
+
+ // Validate that the query is marshalled correctly to the POJO and that
+ // the generated ElasticSearch syntax looks as expected.
+ dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field'. Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+ field.equals(dateRangeQuery.getField()));
+
+ assertTrue("Unexpected type for 'gt' value. Expected: String Actual: " + dateRangeQuery.getGt().getClass().getName(),
+ dateRangeQuery.getGt() instanceof String);
+
+ assertTrue("Unexpected type for 'lte' value. Expected: String Actual: " + dateRangeQuery.getLte().getClass().getName(),
+ dateRangeQuery.getLte() instanceof String);
+
+ assertTrue("Unexpected marshalled value for 'gt'. Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGt(),
+ greaterThanDate.equals(dateRangeQuery.getGt()));
+
+ assertTrue("Unexpected marshalled value for 'lte'. Expected: " + lessThanDate + " Actual: " + dateRangeQuery.getLte(),
+ lessThanDate.equals(dateRangeQuery.getLte()));
+
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+ dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+
+ // Generate a date range query using only a 'greater than' operation.
+ dateRangeJson =
+ "{ \"field\": \"" + field + "\", \"gt\": \"" + greaterThanDate + "\"}";
+ dateRangeExpectedES =
+ "{\"range\": {\"timestamp\": {\"gt\": \"2016-10-06T00:00:00.558+03:00\"}}}";
+
+ // Validate that the query is marshalled correctly to the POJO and that
+ // the generated ElasticSearch syntax looks as expected.
+ dateRangeQuery = mapper.readValue(dateRangeJson, RangeQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field'. Expected: " + field + " Actual: " + dateRangeQuery.getField(),
+ field.equals(dateRangeQuery.getField()));
+
+ assertTrue("Unexpected type for 'gt' value. Expected: String Actual: " + dateRangeQuery.getGt().getClass().getName(),
+ dateRangeQuery.getGt() instanceof String);
+
+ assertTrue("Unexpected marshalled value for 'gt'. Expected: " + greaterThanDate + " Actual: " + dateRangeQuery.getGt(),
+ greaterThanDate.equals(dateRangeQuery.getGt()));
+
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + dateRangeExpectedES + " Actual: " + dateRangeQuery.toElasticSearch(),
+ dateRangeExpectedES.equals(dateRangeQuery.toElasticSearch()));
+
+ }
+
+ /**
+ * This test validates that numeric range queries can be marshalled to a Java
+ * POJO and unmarshalled to ElasticSearch syntax.
+ *
+ * @throws JsonParseException
+ * @throws JsonMappingException
+ * @throws IOException
+ */
+ @Test
+ public void numericRangeQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "version";
+ Integer greaterThanInt = 5;
+ Integer lessThanInt = 100;
+
+ ObjectMapper mapper = new ObjectMapper();
+
+ // Generate a numeric range query using 'greater than or equal' and 'less
+ // than or equal' operations.
+ String numericRangeJson =
+ "{ \"field\": \"" + field + "\", \"gte\": " + greaterThanInt + ", \"lte\": " + lessThanInt + "}";
+ String numericRangeExpectedES =
+ "{\"range\": {\"" + field + "\": {\"gte\": " + greaterThanInt + ", \"lte\": " + lessThanInt + "}}}";
+
+ // Validate that the query is marshalled correctly to the POJO and that
+ // the generated ElasticSearch syntax looks as expected.
+ RangeQuery numericRangeQuery = mapper.readValue(numericRangeJson, RangeQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field'. Expected: " + field + " Actual: " + numericRangeQuery.getField(),
+ field.equals(numericRangeQuery.getField()));
+ assertTrue("Unexpected type for 'gte' value. Expected: Integer Actual: " + numericRangeQuery.getGte().getClass().getName(),
+ numericRangeQuery.getGte() instanceof Integer);
+ assertTrue("Unexpected type for 'lte' value. Expected: Integer Actual: " + numericRangeQuery.getLte().getClass().getName(),
+ numericRangeQuery.getLte() instanceof Integer);
+ assertEquals("Unexpected marshalled value for 'gte'. Expected: " + greaterThanInt + " Actual: " + numericRangeQuery.getGte(),
+ greaterThanInt, numericRangeQuery.getGte());
+ assertEquals("Unexpected marshalled value for 'lte'. Expected: " + lessThanInt + " Actual: " + numericRangeQuery.getLte(),
+ lessThanInt, numericRangeQuery.getLte());
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + numericRangeExpectedES + " Actual: " + numericRangeQuery.toElasticSearch(),
+ numericRangeExpectedES.equals(numericRangeQuery.toElasticSearch()));
+
+
+ Double greaterThanDouble = 5.0;
+ Double lessThanDouble = 100.0;
+
+ // Generate a date range query using 'greater than' and 'less than or
+ // equal' operations.
+ numericRangeJson =
+ "{ \"field\": \"" + field + "\", \"gt\": " + greaterThanDouble + ", \"lte\": " + lessThanDouble + "}";
+ numericRangeExpectedES =
+ "{\"range\": {\"" + field + "\": {\"gt\": " + greaterThanDouble + ", \"lte\": " + lessThanDouble + "}}}";
+
+ // Validate that the query is marshalled correctly to the POJO and that
+ // the generated ElasticSearch syntax looks as expected.
+ numericRangeQuery = mapper.readValue(numericRangeJson, RangeQuery.class);
+
+ assertTrue("Unexpected marshalled value for 'field'. Expected: " + field + " Actual: " + numericRangeQuery.getField(),
+ field.equals(numericRangeQuery.getField()));
+
+ assertTrue("Unexpected type for 'gt' value. Expected: Double Actual: " + numericRangeQuery.getGt().getClass().getName(),
+ numericRangeQuery.getGt() instanceof Double);
+
+ assertTrue("Unexpected type for 'lte' value. Expected: Double Actual: " + numericRangeQuery.getLte().getClass().getName(),
+ numericRangeQuery.getLte() instanceof Double);
+
+ assertEquals("Unexpected marshalled value for 'gt'. Expected: " + greaterThanDouble + " Actual: " + numericRangeQuery.getGt(),
+ greaterThanDouble, numericRangeQuery.getGt());
+
+ assertEquals("Unexpected marshalled value for 'lte'. Expected: " + lessThanDouble + " Actual: " + numericRangeQuery.getLte(),
+ lessThanDouble, numericRangeQuery.getLte());
+
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + numericRangeExpectedES + " Actual: " + numericRangeQuery.toElasticSearch(),
+ numericRangeExpectedES.equals(numericRangeQuery.toElasticSearch()));
+ }
+
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java
new file mode 100644
index 0000000..b4c2fb8
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SearchStatementTest.java
@@ -0,0 +1,270 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+import org.openecomp.sa.rest.TestUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.*;
+
+public class SearchStatementTest {
+
+ @Test
+ public void simpleQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "searchTags";
+ String queryString = "aai3255";
+ String queryJson =
+ "{"
+ + "\"queries\": ["
+ + "{\"may\": {\"parsed-query\": {"
+ + "\"field\": \"" + field + "\","
+ + "\"query-string\": \"" + queryString + "\"}}}"
+ + "]"
+ + "}"
+ + "}";
+
+ String queryES =
+ "{"
+ + "\"version\": true,"
+ + "\"query\": {"
+ + "\"bool\": {"
+ + "\"must\": [], "
+ + "\"should\": ["
+ + "{\"query_string\": {\"default_field\": \"searchTags\", \"query\": \"aai3255\"}}"
+ + "],"
+ + "\"must_not\": []}"
+ + "}"
+ + "}";
+
+ // Marshal our simple query JSON to a SearchStatement object.
+ ObjectMapper mapper = new ObjectMapper();
+ SearchStatement ss = mapper.readValue(queryJson, SearchStatement.class);
+
+ // We expect to have a search statement with one query.
+ assertEquals("Unexpected number of queries in marshalled result",
+ 1, ss.getQueries().length);
+
+ // Validate that the query is of the expected type and contains the
+ // expected values.
+ QueryStatement query = ss.getQueries()[0].getQueryStatement();
+ assertNotNull("Expected marshalled statement to contain a 'parsed query'",
+ query.getParsedQuery());
+ assertTrue("Unexpected field name in marshalled query. Expected: " + field + " Actual: " + query.getParsedQuery().getField(),
+ field.equals(query.getParsedQuery().getField()));
+ assertTrue("Unexpected query string in marshalled query. Expected: " + queryString + " Actual: " + query.getParsedQuery().getQueryString(),
+ queryString.equals(query.getParsedQuery().getQueryString()));
+
+ // Validate that we are able to produce the expected ElasticSearch
+ // query syntax from the search statement.
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+ queryES.equals(ss.toElasticSearch()));
+ }
+
+
+ @Test
+ public void simpleSortedQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "searchTags";
+ String queryString = "aai3255";
+ String queryJson =
+ "{"
+ + "\"queries\": ["
+ + "{\"may\": {\"parsed-query\": {"
+ + "\"field\": \"" + field + "\","
+ + "\"query-string\": \"" + queryString + "\"}}}"
+ + "],"
+ + "\"sort\": { \"field\": \"date\", \"order\": \"ascending\" }"
+ + "}";
+
+
+ String queryES =
+ "{"
+ + "\"version\": true,"
+ + "\"query\": {"
+ + "\"bool\": {"
+ + "\"must\": [], "
+ + "\"should\": ["
+ + "{\"query_string\": {\"default_field\": \"searchTags\", \"query\": \"aai3255\"}}"
+ + "],"
+ + "\"must_not\": []"
+ + "}"
+ + "}, "
+ + "\"sort\": { \"date\": { \"order\": \"asc\"}}"
+ + "}";
+
+ // Marshal our simple query JSON to a SearchStatement object.
+ ObjectMapper mapper = new ObjectMapper();
+ SearchStatement ss = mapper.readValue(queryJson, SearchStatement.class);
+
+ // We expect to have a search statement with one query.
+ assertEquals("Unexpected number of queries in marshalled result",
+ 1, ss.getQueries().length);
+
+ // Validate that the query is of the expected type and contains the
+ // expected values.
+ QueryStatement query = ss.getQueries()[0].getQueryStatement();
+ assertNotNull("Expected marshalled statement to contain a 'parsed query'",
+ query.getParsedQuery());
+ assertTrue("Unexpected field name in marshalled query. Expected: " + field + " Actual: " + query.getParsedQuery().getField(),
+ field.equals(query.getParsedQuery().getField()));
+ assertTrue("Unexpected query string in marshalled query. Expected: " + queryString + " Actual: " + query.getParsedQuery().getQueryString(),
+ queryString.equals(query.getParsedQuery().getQueryString()));
+ System.out.println("GDF: ES = " + ss.toElasticSearch());
+ // Validate that we are able to produce the expected ElasticSearch
+ // query syntax from the search statement.
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+ queryES.equals(ss.toElasticSearch()));
+ assertNull(ss.getAggregations());
+ }
+
+ @Test
+ public void filteredQueryTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String filterField1 = "field1";
+ String filterField2 = "field2";
+ String filterField3 = "field3";
+ String filterValue1 = "a";
+ String filterValue2 = "b";
+ String filterValue3 = "string";
+ String filterJson = "{ \"any\": [ "
+ + "{\"match\": {\"field\": \"" + filterField1 + "\", \"value\": \"" + filterValue1 + "\"}},"
+ + "{\"match\": {\"field\": \"" + filterField2 + "\", \"value\": \"" + filterValue2 + "\"}}"
+ + "],"
+ + "\"all\": ["
+ + "{\"parsed-query\": {\"field\": \"" + filterField3 + "\", \"query-string\": \"" + filterValue3 + "\"}}"
+ + "]"
+ + "}";
+
+ String filterStanzaJson = "\"filter\": " + filterJson;
+
+ String queryStanzaJson = "\"queries\": [ "
+ + "{\"may\": {\"match\": {\"field\": \"searchTags\", \"value\": \"a\"}}},"
+ + "{\"may\": {\"match\": {\"field\": \"searchTags\", \"value\": \"b\"}}},"
+ + "{\"may\": {\"parsed-query\": {\"field\": \"fieldname\", \"query-string\": \"string\"}}}"
+ + "]";
+
+ String queryES =
+ "{"
+ + "\"version\": true,"
+ + "\"query\": {"
+ + "\"bool\": {"
+ + "\"must\": [], "
+ + "\"should\": ["
+ + "{\"term\": {\"searchTags\" : \"a\"}}, "
+ + "{\"term\": {\"searchTags\" : \"b\"}}, "
+ + "{\"query_string\": {\"default_field\": \"fieldname\", \"query\": \"string\"}}"
+ + "],"
+ + "\"must_not\": [], "
+ + "\"filter\": {"
+ + "\"bool\": {"
+ + "\"must\": ["
+ + "{\"query_string\": {\"default_field\": \"field3\", \"query\": \"string\"}}"
+ + "],"
+ + "\"must_not\": [],"
+ + "\"should\": ["
+ + "{\"term\": {\"field1\" : \"a\"}}, "
+ + "{\"term\": {\"field2\" : \"b\"}}"
+ + "],"
+ + "\"must_not\": []"
+ + "}"
+ + "}"
+ + "}"
+ + "}"
+ + "}";
+
+ StringBuilder sb = new StringBuilder();
+ sb.append("{");
+ sb.append(filterStanzaJson).append(", ");
+ sb.append(queryStanzaJson);
+ sb.append("}");
+
+ ObjectMapper mapper = new ObjectMapper();
+ SearchStatement ss = mapper.readValue(sb.toString(), SearchStatement.class);
+
+ assertEquals("Unexpected number of queries in the 'any' list for this statement's filter",
+ 2, ss.getFilter().getAny().length);
+ assertEquals("Unexpected number of queries in the 'all' list for this statement's filter",
+ 1, ss.getFilter().getAll().length);
+
+ assertTrue("Unexpected ElasticSearch syntax. Expected: " + queryES + " Actual: " + ss.toElasticSearch(),
+ queryES.equals(ss.toElasticSearch()));
+
+ assertNull(ss.getAggregations());
+ }
+
+ @Test
+ public void aggregationTest() {
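+ // The single-line input below defines one 'must' match query plus two top-level
+ // aggregations: "byDate" (a date-range on "mydate" with "byTerm" and "byDate"
+ // sub-aggregations) and "2nd" (a group-by on "anotherTerm").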
+ String input = "{\r\n \"queries\": [\r\n {\r\n \"must\": {\r\n \"match\": {\r\n \"field\": \"searchTags\",\r\n \"value\": \"a\"\r\n }\r\n }\r\n }\r\n ],\r\n \"aggregations\": [\r\n {\r\n \"name\": \"byDate\",\r\n \"aggregation\": {\r\n \"date-range\": {\r\n \"field\": \"mydate\",\r\n \"ranges\": [\r\n {\r\n \"from\": \"2016-12-19T00:00:00.738-05:00\",\r\n \"to\": \"2016-12-23T23:59:59.738-05:00\"\r\n }\r\n ]\r\n },\r\n \"sub-aggregations\": [\r\n {\r\n \"name\": \"byTerm\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"myterm\"\r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"byDate\",\r\n \"aggregation\": {\r\n \"date-histogram\": {\r\n \"field\": \"myDate\",\r\n \"interval\": \"myInterval\"\r\n }\r\n }\r\n }\r\n ]\r\n }\r\n },\r\n {\r\n \"name\": \"2nd\",\r\n \"aggregation\": {\r\n \"group-by\": {\r\n \"field\": \"anotherTerm\"\r\n }\r\n }\r\n }\r\n ]\r\n}";
+
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ SearchStatement ss = mapper.readValue(input, SearchStatement.class);
+ Aggregation[] aggs = ss.getAggregations();
+ assertNotNull(aggs);
+ assertEquals("Unexpected number aggregations", 2, aggs.length);
+ assertEquals("byDate", aggs[0].getName());
+ assertNotNull(aggs[0].getStatement().getDateRange());
+ assertEquals("mydate", aggs[0].getStatement().getDateRange().getField());
+ assertNotNull(aggs[0].getStatement().getSubAggregations());
+ assertEquals(2, aggs[0].getStatement().getSubAggregations().length);
+ assertEquals("byTerm", aggs[0].getStatement().getSubAggregations()[0].getName());
+ assertEquals("byDate", aggs[0].getStatement().getSubAggregations()[1].getName());
+ assertNull(aggs[0].getStatement().getGroupBy());
+ assertEquals("2nd", aggs[1].getName());
+ assertNotNull(aggs[1].getStatement().getGroupBy());
+ assertEquals("anotherTerm", aggs[1].getStatement().getGroupBy().getField());
+ assertNull(aggs[1].getStatement().getDateRange());
+ assertNull(aggs[1].getStatement().getSubAggregations());
+
+ } catch (Exception e) {
+ fail("Encountered exception: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void resultSetRangeTest() throws IOException {
+
+ // Simple query with a result set subrange specified.
+ File queryWithSubrangeFile = new File("src/test/resources/json/queries/query-with-subrange.json");
+ String queryWithSubrangeStr = TestUtils.readFileToString(queryWithSubrangeFile);
+ String queryWithSubrangeExpectedESString =
+ "{\"version\": true,\"from\": 0, \"size\": 10, \"query\": {\"bool\": {\"must\": [{\"term\": {\"field1\" : \"Bob\"}}], \"should\": [],\"must_not\": []}}}";
+
+ ObjectMapper mapper = new ObjectMapper();
+ SearchStatement ss = mapper.readValue(queryWithSubrangeStr, SearchStatement.class);
+
+ assertEquals("Unexpected index for result set start", ss.getFrom(), (Integer) 0);
+ assertEquals("Unexpected value for result set size", ss.getSize(), (Integer) 10);
+ assertTrue("Unexpected elastic search query generated from search statement",
+ ss.toElasticSearch().equals(queryWithSubrangeExpectedESString));
+ }
+}
diff --git a/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java
new file mode 100644
index 0000000..0374865
--- /dev/null
+++ b/src/test/java/org/openecomp/sa/searchdbabstraction/searchapi/SortTest.java
@@ -0,0 +1,54 @@
+/**
+ * ============LICENSE_START=======================================================
+ * Search Data Service
+ * ================================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sa.searchdbabstraction.searchapi;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertTrue;
+
+public class SortTest {
+
+ @Test
+ public void sortFieldTest() throws JsonParseException, JsonMappingException, IOException {
+
+ String field = "fieldname";
+ String order = "ascending";
+ String json = "{\"field\": \"" + field + "\", \"order\": \"" + order + "\"}";
+
+ ObjectMapper mapper = new ObjectMapper();
+ Sort sort = mapper.readValue(json, Sort.class);
+
+ assertTrue("Unexpected field name in marshalled object. Expected: " + field + " Actual: " + sort.getField(),
+ field.equals(sort.getField()));
+ assertTrue("Unexpected order field in marshalled object. Expected: " + order + " Actual: " + sort.getOrder(),
+ order.equals(sort.getOrder().toString()));
+
+ }
+}