author | ARULNA <arul.nambi@amdocs.com> | 2017-06-12 16:41:12 -0400
committer | ARULNA <arul.nambi@amdocs.com> | 2017-06-12 16:41:28 -0400
commit | b4922d319d293894fddd512d29b5f0d1411915d9 (patch)
tree | 36cec7575f1631aad41d7b1131d6352847ea0de2 /src/main/java
parent | 19dacd2ba38e345eeb5fcfbfe37d615602e8ea44 (diff)
Initial commit for AAI-UI(sparky-backend)
Change-Id: I785397ed4197663cdf0c1351041d2f708ed08763
Signed-off-by: ARULNA <arul.nambi@amdocs.com>
Diffstat (limited to 'src/main/java')
157 files changed, 30957 insertions, 0 deletions
diff --git a/src/main/java/org/openecomp/sparky/HelloWorld.java b/src/main/java/org/openecomp/sparky/HelloWorld.java new file mode 100644 index 0000000..6719307 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/HelloWorld.java @@ -0,0 +1,49 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky; + +import org.apache.camel.Exchange; + +/** + * The Class HelloWorld. + */ +public class HelloWorld { + + /** + * Instantiates a new hello world. + */ + public HelloWorld() {} + + /** + * Speak. + * + * @param exc the exc + */ + public final void speak(Exchange exc) { + exc.setOut(exc.getIn()); + exc.getOut().setBody("Hello World!"); + } +} diff --git a/src/main/java/org/openecomp/sparky/JaxrsEchoService.java b/src/main/java/org/openecomp/sparky/JaxrsEchoService.java new file mode 100644 index 0000000..ff70fbc --- /dev/null +++ b/src/main/java/org/openecomp/sparky/JaxrsEchoService.java @@ -0,0 +1,83 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky; + +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; + + +/** + * The Class JaxrsEchoService. + */ +@Path("/jaxrs-services") +public class JaxrsEchoService { + + /** + * Ping. 
+ * + * @param input the input + * @return the string + */ + @GET + @Path("/echo/{input}") + @Produces("text/plain") + public String ping(@PathParam("input") String input) { + return "Hello"; + } + + /** + * Gets the property. + * + * @param fileName the file name + * @param input the input + * @return the property + */ + @GET + @Path("/property/{fileName}/{input:.*}") + @Produces("text/plain") + public String getProperty(@PathParam("fileName") String fileName, + @PathParam("input") String input) { + String val = null; + try { + val = AJSCPropertiesMap.getProperty(fileName, input); + if (val == null || val.isEmpty() || val.length() < 1) { + val = PropertiesMapBean.getProperty(fileName, input); + } + } catch (Exception ex) { + System.out.println("*** Error retrieving property " + input + ": " + ex); + } + if (val == null) { + return "Property is not available"; + } + return "Property value is, " + val + "."; + } + +} diff --git a/src/main/java/org/openecomp/sparky/JaxrsUserService.java b/src/main/java/org/openecomp/sparky/JaxrsUserService.java new file mode 100644 index 0000000..bf9f7b6 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/JaxrsUserService.java @@ -0,0 +1,64 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky; + +import java.util.HashMap; +import java.util.Map; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; + +/** + * The Class JaxrsUserService. + */ +@Path("/user") +public class JaxrsUserService { + + private static final Map<String, String> userIdToNameMap; + + static { + userIdToNameMap = new HashMap<String, String>(); + userIdToNameMap.put("dw113c", "Doug Wait"); + userIdToNameMap.put("so401q", "Stuart O'Day"); + } + + /** + * Lookup user. + * + * @param userId the user id + * @return the string + */ + @GET + @Path("/{userId}") + @Produces("text/plain") + public String lookupUser(@PathParam("userId") String userId) { + String name = userIdToNameMap.get(userId); + return name != null ? 
name : "unknown id"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/analytics/AbstractStatistics.java b/src/main/java/org/openecomp/sparky/analytics/AbstractStatistics.java new file mode 100644 index 0000000..e599165 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/analytics/AbstractStatistics.java @@ -0,0 +1,180 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.analytics; + +import java.util.HashMap; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * The Class AbstractStatistics. + */ +public class AbstractStatistics implements ComponentStatistics { + + private HashMap<String, AtomicInteger> namedCounters; + private HashMap<String, HistogramSampler> namedHistograms; + + /** + * Instantiates a new abstract statistics. 
+ */ + protected AbstractStatistics() { + namedCounters = new HashMap<String, AtomicInteger>(); + namedHistograms = new HashMap<String, HistogramSampler>(); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#addCounter(java.lang.String) + */ + /* + * sync-lock the creation of counters during initialization, but run time should not use lock + * synchronization, only thread safe types + * + * @see com.att.ecomp.uicommon.resolver.stat.ComponentStatistics#addCounter(java.lang.String) + */ + @Override + public synchronized void addCounter(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter == null) { + counter = new AtomicInteger(0); + namedCounters.put(key, counter); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#pegCounter(java.lang.String) + */ + @Override + public void pegCounter(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter != null) { + counter.incrementAndGet(); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#incrementCounter(java.lang.String, int) + */ + @Override + public void incrementCounter(String key, int value) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter != null) { + counter.addAndGet(value); + } + + } + + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#addHistogram(java.lang.String, java.lang.String, long, int, int) + */ + @Override + public synchronized void addHistogram(String key, String histName, long maxYValue, int numBins, + int numDecimalPoints) { + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler == null) { + histSampler = new HistogramSampler(histName, maxYValue, numBins, numDecimalPoints); + namedHistograms.put(key, histSampler); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#updateHistogram(java.lang.String, long) + */ + @Override + public void updateHistogram(String key, long value) { + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler != null) { + histSampler.track(value); + } + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.analytics.ComponentStatistics#reset() + */ + @Override + public void reset() { + + for (HistogramSampler h : namedHistograms.values()) { + h.clear(); + } + + for (AtomicInteger c : namedCounters.values()) { + c.set(0); + } + + } + + /** + * Gets the counter value. + * + * @param key the key + * @return the counter value + */ + protected int getCounterValue(String key) { + + AtomicInteger counter = namedCounters.get(key); + + if (counter == null) { + return -1; + } + + return counter.get(); + + } + + /** + * Gets the histogram stats. 
+ * + * @param key the key + * @param verboseEnabled the verbose enabled + * @param indentPadding the indent padding + * @return the histogram stats + */ + protected String getHistogramStats(String key, boolean verboseEnabled, String indentPadding) { + + HistogramSampler histSampler = namedHistograms.get(key); + + if (histSampler == null) { + return null; + } + + return histSampler.getStats(verboseEnabled, indentPadding); + + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/analytics/AveragingRingBuffer.java b/src/main/java/org/openecomp/sparky/analytics/AveragingRingBuffer.java new file mode 100644 index 0000000..18f5dcf --- /dev/null +++ b/src/main/java/org/openecomp/sparky/analytics/AveragingRingBuffer.java @@ -0,0 +1,122 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.analytics; + +import java.util.concurrent.atomic.AtomicInteger; + +/** + * TODO: Fill in description. + * + * @author davea + */ +public class AveragingRingBuffer { + + private int numElements; + + private long[] data; + + private AtomicInteger index; + + private long average; + + private boolean initialAverageCalculated; + + /** + * Instantiates a new averaging ring buffer. + * + * @param size the size + */ + public AveragingRingBuffer(int size) { + + if (size == 0) { + throw new IllegalArgumentException("Size must be greater than zero"); + } + + this.initialAverageCalculated = false; + this.numElements = size; + this.data = new long[this.numElements]; + this.index = new AtomicInteger(-1); + } + + /** + * Calculate average. + * + * @param maxArrayIndex the max array index + */ + private void calculateAverage(int maxArrayIndex) { + + long sum = 0; + + for (int i = 0; i <= maxArrayIndex; i++) { + sum += data[i]; + } + + average = (sum / (maxArrayIndex + 1)); + + } + + public long getAvg() { + + if (!initialAverageCalculated) { + /* + * until the index rolls once we will calculate the average from the data that has been added + * to the array, not including the zero elements + */ + if (index.get() < 0) { + calculateAverage(0); + } else { + calculateAverage(index.get()); + } + + } + + return average; + } + + /** + * Adds the sample. 
+ * + * @param value the value + */ + public synchronized void addSample(long value) { + + index.incrementAndGet(); + + data[index.get()] = value; + + if (index.get() == (numElements - 1)) { + calculateAverage(numElements - 1); + + if (!initialAverageCalculated) { + initialAverageCalculated = true; + } + + index.set(-1); + } + + } + +} diff --git a/src/main/java/org/openecomp/sparky/analytics/ComponentStatistics.java b/src/main/java/org/openecomp/sparky/analytics/ComponentStatistics.java new file mode 100644 index 0000000..285661f --- /dev/null +++ b/src/main/java/org/openecomp/sparky/analytics/ComponentStatistics.java @@ -0,0 +1,81 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.analytics; + + +/** + * The Interface ComponentStatistics. + */ +public interface ComponentStatistics { + + /** + * Adds the counter. + * + * @param key the key + */ + public void addCounter(String key); + + /** + * Peg counter. + * + * @param key the key + */ + public void pegCounter(String key); + + /** + * Increment counter. + * + * @param key the key + * @param value the value + */ + public void incrementCounter(String key, int value); + + /** + * Adds the histogram. + * + * @param key the key + * @param name the name + * @param maxYValue the max Y value + * @param numBins the num bins + * @param numDecimalPoints the num decimal points + */ + public void addHistogram(String key, String name, long maxYValue, int numBins, + int numDecimalPoints); + + /** + * Update histogram. + * + * @param key the key + * @param value the value + */ + public void updateHistogram(String key, long value); + + /** + * Reset. + */ + public void reset(); + +} diff --git a/src/main/java/org/openecomp/sparky/analytics/HistogramSampler.java b/src/main/java/org/openecomp/sparky/analytics/HistogramSampler.java new file mode 100644 index 0000000..7f87bea --- /dev/null +++ b/src/main/java/org/openecomp/sparky/analytics/HistogramSampler.java @@ -0,0 +1,287 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.analytics; + +/** + * A class that models a histogram for reporting and tracking long values with variable steps, bins, + * and floating point accuracy. + * + * @author davea. + */ +public final class HistogramSampler { + + private String label; + + private long binMaxValue; + + private int numBins; + + private double stepSize; + + private long sampleValueTotal; + + private long minValue = -1; + + private long maxValue = 0; + + private long numSamples = 0; + + private long decimalPointAccuracy = 0; + + private static String FORMAT_FLOAT_TEMPLATE = "%%.%df"; + + private String floatFormatStr; + + private long[] histogramBins; + + /** + * Instantiates a new histogram sampler. + * + * @param label the label + * @param maxValue the max value + * @param numBins the num bins + * @param decimalPointAccuracy the decimal point accuracy + */ + public HistogramSampler(String label, long maxValue, int numBins, int decimalPointAccuracy) { + this.label = label; + this.binMaxValue = maxValue; + this.numBins = numBins; + this.stepSize = ((double) binMaxValue / (double) numBins); + this.decimalPointAccuracy = decimalPointAccuracy; + this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy); + + /* + * [numBins + 1] => last bin is catch-all for outliers + */ + + initializeHistogramBins(numBins + 1); + + } + + /** + * Initialize histogram bins. + * + * @param numBins the num bins + */ + private void initializeHistogramBins(int numBins) { + + histogramBins = new long[numBins]; + int counter = 0; + while (counter < numBins) { + histogramBins[counter] = 0; + counter++; + } + + } + + /* + * Is it really necessary to synchronize the collection, or should we simply switch the underlying + * data type to an AtomicLong + */ + + /** + * Track. + * + * @param value the value + */ + public synchronized void track(long value) { + + if (value < 0) { + return; + } + + sampleValueTotal += value; + numSamples++; + + if (minValue == -1) { + minValue = value; + } + + if (value < minValue) { + minValue = value; + } + + if (value > maxValue) { + maxValue = value; + } + + /* + * One step bin determination + */ + + if (value < (numBins * stepSize)) { + + int index = (int) (value / stepSize); + histogramBins[index]++; + + } else { + // peg the metric in the outlier bin + histogramBins[numBins - 1]++; + } + + } + + /** + * Clear. + */ + public void clear() { + + int counter = 0; + while (counter < numBins) { + histogramBins[counter] = 0; + counter++; + } + + minValue = -1; + maxValue = 0; + numSamples = 0; + sampleValueTotal = 0; + + } + + /** + * Re initialize bins. 
+ * + * @param label the label + * @param numBins the num bins + * @param maxValue the max value + * @param decimalPointAccuracy the decimal point accuracy + */ + public void reInitializeBins(String label, int numBins, long maxValue, int decimalPointAccuracy) { + this.label = label; + this.decimalPointAccuracy = decimalPointAccuracy; + this.floatFormatStr = String.format(FORMAT_FLOAT_TEMPLATE, this.decimalPointAccuracy); + this.numBins = numBins; + this.minValue = -1; + this.maxValue = 0; + initializeHistogramBins(numBins); + this.stepSize = (maxValue / numBins); + clear(); + } + + public long getNumberOfSamples() { + return numSamples; + } + + public long getTotalValueSum() { + return sampleValueTotal; + } + + /** + * Gets the stats. + * + * @param formatted the formatted + * @param indentPadding the indent padding + * @return the stats + */ + public String getStats(boolean formatted, String indentPadding) { + + StringBuilder sb = new StringBuilder(128); + + + if (!formatted) { + // generate CSV in the following format + + /* + * label,minValue,maxValue,avgValue,numSamples,stepSize,numSteps,stepCounters + */ + sb.append(indentPadding); + sb.append(label).append(","); + sb.append(minValue).append(","); + sb.append(maxValue).append(","); + if (numSamples == 0) { + sb.append(0).append(","); + } else { + sb.append((sampleValueTotal / numSamples)).append(","); + } + sb.append(numSamples).append(","); + sb.append(numBins).append(","); + sb.append(String.format(floatFormatStr, stepSize)); + + int counter = 0; + while (counter < numBins) { + + if (counter != (numBins)) { + sb.append(","); + } + + sb.append(histogramBins[counter]); + + counter++; + + } + + return sb.toString(); + + } + + sb.append("\n"); + sb.append(indentPadding).append("Label = ").append(label).append("\n"); + sb.append(indentPadding).append("Min = ").append(minValue).append("\n"); + sb.append(indentPadding).append("Max = ").append(maxValue).append("\n"); + sb.append(indentPadding).append("numSamples = ").append(numSamples).append("\n"); + + if (numSamples == 0) { + sb.append(indentPadding).append("Avg = ").append(0).append("\n"); + } else { + sb.append(indentPadding).append("Avg = ").append((sampleValueTotal / numSamples)) + .append("\n"); + } + + sb.append(indentPadding).append("StepSize = ").append(String.format(floatFormatStr, stepSize)) + .append("\n"); + + sb.append(indentPadding).append("Sample Histogram:").append("\n"); + + int counter = 0; + while (counter < numBins) { + + if (counter == (numBins - 1)) { + // outlier bin + double leftBound = (stepSize * counter); + sb.append(indentPadding).append("\t") + .append(" x >= " + String.format(floatFormatStr, leftBound) + " : " + + histogramBins[counter]) + .append("\n"); + + } else { + double leftBound = (stepSize * counter); + double rightBound = ((stepSize) * (counter + 1)); + sb.append(indentPadding).append("\t") + .append((String.format(floatFormatStr, leftBound) + " < x < " + + String.format(floatFormatStr, rightBound) + " : " + histogramBins[counter])) + .append("\n"); + } + + counter++; + + } + + return sb.toString(); + + } + +} diff --git a/src/main/java/org/openecomp/sparky/analytics/HistoricalCounter.java b/src/main/java/org/openecomp/sparky/analytics/HistoricalCounter.java new file mode 100644 index 0000000..f8c5f05 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/analytics/HistoricalCounter.java @@ -0,0 +1,155 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * 
============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + + +package org.openecomp.sparky.analytics; + +/** + * A simple class to model a historical counter. A set of values will be tracked and basic + * statistics will be calculated in real time (n, min, max, avg). + * + * @author davea + */ +public class HistoricalCounter { + + private double min; + + private double max; + + private double totalOfSamples; + + private long numSamples; + + private double value; + + private boolean maintainSingleValue; + + /** + * Instantiates a new historical counter. + * + * @param trackSingleValue the track single value + */ + public HistoricalCounter(boolean trackSingleValue) { + min = -1; + max = 0; + totalOfSamples = 0; + value = 0.0; + numSamples = 0; + this.maintainSingleValue = trackSingleValue; + } + + public boolean isSingleValue() { + return maintainSingleValue; + } + + /** + * Update. + * + * @param value the value + */ + public synchronized void update(double value) { + + if (value < 0) { + return; + } + + if (maintainSingleValue) { + + this.value = value; + + } else { + + if (min == -1) { + min = value; + } + + if (value < min) { + min = value; + } + + if (value > max) { + max = value; + } + + totalOfSamples += value; + numSamples++; + } + } + + public double getValue() { + return value; + } + + public double getMin() { + return min; + } + + public double getMax() { + return max; + } + + public long getNumSamples() { + return numSamples; + } + + public double getAvg() { + if (numSamples == 0) { + return 0; + } + + return (totalOfSamples / numSamples); + } + + /** + * Reset. 
+ */ + public synchronized void reset() { + min = -1; + max = 0; + numSamples = 0; + totalOfSamples = 0; + value = 0.0; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(32); + + if (maintainSingleValue) { + sb.append("[ Val=").append(value).append(" ]"); + } else { + sb.append("[ NumSamples=").append(numSamples).append(","); + sb.append(" Min=").append(min).append(","); + sb.append(" Max=").append(max).append(","); + sb.append(" Avg=").append(getAvg()).append(" ]"); + } + + return sb.toString(); + } + +} diff --git a/src/main/java/org/openecomp/sparky/config/Configurable.java b/src/main/java/org/openecomp/sparky/config/Configurable.java new file mode 100644 index 0000000..4ea02ff --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/Configurable.java @@ -0,0 +1,46 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.config; + +import org.openecomp.sparky.config.exception.ConfigurationException; + +/** + * The Interface Configurable. + */ +public interface Configurable { + + public boolean isValid(); + + public boolean isInitialized(); + + /** + * Load config. + * + * @throws ConfigurationException the configuration exception + */ + public void loadConfig() throws ConfigurationException; + +} diff --git a/src/main/java/org/openecomp/sparky/config/exception/ConfigurationException.java b/src/main/java/org/openecomp/sparky/config/exception/ConfigurationException.java new file mode 100644 index 0000000..23f3666 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/exception/ConfigurationException.java @@ -0,0 +1,34 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.config.exception; + + +/** + * The Class ConfigurationException. + */ +public class ConfigurationException extends Exception { + +} diff --git a/src/main/java/org/openecomp/sparky/config/oxm/CrossEntityReference.java b/src/main/java/org/openecomp/sparky/config/oxm/CrossEntityReference.java new file mode 100644 index 0000000..855eea4 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/oxm/CrossEntityReference.java @@ -0,0 +1,80 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + + +package org.openecomp.sparky.config.oxm; + +import java.util.ArrayList; +import java.util.List; + +/** + * The Class CrossEntityReference. + */ +public class CrossEntityReference { + private String targetEntityType; + private List<String> referenceAttributes; + + /** + * Instantiates a new cross entity reference. + */ + public CrossEntityReference() { + targetEntityType = null; + referenceAttributes = new ArrayList<String>(); + } + + public String getTargetEntityType() { + return targetEntityType; + } + + public void setTargetEntityType(String targetEntityType) { + this.targetEntityType = targetEntityType; + } + + public List<String> getReferenceAttributes() { + return referenceAttributes; + } + + public void setReferenceAttributes(List<String> referenceAttributes) { + this.referenceAttributes = referenceAttributes; + } + + /** + * Adds the reference attribute. 
+ * + * @param additionalAttribute the additional attribute + */ + public void addReferenceAttribute(String additionalAttribute) { + referenceAttributes.add(additionalAttribute); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "CrossEntityReference [targetEntityType=" + targetEntityType + ", referenceAttributes=" + + referenceAttributes + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/config/oxm/OxmEntityDescriptor.java b/src/main/java/org/openecomp/sparky/config/oxm/OxmEntityDescriptor.java new file mode 100644 index 0000000..c38fa40 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/oxm/OxmEntityDescriptor.java @@ -0,0 +1,179 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.config.oxm; + +import java.util.List; + +import org.openecomp.sparky.synchronizer.entity.SuggestionSearchEntity; + +/** + * The Class OxmEntityDescriptor. + */ +public class OxmEntityDescriptor { + + private String entityName; + + private List<String> primaryKeyAttributeName; + + private List<String> searchableAttributes; + + private CrossEntityReference crossEntityReference; + + private String geoLatName; + + private String geoLongName; + + private SuggestionSearchEntity suggestionSearchEntity; + + public String getEntityName() { + return entityName; + } + + public void setEntityName(String entityName) { + this.entityName = entityName; + } + + public List<String> getPrimaryKeyAttributeName() { + return primaryKeyAttributeName; + } + + public void setPrimaryKeyAttributeName(List<String> primaryKeyAttributeName) { + this.primaryKeyAttributeName = primaryKeyAttributeName; + } + + public List<String> getSearchableAttributes() { + return searchableAttributes; + } + + public void setSearchableAttributes(List<String> searchableAttributes) { + this.searchableAttributes = searchableAttributes; + } + + /** + * Checks for searchable attributes. 
+ * + * @return true, if successful + */ + public boolean hasSearchableAttributes() { + + if (this.searchableAttributes == null) { + return false; + } + + if (this.searchableAttributes.size() > 0) { + return true; + } + + return false; + + } + + public CrossEntityReference getCrossEntityReference() { + return crossEntityReference; + } + + public void setCrossEntityReference(CrossEntityReference crossEntityReference) { + this.crossEntityReference = crossEntityReference; + } + + /** + * Checks for cross entity references. + * + * @return true, if successful + */ + public boolean hasCrossEntityReferences() { + if (this.crossEntityReference == null) { + return false; + } + if (!this.crossEntityReference.getReferenceAttributes().isEmpty()) { + return true; + } + return false; + } + + public String getGeoLatName() { + return geoLatName; + } + + public void setGeoLatName(String geoLatName) { + this.geoLatName = geoLatName; + } + + public String getGeoLongName() { + return geoLongName; + } + + public void setGeoLongName(String geoLongName) { + this.geoLongName = geoLongName; + } + + /** + * Checks for geo entity. + * + * @return true, if successful + */ + public boolean hasGeoEntity() { + + if (this.geoLongName != null && this.geoLatName != null) { + return true; + } + + return false; + + } + + public SuggestionSearchEntity getSuggestionSearchEntity() { + return this.suggestionSearchEntity; + } + + public void setSuggestionSearchEntity(SuggestionSearchEntity suggestionSearchEntity) { + this.suggestionSearchEntity = suggestionSearchEntity; + } + + /** + * Checks for non-null, populated SuggestionSearchEntity. + * + * @return true, if successful + */ + public boolean hasSuggestionSearchEntity() { + if (this.suggestionSearchEntity == null) { + return false; + } + if (!this.suggestionSearchEntity.getSuggestionConnectorWords().isEmpty()) { + return true; + } + return false; + } + + @Override + public String toString() { + return "OxmEntityDescriptor [entityName=" + entityName + ", primaryKeyAttributeName=" + + primaryKeyAttributeName + ", searchableAttributes=" + searchableAttributes + + ", crossEntityReference=" + crossEntityReference + ", geoLatName=" + geoLatName + + ", geoLongName=" + geoLongName + ", suggestionSearchEntity=" + suggestionSearchEntity + + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoader.java b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoader.java new file mode 100644 index 0000000..eef8c93 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoader.java @@ -0,0 +1,534 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.config.oxm; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Vector; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.eclipse.persistence.dynamic.DynamicType; +import org.eclipse.persistence.internal.oxm.mappings.Descriptor; +import org.eclipse.persistence.jaxb.JAXBContextProperties; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContext; +import org.eclipse.persistence.jaxb.dynamic.DynamicJAXBContextFactory; +import org.eclipse.persistence.mappings.DatabaseMapping; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.entity.SuggestionSearchEntity; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + + +/** + * The Class OxmModelLoader. + */ +public class OxmModelLoader { + + private static OxmModelLoader instance; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(OxmModelLoader.class); + + private Map<String, HashMap<String, String>> oxmModel = + new LinkedHashMap<String, HashMap<String, String>>(); + + private Map<String, DynamicType> entityTypeLookup = new LinkedHashMap<String, DynamicType>(); + + private Map<String, HashMap<String, String>> searchableOxmModel = + new LinkedHashMap<String, HashMap<String, String>>(); + + private Map<String, HashMap<String, String>> crossReferenceEntityOxmModel = + new LinkedHashMap<String, HashMap<String, String>>(); + + private Map<String, HashMap<String, String>> geoEntityOxmModel = + new LinkedHashMap<String, HashMap<String, String>>(); + + private Map<String, HashMap<String, String>> suggestionSearchEntityOxmModel = + new LinkedHashMap<String, HashMap<String, String>>(); + + private Map<String, OxmEntityDescriptor> entityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + private Map<String, OxmEntityDescriptor> searchableEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + private Map<String, OxmEntityDescriptor> crossReferenceEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + private Map<String, OxmEntityDescriptor> geoEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + private Map<String, OxmEntityDescriptor> suggestionSearchEntityDescriptors = + new HashMap<String, OxmEntityDescriptor>(); + + public static OxmModelLoader getInstance() { + if (instance == null) { + instance = new OxmModelLoader(); + LOG.info(AaiUiMsgs.INITIALIZE_OXM_MODEL_LOADER); + instance.loadModels(); + } + + return instance; + + } + + /** + * Instantiates a new oxm model loader. + */ + public OxmModelLoader() { + + } + + /** + * Load models. + */ + private void loadModels() { + // find latest version of OXM file in folder + String version = findLatestOxmVersion(); + if (version == null) { + LOG.error(AaiUiMsgs.OXM_FILE_NOT_FOUND, TierSupportUiConstants.CONFIG_OXM_LOCATION); + return; + } + + // load the latest version based on file name + loadModel(version); + + } + + /** + * Load model. 
+ * + * @param version the version + */ + public void loadModel(String version) { + String fileName = loadOxmFileName(version); + InputStream inputStream; + try { + inputStream = new FileInputStream(new File(fileName)); + } catch (FileNotFoundException fnf) { + LOG.info(AaiUiMsgs.OXM_READ_ERROR_NONVERBOSE); + LOG.error(AaiUiMsgs.OXM_READ_ERROR_VERBOSE, fileName); + return; + } + + Map<String, Object> properties = new HashMap<String, Object>(); + properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, inputStream); + try { + final DynamicJAXBContext oxmContext = DynamicJAXBContextFactory + .createContextFromOXM(Thread.currentThread().getContextClassLoader(), properties); + + parseOxmContext(oxmContext); + // populateSearchableOxmModel(); + LOG.info(AaiUiMsgs.OXM_LOAD_SUCCESS); + } catch (Exception exc) { + LOG.info(AaiUiMsgs.OXM_PARSE_ERROR_NONVERBOSE); + LOG.error(AaiUiMsgs.OXM_PARSE_ERROR_VERBOSE, fileName, exc.getMessage()); + } + } + + /** + * Parses the oxm context. + * + * @param oxmContext the oxm context + */ + private void parseOxmContext(DynamicJAXBContext oxmContext) { + @SuppressWarnings("rawtypes") + List<Descriptor> descriptorsList = oxmContext.getXMLContext().getDescriptors(); + + for (@SuppressWarnings("rawtypes") + Descriptor desc : descriptorsList) { + + DynamicType entity = oxmContext.getDynamicType(desc.getAlias()); + + LinkedHashMap<String, String> oxmProperties = new LinkedHashMap<String, String>(); + + // Not all fields have key attributes + if (desc.getPrimaryKeyFields() != null) { + oxmProperties.put("primaryKeyAttributeNames", desc.getPrimaryKeyFields().toString() + .replaceAll("/text\\(\\)", "").replaceAll("\\[", "").replaceAll("\\]", "")); + } + + String entityName = desc.getDefaultRootElement(); + + entityTypeLookup.put(entityName, entity); + + // add entityName + oxmProperties.put("entityName", entityName); + + Map<String, String> properties = entity.getDescriptor().getProperties(); + if (properties != null) { + for (Map.Entry<String, String> entry : properties.entrySet()) { + + if (entry.getKey().equalsIgnoreCase("searchable")) { + oxmProperties.put("searchableAttributes", entry.getValue()); + } else if (entry.getKey().equalsIgnoreCase("crossEntityReference")) { + oxmProperties.put("crossEntityReference", entry.getValue()); + } else if (entry.getKey().equalsIgnoreCase("geoLat")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLat", entry.getValue()); + } + } else if (entry.getKey().equalsIgnoreCase("geoLong")) { + if (entry.getValue().length() > 0) { + oxmProperties.put("geoLong", entry.getValue()); + } + } else if (entry.getKey().equalsIgnoreCase("containsSuggestibleProps")) { + + oxmProperties.put("containsSuggestibleProps", "true"); + + Vector<DatabaseMapping> descriptorMaps = entity.getDescriptor().getMappings(); + List<String> listOfSuggestableAttributes = new ArrayList<String>(); + + for (DatabaseMapping descMap : descriptorMaps) { + if (descMap.isAbstractDirectMapping()) { + + if (descMap.getProperties().get("suggestibleOnSearch") != null) { + String suggestableOnSearchString = String.valueOf( + descMap.getProperties().get("suggestibleOnSearch")); + + boolean isSuggestibleOnSearch = Boolean.valueOf(suggestableOnSearchString); + + if (isSuggestibleOnSearch) { + /* Grab attribute types for suggestion */ + String attributeName = descMap.getField().getName() + .replaceAll("/text\\(\\)", ""); + listOfSuggestableAttributes.add(attributeName); + + if (descMap.getProperties().get("suggestionVerbs") != null) { + String suggestionVerbsString = 
String.valueOf( + descMap.getProperties().get("suggestionVerbs")); + + oxmProperties.put("suggestionVerbs", suggestionVerbsString); + } + } + } + } + } + if (!listOfSuggestableAttributes.isEmpty()) { + oxmProperties.put("suggestibleAttributes", String.join(",", + listOfSuggestableAttributes)); + } + } else if (entry.getKey().equalsIgnoreCase("suggestionAliases")) { + oxmProperties.put("suggestionAliases", entry.getValue()); + } + } + } + + oxmModel.put(entityName, oxmProperties); + + // Add all searchable entity types for reserve lookup + if (oxmProperties.containsKey("searchableAttributes")) { + searchableOxmModel.put(entityName, oxmProperties); + } + + if (oxmProperties.containsKey("crossEntityReference")) { + crossReferenceEntityOxmModel.put(entityName, oxmProperties); + } + + if (oxmProperties.containsKey("geoLat") && oxmProperties.containsKey("geoLong")) { + geoEntityOxmModel.put(entityName, oxmProperties); + } + + if (oxmProperties.containsKey("containsSuggestibleProps")) { + suggestionSearchEntityOxmModel.put(entityName, oxmProperties); + } + } + + for (Entry<String, HashMap<String, String>> entityModel : oxmModel.entrySet()) { + HashMap<String, String> attribute = entityModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + if (attribute.containsKey("primaryKeyAttributeNames")) { + + entity.setPrimaryKeyAttributeName( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + if (attribute.containsKey("searchableAttributes")) { + entity.setSearchableAttributes( + Arrays.asList(attribute.get("searchableAttributes").split(","))); + } else if (attribute.containsKey("crossEntityReference")) { + List<String> crossEntityRefTokens = + Arrays.asList(attribute.get("crossEntityReference").split(",")); + + if (crossEntityRefTokens.size() >= 2) { + CrossEntityReference entityRef = new CrossEntityReference(); + entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); + + for (int i = 1; i < crossEntityRefTokens.size(); i++) { + entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); + } + + entity.setCrossEntityReference(entityRef); + } else { + LOG.error(AaiUiMsgs.OXM_PROP_DEF_ERR_CROSS_ENTITY_REF, attribute.get("entityName"), + attribute.get("crossEntityReference")); + } + } + + if (attribute.containsKey("geoLat") || attribute.containsKey("geoLong")) { + entity.setGeoLatName(attribute.get("geoLat")); + entity.setGeoLongName(attribute.get("geoLong")); + } + + if (attribute.containsKey("suggestionVerbs")) { + String entityName = attribute.get("entityName"); + SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(this); + suggestionSearchEntity.setEntityType(entityName); + + entity.setSuggestionSearchEntity(suggestionSearchEntity); + } + + entityDescriptors.put(attribute.get("entityName"), entity); + } + } + + + for (Entry<String, HashMap<String, String>> searchableModel : searchableOxmModel.entrySet()) { + HashMap<String, String> attribute = searchableModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeName( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + entity + .setSearchableAttributes(Arrays.asList(attribute.get("searchableAttributes").split(","))); + searchableEntityDescriptors.put(attribute.get("entityName"), entity); + } + + for (Entry<String, HashMap<String, String>> geoEntityModel : 
geoEntityOxmModel.entrySet()) { + HashMap<String, String> attribute = geoEntityModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeName( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + entity.setGeoLatName(attribute.get("geoLat")); + entity.setGeoLongName(attribute.get("geoLong")); + geoEntityDescriptors.put(attribute.get("entityName"), entity); + } + + for (Entry<String, HashMap<String, String>> crossRefModel : crossReferenceEntityOxmModel + .entrySet()) { + HashMap<String, String> attribute = crossRefModel.getValue(); + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + entity.setEntityName(attribute.get("entityName")); + entity.setPrimaryKeyAttributeName( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + + + List<String> crossEntityRefTokens = + Arrays.asList(attribute.get("crossEntityReference").split(",")); + + if (crossEntityRefTokens.size() >= 2) { + CrossEntityReference entityRef = new CrossEntityReference(); + entityRef.setTargetEntityType(crossEntityRefTokens.get(0)); + + for (int i = 1; i < crossEntityRefTokens.size(); i++) { + entityRef.addReferenceAttribute(crossEntityRefTokens.get(i)); + } + + entity.setCrossEntityReference(entityRef); + } + crossReferenceEntityDescriptors.put(attribute.get("entityName"), entity); + } + + for (Entry<String, HashMap<String, String>> suggestionEntityModel : + suggestionSearchEntityOxmModel.entrySet()) { + HashMap<String, String> attribute = suggestionEntityModel.getValue(); + + String entityName = attribute.get("entityName"); + SuggestionSearchEntity suggestionSearchEntity = new SuggestionSearchEntity(this); + suggestionSearchEntity.setEntityType(entityName); + + if (attribute.get("suggestionVerbs") != null) { + suggestionSearchEntity.setSuggestionConnectorWords(Arrays.asList( + attribute.get("suggestionVerbs").split(","))); + } + + if (attribute.get("suggestionAliases") != null) { + suggestionSearchEntity.setSuggestionAliases(Arrays.asList( + attribute.get("suggestionAliases").split(","))); + } + + if (attribute.get("suggestibleAttributes") != null) { + suggestionSearchEntity.setSuggestionPropertyTypes(Arrays.asList( + attribute.get("suggestibleAttributes").split(","))); + } + + OxmEntityDescriptor entity = new OxmEntityDescriptor(); + entity.setSuggestionSearchEntity(suggestionSearchEntity); + entity.setEntityName(entityName); + + if (attribute.get("primaryKeyAttributeNames") != null) { + entity.setPrimaryKeyAttributeName( + Arrays.asList(attribute.get("primaryKeyAttributeNames").replace(" ", "").split(","))); + } + + suggestionSearchEntityDescriptors.put(entityName, entity); + } + } + + /** + * Find latest oxm version. + * + * @return the string + */ + public String findLatestOxmVersion() { + File[] listOxmFiles = loadOxmFolder().listFiles(); + + if (listOxmFiles == null) { + return null; + } + + Integer latestVersion = -1; + + Pattern oxmFileNamePattern = Pattern.compile("^aai_oxm_v([0-9]*).xml"); + + for (File file : listOxmFiles) { + if (file.isFile()) { + String fileName = file.getName(); + Matcher matcher = oxmFileNamePattern.matcher(fileName); + if (matcher.matches()) { + if (latestVersion <= Integer.parseInt(matcher.group(1))) { + latestVersion = Integer.parseInt(matcher.group(1)); + } + } + } + + } + if (latestVersion != -1) { + return "v" + latestVersion.toString(); + } else { + return null; + } + + } + + /** + * Load oxm folder. 
+ * + * @return the file + */ + public File loadOxmFolder() { + return new File(TierSupportUiConstants.CONFIG_OXM_LOCATION); + } + + /** + * Load oxm file name. + * + * @param version the version + * @return the string + */ + public String loadOxmFileName(String version) { + return new String(TierSupportUiConstants.CONFIG_OXM_LOCATION + "aai_oxm_" + version + ".xml"); + } + + /* + * Get the original representation of the OXM Model + */ + public Map<String, HashMap<String, String>> getOxmModel() { + return oxmModel; + } + + /* + * Get the searchable raw map entity types + */ + public Map<String, HashMap<String, String>> getSearchableOxmModel() { + return searchableOxmModel; + } + + public Map<String, HashMap<String, String>> getCrossReferenceEntityOxmModel() { + return crossReferenceEntityOxmModel; + } + + public Map<String, OxmEntityDescriptor> getEntityDescriptors() { + return entityDescriptors; + } + + /** + * Gets the entity descriptor. + * + * @param type the type + * @return the entity descriptor + */ + public OxmEntityDescriptor getEntityDescriptor(String type) { + return entityDescriptors.get(type); + } + + public Map<String, OxmEntityDescriptor> getSearchableEntityDescriptors() { + return searchableEntityDescriptors; + } + + /** + * Gets the searchable entity descriptor. + * + * @param entityType the entity type + * @return the searchable entity descriptor + */ + public OxmEntityDescriptor getSearchableEntityDescriptor(String entityType) { + return searchableEntityDescriptors.get(entityType); + } + + public Map<String, OxmEntityDescriptor> getCrossReferenceEntityDescriptors() { + return crossReferenceEntityDescriptors; + } + + public Map<String, OxmEntityDescriptor> getGeoEntityDescriptors() { + return geoEntityDescriptors; + } + + public Map<String, OxmEntityDescriptor> getSuggestionSearchEntityDescriptors() { + return suggestionSearchEntityDescriptors; + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + try { + System.setProperty("AJSC_HOME", new File(".").getCanonicalPath().replace('\\', '/')); + + } catch (IOException exc) { + // TODO Auto-generated catch block + exc.printStackTrace(); + } + Map<String, OxmEntityDescriptor> temp = + OxmModelLoader.getInstance().getSearchableEntityDescriptors(); + Map<String, OxmEntityDescriptor> temp2 = OxmModelLoader.getInstance().getEntityDescriptors(); + + System.out.println("Completed"); + } + +} diff --git a/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java new file mode 100644 index 0000000..ac29199 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
diff --git a/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java
new file mode 100644
index 0000000..ac29199
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/config/oxm/OxmModelLoaderFilter.java
@@ -0,0 +1,88 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.config.oxm;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.openecomp.sparky.util.NodeUtils;
+
+import org.openecomp.cl.mdc.MdcContext;
+
+/**
+ * The Class OxmModelLoaderFilter.
+ */
+public class OxmModelLoaderFilter implements Filter {
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+   */
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+      throws IOException, ServletException {
+
+    /*
+     * This filter is mapped to a URL that no client should ever request; its only purpose is
+     * to trigger the OXM model load from init() at deployment time. If a request does land
+     * here, just continue the chain.
+     */
+
+    chain.doFilter(request, response);
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+   */
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "OxmModelLoaderFilter", "", "Init", "");
+
+    try {
+      OxmModelLoader.getInstance();
+    } catch (Exception exc) {
+      throw new ServletException("Caught an exception while initializing OXM model loader filter",
+          exc);
+    }
+
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#destroy()
+   */
+  @Override
+  public void destroy() {
+    // nothing to clean up
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/dal/NetworkTransaction.java b/src/main/java/org/openecomp/sparky/dal/NetworkTransaction.java
new file mode 100644
index 0000000..0a679cf
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/dal/NetworkTransaction.java
@@ -0,0 +1,135 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
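Since OxmModelLoaderFilter never does real filtering, its registration matters more than its body. One hedged, illustrative way to map such a filter programmatically (Servlet 3.0 API; the listener class name and URL pattern are invented, and the actual project wires this through AJSC configuration instead):

import javax.servlet.FilterRegistration;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;

import org.openecomp.sparky.config.oxm.OxmModelLoaderFilter;

public class OxmFilterBootstrap implements ServletContextListener {
  @Override
  public void contextInitialized(ServletContextEvent sce) {
    // Map the filter to a path nobody requests; init() still fires at deployment time.
    FilterRegistration.Dynamic reg =
        sce.getServletContext().addFilter("oxmModelLoaderFilter", OxmModelLoaderFilter.class);
    reg.addMappingForUrlPatterns(null, false, "/nobody-should-ever-request-this/*");
  }

  @Override
  public void contextDestroyed(ServletContextEvent sce) {}
}

The listener itself would be registered via web.xml or an @WebListener annotation.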
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal; + +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class NetworkTransaction. + */ +public class NetworkTransaction { + + private OperationResult operationResult; + + private String entityType; + + private String link; + + private HttpMethod operationType; + + private OxmEntityDescriptor descriptor; + + private long createdTimeStampInMs; + + private long taskAgeInMs; + + /** + * Instantiates a new network transaction. + */ + public NetworkTransaction() { + this.createdTimeStampInMs = System.currentTimeMillis(); + } + + /** + * Instantiates a new network transaction. + * + * @param method the method + * @param entityType the entity type + * @param or the or + */ + public NetworkTransaction(HttpMethod method, String entityType, OperationResult or) { + this(); + this.operationType = method; + this.entityType = entityType; + this.operationResult = or; + } + + public HttpMethod getOperationType() { + return operationType; + } + + public long getTaskAgeInMs() { + return taskAgeInMs; + } + + /** + * Sets the task age in ms. + */ + public void setTaskAgeInMs() { + this.taskAgeInMs = (System.currentTimeMillis() - createdTimeStampInMs); + } + + public void setOperationType(HttpMethod operationType) { + this.operationType = operationType; + } + + public OperationResult getOperationResult() { + return operationResult; + } + + public void setOperationResult(OperationResult operationResult) { + this.operationResult = operationResult; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + + public OxmEntityDescriptor getDescriptor() { + return descriptor; + } + + public void setDescriptor(OxmEntityDescriptor descriptor) { + this.descriptor = descriptor; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "NetworkTransaction [operationResult=" + operationResult.toString() + ", entityType=" + + entityType + ", link=" + link + ", operationType=" + operationType + ", descriptor=" + + descriptor.toString() + ", createdTimeStampInMs=" + createdTimeStampInMs + + ", taskAgeInMs=" + taskAgeInMs + "]"; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryAdapter.java b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryAdapter.java new file mode 100644 index 0000000..de2085c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryAdapter.java @@ -0,0 +1,418 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
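A quick hedged sketch of how a sync worker might use the NetworkTransaction class above to track request latency (OperationResult is assumed to have a no-arg constructor, and GET is assumed to be among HttpMethod's values):

import org.openecomp.sparky.dal.NetworkTransaction;
import org.openecomp.sparky.dal.rest.HttpMethod;
import org.openecomp.sparky.dal.rest.OperationResult;

public class TxnAgeDemo {
  public static void main(String[] args) throws InterruptedException {
    OperationResult result = new OperationResult(); // assumed no-arg constructor
    NetworkTransaction txn = new NetworkTransaction(HttpMethod.GET, "pserver", result);
    Thread.sleep(25); // simulate network latency
    txn.setTaskAgeInMs(); // taskAge = now - createdTimeStampInMs
    System.out.println("task age in ms = " + txn.getTaskAgeInMs());
  }
}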
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai; + +import java.io.IOException; +import java.net.URLEncoder; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.NoSuchElementException; + +import org.apache.http.client.utils.URIBuilder; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryRestConfig; +import org.openecomp.sparky.dal.aai.enums.RestAuthenticationMode; +import org.openecomp.sparky.dal.exception.ElasticSearchOperationException; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestClientBuilder; +import org.openecomp.sparky.dal.rest.RestfulDataAccessor; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.security.SecurityContextFactory; +import org.openecomp.sparky.util.NodeUtils; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.WebResource.Builder; + + +/** + * The Class ActiveInventoryAdapter. + */ + +/** + * @author davea + * + */ +public class ActiveInventoryAdapter extends RestfulDataAccessor + implements ActiveInventoryDataProvider { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(ActiveInventoryAdapter.class); + + private static final String HEADER_TRANS_ID = "X-TransactionId"; + private static final String HEADER_FROM_APP_ID = "X-FromAppId"; + private static final String HEADER_AUTHORIZATION = "Authorization"; + + private static final String TRANSACTION_ID_PREFIX = "txnId-"; + private static final String UI_APP_NAME = "AAI-UI"; + + + private ActiveInventoryConfig config; + + /** + * Instantiates a new active inventory adapter. + * + * @param restClientBuilder the rest client builder + * @throws ElasticSearchOperationException the elastic search operation exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public ActiveInventoryAdapter(RestClientBuilder restClientBuilder) + throws ElasticSearchOperationException, IOException { + super(restClientBuilder); + + try { + this.config = ActiveInventoryConfig.getConfig(); + } catch (Exception exc) { + throw new ElasticSearchOperationException("Error getting active inventory configuration", + exc); + } + + clientBuilder.setUseHttps(true); + + clientBuilder.setValidateServerHostname(config.getAaiSslConfig().isValidateServerHostName()); + + SecurityContextFactory sslContextFactory = clientBuilder.getSslContextFactory(); + + sslContextFactory.setServerCertificationChainValidationEnabled( + config.getAaiSslConfig().isValidateServerCertificateChain()); + + if (config.getAaiRestConfig().getAuthenticationMode() == RestAuthenticationMode.SSL_CERT) { + sslContextFactory.setClientCertFileName(config.getAaiSslConfig().getKeystoreFilename()); + sslContextFactory.setClientCertPassword(config.getAaiSslConfig().getKeystorePassword()); + sslContextFactory.setTrustStoreFileName(config.getAaiSslConfig().getTruststoreFilename()); + } + + clientBuilder.setConnectTimeoutInMs(config.getAaiRestConfig().getConnectTimeoutInMs()); + clientBuilder.setReadTimeoutInMs(config.getAaiRestConfig().getReadTimeoutInMs()); + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestfulDataAccessor#setClientDefaults(com.sun.jersey.api.client.Client, java.lang.String, java.lang.String, java.lang.String) + */ + @Override + protected Builder setClientDefaults(Client client, String url, String payloadContentType, + String acceptContentType) { + Builder builder = super.setClientDefaults(client, url, payloadContentType, acceptContentType); + + builder = builder.header(HEADER_FROM_APP_ID, UI_APP_NAME); + byte bytes[] = new byte[6]; + txnIdGenerator.nextBytes(bytes); + builder = + builder.header(HEADER_TRANS_ID, TRANSACTION_ID_PREFIX + ByteBuffer.wrap(bytes).getInt()); + + if (config.getAaiRestConfig().getAuthenticationMode() == RestAuthenticationMode.SSL_BASIC) { + builder = builder.header(HEADER_AUTHORIZATION, + config.getAaiSslConfig().getBasicAuthenticationCredentials()); + } + + return builder; + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + + // TODO Auto-generated method stub + RestClientBuilder builder = new RestClientBuilder(); + RestfulDataAccessor accessor; + try { + accessor = new ActiveInventoryAdapter(builder); + OperationResult or = + accessor.doGet("/cloud-infrastructure/pservers/pserver/SQLTEST006", "application/json"); + String jsonPatch = "{ \"hostname\" : \"SQLTEST006\", \"prov-status\" : \"PREPROV\"," + + " \"in-maint\" : \"false\", \"is-closed-loop\" : \"false\" }"; + or = accessor.doPatch("/cloud-infrastructure/pservers/pserver/SQLTEST006", jsonPatch, + "application/json"); + // System.out.println("PATCH or = " + or.getResultCode() + " : " + or.toString()); + } catch (ElasticSearchOperationException | IOException exc) { + // TODO Auto-generated catch block + exc.printStackTrace(); + } + + } + + /** + * Gets the full url. 
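The X-TransactionId value built in setClientDefaults() above comes from six random bytes, of which ByteBuffer.getInt() consumes only the first four. A self-contained sketch (a SecureRandom stands in for the accessor's txnIdGenerator, whose declaration is not shown in this diff):

import java.nio.ByteBuffer;
import java.security.SecureRandom;

public class TxnIdDemo {
  public static void main(String[] args) {
    SecureRandom txnIdGenerator = new SecureRandom();
    byte[] bytes = new byte[6];
    txnIdGenerator.nextBytes(bytes);
    // Only the first 4 of the 6 bytes contribute to the int value.
    String header = "txnId-" + ByteBuffer.wrap(bytes).getInt();
    System.out.println("X-TransactionId: " + header);
  }
}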
+ * + * @param resourceUrl the resource url + * @return the full url + * @throws Exception the exception + */ + private String getFullUrl(String resourceUrl) throws Exception { + ActiveInventoryRestConfig aaiRestConfig = ActiveInventoryConfig.getConfig().getAaiRestConfig(); + final String host = aaiRestConfig.getHost(); + final String port = aaiRestConfig.getPort(); + final String basePath = aaiRestConfig.getResourceBasePath(); + return String.format("https://%s:%s%s%s", host, port, basePath, resourceUrl); + } + + public String getGenericQueryForSelfLink(String startNodeType, List<String> queryParams) throws Exception { + + URIBuilder urlBuilder = new URIBuilder(getFullUrl("/search/generic-query")); + + for( String queryParam : queryParams) { + urlBuilder.addParameter("key", queryParam); + } + + urlBuilder.addParameter("start-node-type", startNodeType); + urlBuilder.addParameter("include", startNodeType); + + final String constructedLink = urlBuilder.toString(); + + // TODO: debug log for constructed link + + return constructedLink; + +} + + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider#getSelfLinksByEntityType(java.lang.String) + */ + @Override + public OperationResult getSelfLinksByEntityType(String entityType) throws Exception { + + /* + * For this one, I want to dynamically construct the nodes-query for self-link discovery as a + * utility method that will use the OXM model entity data to drive the query as well. + */ + + if (entityType == null) { + throw new NullPointerException( + "Failed to getSelfLinksByEntityType() because entityType is null"); + } + + OxmEntityDescriptor entityDescriptor = + OxmModelLoader.getInstance().getEntityDescriptor(entityType); + + if (entityDescriptor == null) { + throw new NoSuchElementException("Failed to getSelfLinksByEntityType() because could" + + " not find entity descriptor from OXM with type = " + entityType); + } + + String link = null; + final String primaryKeyStr = + NodeUtils.concatArray(entityDescriptor.getPrimaryKeyAttributeName(), "/"); + + link = getFullUrl("/search/nodes-query?search-node-type=" + entityType + "&filter=" + + primaryKeyStr + ":EXISTS"); + + + + return doGet(link, "application/json"); + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider#getSelfLinkForEntity(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName, + String primaryKeyValue) throws Exception { + + if (entityType == null) { + throw new NullPointerException("Failed to getSelfLinkForEntity() because entityType is null"); + } + + if (primaryKeyName == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyName is null"); + } + + if (primaryKeyValue == null) { + throw new NullPointerException( + "Failed to getSelfLinkForEntity() because primaryKeyValue is null"); + } + + // https://aai-int1.test.att.com:8443/aai/v8/search/generic-query?key=complex.physical-location-id:atlngade&start-node-type=complex + + /* + * Try to protect ourselves from illegal URI formatting exceptions caused by characters that + * aren't natively supported in a URI, but can be escaped to make them legal. 
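The generic-query construction in getGenericQueryForSelfLink() above can be exercised standalone with the same Apache HttpClient URIBuilder (host, port, and the key value below are illustrative):

import java.util.Arrays;
import java.util.List;

import org.apache.http.client.utils.URIBuilder;

public class GenericQueryUrlDemo {
  public static void main(String[] args) throws Exception {
    List<String> queryParams = Arrays.asList("complex.physical-location-id:atlngade");
    URIBuilder urlBuilder =
        new URIBuilder("https://aai.example.com:8443/aai/v7/search/generic-query");
    for (String queryParam : queryParams) {
      urlBuilder.addParameter("key", queryParam);
    }
    urlBuilder.addParameter("start-node-type", "complex");
    urlBuilder.addParameter("include", "complex");
    // URIBuilder takes care of any percent-encoding the parameter values need.
    System.out.println(urlBuilder.toString());
  }
}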
+ */ + + String encodedEntityType = URLEncoder.encode(entityType, "UTF-8"); + String encodedPrimaryKeyName = URLEncoder.encode(primaryKeyName, "UTF-8"); + String encodedPrimaryKeyValue = URLEncoder.encode(primaryKeyValue, "UTF-8"); + + String link = null; + + if ("service-instance".equals(entityType)) { + + link = getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + + encodedPrimaryKeyName + ":" + encodedPrimaryKeyValue + "&start-node-type=" + + encodedEntityType + "&include=customer&depth=2"); + + } else { + + link = + getFullUrl("/search/generic-query?key=" + encodedEntityType + "." + encodedPrimaryKeyName + + ":" + encodedPrimaryKeyValue + "&start-node-type=" + encodedEntityType); + + } + + return queryActiveInventoryWithRetries(link, "application/json", + this.config.getAaiRestConfig().getNumRequestRetries()); + + } + + + /** + * Our retry conditions should be very specific. + * + * @param r the r + * @return true, if successful + */ + private boolean shouldRetryRequest(OperationResult r) { + + if (r == null) { + return true; + } + + int rc = r.getResultCode(); + + if (rc == 200) { + return false; + } + + if (rc == 404) { + return false; + } + + return true; + + } + + /** + * Query active inventory. + * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + // package protected for test classes instead of private + OperationResult queryActiveInventory(String url, String acceptContentType) { + return doGet(url, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider#queryActiveInventoryWithRetries(java.lang.String, java.lang.String, int) + */ + @Override + public OperationResult queryActiveInventoryWithRetries(String url, String responseType, + int numRetries) { + + OperationResult result = null; + + for (int x = 0; x < numRetries; x++) { + + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_SEQ, url, String.valueOf(x + 1)); + + result = queryActiveInventory(url, responseType); + + /** + * Record number of times we have attempted the request to later summarize how many times we + * are generally retrying over thousands of messages in a sync. + * + * If the number of retries is surprisingly high, then we need to understand why that is as + * the number of retries is also causing a heavier load on AAI beyond the throttling controls + * we already have in place in term of the transaction rate controller and number of + * parallelized threads per task processor. 
+ */ + + result.setNumRequestRetries(x); + + if (!shouldRetryRequest(result)) { + + /* + * if (myConfig.getAaiRestConfig().isCacheEnabled()) { + * + * CachedHttpRequest cachedRequest = new CachedHttpRequest(); + * cachedRequest.setHttpRequestMethod("GET"); cachedRequest.setPayload(""); + * cachedRequest.setPayloadMimeType(""); cachedRequest.setUrl(url); + * cachedRequest.setOperationType( TransactionStorageType.ACTIVE_INVENTORY_QUERY.getIndex() + * ); + * + * CachedHttpResponse cachedResponse = new CachedHttpResponse(); + * cachedResponse.setPayload(result.getResult()); + * cachedResponse.setPayloadMimeType("application/json"); + * cachedResponse.setStatusCode(result.getResultCode()); + * + * CachedHttpTransaction txn = new CachedHttpTransaction(cachedRequest, cachedResponse); + * storageProvider.persistTransaction(txn); + * + * } + */ + + + result.setResolvedLinkFromServer(true); + LOG.debug(AaiUiMsgs.QUERY_AAI_RETRY_DONE_SEQ, url, String.valueOf(x + 1)); + + return result; + } + + try { + /* + * Sleep between re-tries to be nice to the target system. + */ + Thread.sleep(50); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.QUERY_AAI_WAIT_INTERRUPTION, exc.getLocalizedMessage()); + break; + } + LOG.error(AaiUiMsgs.QUERY_AAI_RETRY_FAILURE_WITH_SEQ, url, String.valueOf(x + 1)); + } + + + result.setResolvedLinkFailure(true); + LOG.info(AaiUiMsgs.QUERY_AAI_RETRY_MAXED_OUT, url); + + return result; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestfulDataAccessor#shutdown() + */ + @Override + public void shutdown() { + // TODO Auto-generated method stub + + if (entityCache != null) { + entityCache.shutdown(); + } + + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryDataProvider.java b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryDataProvider.java new file mode 100644 index 0000000..8be4a65 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryDataProvider.java @@ -0,0 +1,91 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai; + +import java.util.List; + +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; + +/** + * The Interface ActiveInventoryDataProvider. + */ +public interface ActiveInventoryDataProvider extends RestDataProvider { + + /** + * Gets the self links by entity type. 
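The retry policy implemented above, retry on anything except 200 and 404 with a short fixed sleep between attempts, reduces to this self-contained sketch (the IntSupplier stands in for the real HTTP call):

import java.util.function.IntSupplier;

public class RetryPolicyDemo {
  static boolean shouldRetry(int resultCode) {
    return resultCode != 200 && resultCode != 404;
  }

  static int getWithRetries(IntSupplier request, int numRetries) throws InterruptedException {
    int rc = -1;
    for (int attempt = 1; attempt <= numRetries; attempt++) {
      rc = request.getAsInt();
      if (!shouldRetry(rc)) {
        return rc;
      }
      Thread.sleep(50); // be nice to the target system between attempts
    }
    return rc; // retries exhausted
  }

  public static void main(String[] args) throws InterruptedException {
    // Simulated endpoint that fails twice (503) and then succeeds.
    int[] responses = {503, 503, 200};
    int[] call = {0};
    System.out.println(getWithRetries(() -> responses[Math.min(call[0]++, 2)], 5)); // 200
  }
}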
+ * + * @param entityType the entity type + * @return the self links by entity type + * @throws Exception the exception + */ + /* + * This one will do the nodes-query and understand enough to make that happen + */ + OperationResult getSelfLinksByEntityType(String entityType) throws Exception; + + /** + * Gets the self link for entity. + * + * @param entityType the entity type + * @param primaryKeyName the primary key name + * @param primaryKeyValue the primary key value + * @return the self link for entity + * @throws Exception the exception + */ + OperationResult getSelfLinkForEntity(String entityType, String primaryKeyName, + String primaryKeyValue) throws Exception; + + /** + * Query active inventory with retries. + * + * @param url the url + * @param responseType the response type + * @param numRetries the num retries + * @return the operation result + */ + OperationResult queryActiveInventoryWithRetries(String url, String responseType, int numRetries); + + + /** + * Determines the self-link for an entity with passed-in key-value pairs. + * + * @param startNodeType + * @param keyParams + * @return + * @throws Exception + */ + String getGenericQueryForSelfLink(String startNodeType, List<String> queryKeyParams) throws Exception; + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() + */ + @Override + void shutdown(); + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryEntityStatistics.java b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryEntityStatistics.java new file mode 100644 index 0000000..0671b3e --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryEntityStatistics.java @@ -0,0 +1,307 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class ActiveInventoryEntityStatistics. 
+ */ +public class ActiveInventoryEntityStatistics { + + private static final String TOTAL = "Total"; + + private static final String FOUND = "Found"; + + private static final String NO_PAYLOAD = "NoPayload"; + + private static final String NOT_FOUND = "NotFound"; + + private static final String NUM_RETRIES = "NumRetries"; + + private static final String ERROR = "Error"; + + private OxmModelLoader loader; + + + private Map<String, HashMap<String, AtomicInteger>> activeInventoryEntityStatistics; + + /** + * Creates the entity op stats. + * + * @return the hash map + */ + private HashMap<String, AtomicInteger> createEntityOpStats() { + + HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>(); + + opStats.put(TOTAL, new AtomicInteger()); + opStats.put(FOUND, new AtomicInteger()); + opStats.put(NO_PAYLOAD, new AtomicInteger()); + opStats.put(NOT_FOUND, new AtomicInteger()); + opStats.put(NUM_RETRIES, new AtomicInteger()); + opStats.put(ERROR, new AtomicInteger()); + + return opStats; + + } + + /* + * private void createSearchableActiveInventoryEntityStatistics() { + * + * Map<String,OxmEntityDescriptor> descriptors = loader.getSearchableEntityDescriptors(); + * + * if(descriptors == null) { return; } + * + * OxmEntityDescriptor d = null; for ( String key : descriptors.keySet() ) { d = + * descriptors.get(key); activeInventoryEntityStatistics.put(d.getEntityName(), + * createEntityOpStats()); } + * + * } + */ + + /* + * private void createCrossEntityReferenceActiveInventoryEntityStatistics() { + * + * Map<String,OxmEntityDescriptor> descriptors = loader.getCrossReferenceEntityDescriptors(); + * + * + * } + */ + + + /** + * Initializecreate active inventory entity statistics. + */ + private void initializecreateActiveInventoryEntityStatistics() { + Set<String> keys = activeInventoryEntityStatistics.keySet(); + + Set<String> opStatKeySet = null; + Map<String, AtomicInteger> opStats = null; + + for (String k : keys) { + + opStats = activeInventoryEntityStatistics.get(k); + + opStatKeySet = opStats.keySet(); + + for (String opStatKey : opStatKeySet) { + opStats.get(opStatKey).set(0); + } + } + } + + /** + * Instantiates a new active inventory entity statistics. + * + * @param loader the loader + */ + public ActiveInventoryEntityStatistics(OxmModelLoader loader) { + this.loader = loader; + activeInventoryEntityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>(); + // createSearchableActiveInventoryEntityStatistics(); + // createCrossEntityReferenceActiveInventoryEntityStatistics(); + reset(); + } + + /** + * Initialize counters from oxm entity descriptors. + * + * @param descriptors the descriptors + */ + public void initializeCountersFromOxmEntityDescriptors( + Map<String, OxmEntityDescriptor> descriptors) { + + if (descriptors == null) { + return; + } + + OxmEntityDescriptor descriptor = null; + for (String key : descriptors.keySet()) { + descriptor = descriptors.get(key); + activeInventoryEntityStatistics.put(descriptor.getEntityName(), createEntityOpStats()); + } + } + + + /** + * Reset. + */ + public void reset() { + initializecreateActiveInventoryEntityStatistics(); + } + + /** + * Gets the result code. + * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + + if (txn == null) { + return -1; + } + + OperationResult or = txn.getOperationResult(); + + if (or == null) { + return -1; + } + + return or.getResultCode(); + + } + + /** + * Update active inventory entity counters. 
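The counter table this class maintains is the classic map-of-maps with AtomicInteger leaves. A minimal sketch of the same pattern, using ConcurrentHashMap with lazy counter creation:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

public class EntityOpStatsDemo {
  // entityType -> (counterName -> count)
  private final Map<String, Map<String, AtomicInteger>> stats = new ConcurrentHashMap<>();

  void peg(String entityType, String counter) {
    stats.computeIfAbsent(entityType, k -> new ConcurrentHashMap<>())
        .computeIfAbsent(counter, k -> new AtomicInteger())
        .incrementAndGet();
  }

  public static void main(String[] args) {
    EntityOpStatsDemo demo = new EntityOpStatsDemo();
    demo.peg("pserver", "Total");
    demo.peg("pserver", "Found");
    demo.peg("pserver", "Total");
    System.out.println(demo.stats); // {pserver={Total=2, Found=1}}
  }
}

The class above makes a different design choice: it pre-creates every counter from the OXM entity descriptors at initialization, which is why plain HashMap reads are safe afterward.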
+ * + * @param txn the txn + */ + private void updateActiveInventoryEntityCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + Map<String, AtomicInteger> opStats = activeInventoryEntityStatistics.get(txn.getEntityType()); + + int rc = getResultCode(txn); + + switch (txn.getOperationType()) { + + case GET: { + + opStats.get(TOTAL).incrementAndGet(); + + if (200 <= rc && rc <= 299) { + opStats.get(FOUND).incrementAndGet(); + } else if (rc == 404) { + opStats.get(NOT_FOUND).incrementAndGet(); + } else { + opStats.get(ERROR).incrementAndGet(); + } + + break; + } + + default: { + // nothing else for now + } + + } + + OperationResult or = txn.getOperationResult(); + + if (or != null && or.wasSuccessful()) { + + if (or.getResult() == null || or.getResult().length() == 0) { + opStats.get(NO_PAYLOAD).incrementAndGet(); + } + + if (or.getNumRequestRetries() > 0) { + opStats.get(NUM_RETRIES).addAndGet(or.getNumRequestRetries()); + } + + } + + + } + + /** + * Update counters. + * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + updateActiveInventoryEntityCounters(txn); + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + /* + * sort entities, then sort nested op codes + */ + + TreeMap<String, HashMap<String, AtomicInteger>> activeInventoryEntitySortedTreeMap = + new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() { + + @Override + public int compare(String o1, String o2) { + return o1.toLowerCase().compareTo(o2.toLowerCase()); + } + }); + + activeInventoryEntitySortedTreeMap.putAll(activeInventoryEntityStatistics); + + for (String counterEntityKey : activeInventoryEntitySortedTreeMap.keySet()) { + + HashMap<String, AtomicInteger> entityCounters = + activeInventoryEntitySortedTreeMap.get(counterEntityKey); + + AtomicInteger total = entityCounters.get(TOTAL); + AtomicInteger found = entityCounters.get(FOUND); + AtomicInteger noPayload = entityCounters.get(NO_PAYLOAD); + AtomicInteger notFound = entityCounters.get(NOT_FOUND); + AtomicInteger numRetries = entityCounters.get(NUM_RETRIES); + AtomicInteger error = entityCounters.get(ERROR); + + int totalValue = (total == null) ? 0 : total.get(); + int foundValue = (found == null) ? 0 : found.get(); + int noPayloadValue = (noPayload == null) ? 0 : noPayload.get(); + int notFoundValue = (notFound == null) ? 0 : notFound.get(); + int numRetriesValue = (numRetries == null) ? 0 : numRetries.get(); + int errorValue = (error == null) ? 0 : error.get(); + + sb.append("\n ") + .append(String.format( + "%-30s TOTAL: %-12d FOUND: %-12d NO_PAYLOAD:" + + " %-12d NOT_FOUND: %-12d NUM_RETRIES: %-12d ERROR: %-12d", + counterEntityKey, totalValue, foundValue, noPayloadValue, notFoundValue, + numRetriesValue, errorValue)); + } + + return sb.toString(); + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java new file mode 100644 index 0000000..7a61972 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/ActiveInventoryProcessingExceptionStatistics.java @@ -0,0 +1,139 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
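The report builder in getStatisticsReport() above relies on a case-insensitive TreeMap plus fixed-width String.format columns; in miniature (sample counts are made up, and String.CASE_INSENSITIVE_ORDER matches the effect of the anonymous toLowerCase comparator):

import java.util.Map;
import java.util.TreeMap;

public class StatsReportDemo {
  public static void main(String[] args) {
    Map<String, Integer> totals = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    totals.put("Pserver", 120);
    totals.put("complex", 7);

    StringBuilder sb = new StringBuilder(128);
    for (Map.Entry<String, Integer> entry : totals.entrySet()) {
      // Fixed-width columns keep the per-entity rows aligned in the log output.
      sb.append("\n").append(String.format("%-30s TOTAL: %-12d", entry.getKey(), entry.getValue()));
    }
    System.out.println(sb);
  }
}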
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.dal.aai;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.analytics.AbstractStatistics;
+import org.openecomp.sparky.dal.NetworkTransaction;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+
+/**
+ * The Class ActiveInventoryProcessingExceptionStatistics.
+ */
+public class ActiveInventoryProcessingExceptionStatistics extends AbstractStatistics {
+
+  // Use this class (not ActiveInventoryAdapter) so log lines are attributed correctly.
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(ActiveInventoryProcessingExceptionStatistics.class);
+
+  private static final String NATIVE_SOCKET_CONNECT_EXCEPTION = "NativeSocketConnectException";
+  private static final String NATIVE_SOCKET_CONNECTION_RESET = "NativeSocketConnectionReset";
+  private static final String NATIVE_SOCKET_CONNECTION_REFUSED = "NativeSocketConnectionRefused";
+  private static final String CLIENT_TIMEOUT_EXCEPTION = "JerseyClientTimeoutException";
+  private static final String UNKNOWN_EXCEPTION = "UnknownException";
+
+  /**
+   * Creates the counters.
+   */
+  private void createCounters() {
+    addCounter(NATIVE_SOCKET_CONNECT_EXCEPTION);
+    addCounter(NATIVE_SOCKET_CONNECTION_RESET);
+    addCounter(NATIVE_SOCKET_CONNECTION_REFUSED);
+    addCounter(CLIENT_TIMEOUT_EXCEPTION);
+    addCounter(UNKNOWN_EXCEPTION);
+  }
+
+  /**
+   * Instantiates a new active inventory processing exception statistics.
+   */
+  public ActiveInventoryProcessingExceptionStatistics() {
+    createCounters();
+    reset();
+  }
+
+  /**
+   * Update counters.
+ * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + OperationResult or = txn.getOperationResult(); + + if (or != null && !or.wasSuccessful()) { + + if (or.getResultCode() != 404) { + + String result = or.getResult(); + + if (result != null) { + + /* + * Try to classify exceptions and peg counters + */ + + if (result.contains("java.net.SocketTimeoutException: connect timed out")) { + pegCounter(CLIENT_TIMEOUT_EXCEPTION); + } else if (result.contains("java.net.ConnectException: Connection timed out: connect")) { + pegCounter(NATIVE_SOCKET_CONNECT_EXCEPTION); + } else if (result.contains("java.net.ConnectException: Connection refused: connect")) { + pegCounter(NATIVE_SOCKET_CONNECTION_REFUSED); + } else if (result.contains("java.net.SocketException: Connection reset")) { + pegCounter(NATIVE_SOCKET_CONNECTION_RESET); + } else { + pegCounter(UNKNOWN_EXCEPTION); + LOG.error(AaiUiMsgs.PEGGING_ERROR, result.toString()); + } + + } + } + + } + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + int nativeConnect = getCounterValue(NATIVE_SOCKET_CONNECT_EXCEPTION); + int nativeCxnReset = getCounterValue(NATIVE_SOCKET_CONNECTION_RESET); + int nativeCxnRefused = getCounterValue(NATIVE_SOCKET_CONNECTION_REFUSED); + int clientTimeout = getCounterValue(CLIENT_TIMEOUT_EXCEPTION); + int unknown = getCounterValue(UNKNOWN_EXCEPTION); + + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECT_EXCEPTION, nativeConnect)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_RESET, nativeCxnReset)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", NATIVE_SOCKET_CONNECTION_REFUSED, nativeCxnRefused)); + sb.append("\n ") + .append(String.format("%-40s: %-12d", CLIENT_TIMEOUT_EXCEPTION, clientTimeout)); + sb.append("\n ").append(String.format("%-40s: %-12d", UNKNOWN_EXCEPTION, unknown)); + + return sb.toString(); + + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryConfig.java b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryConfig.java new file mode 100644 index 0000000..c0f5db8 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryConfig.java @@ -0,0 +1,159 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.dal.aai.config; + +import java.net.URI; +import java.util.Properties; + +import javax.ws.rs.core.UriBuilder; + +import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig; +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.util.Encryptor; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +/** + * The Class ActiveInventoryConfig. + */ +public class ActiveInventoryConfig { + + + + public static final String CONFIG_FILE = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "aai.properties"; + private static ActiveInventoryConfig instance; + + private static final String HTTP_SCHEME = "http"; + private static final String HTTPS_SCHEME = "https"; + + public static ActiveInventoryConfig getConfig() throws Exception { + if (instance == null) { + instance = new ActiveInventoryConfig(); + } + + return instance; + } + + private ActiveInventoryRestConfig aaiRestConfig; + private ActiveInventorySslConfig aaiSslConfig; + private TaskProcessorConfig taskProcessorConfig; + + /** + * Instantiates a new active inventory config. + * + * @throws Exception the exception + */ + protected ActiveInventoryConfig() throws Exception { + + Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); + aaiRestConfig = new ActiveInventoryRestConfig(props); + aaiSslConfig = new ActiveInventorySslConfig(props, new Encryptor()); + + taskProcessorConfig = new TaskProcessorConfig(); + taskProcessorConfig + .initializeFromProperties(ConfigHelper.getConfigWithPrefix("aai.taskProcessor", props)); + + + } + + protected ActiveInventoryConfig(Properties props) throws Exception { + + aaiRestConfig = new ActiveInventoryRestConfig(props); + aaiSslConfig = new ActiveInventorySslConfig(props, new Encryptor()); + + taskProcessorConfig = new TaskProcessorConfig(); + taskProcessorConfig + .initializeFromProperties(ConfigHelper.getConfigWithPrefix("aai.taskProcessor", props)); + + + } + + public TaskProcessorConfig getTaskProcessorConfig() { + return taskProcessorConfig; + } + + public void setTaskProcessorConfig(TaskProcessorConfig taskProcessorConfig) { + this.taskProcessorConfig = taskProcessorConfig; + } + + + public ActiveInventoryRestConfig getAaiRestConfig() { + return aaiRestConfig; + } + + public void setAaiRestConfig(ActiveInventoryRestConfig aaiRestConfig) { + this.aaiRestConfig = aaiRestConfig; + } + + public ActiveInventorySslConfig getAaiSslConfig() { + return aaiSslConfig; + } + + public void setAaiSslConfig(ActiveInventorySslConfig aaiSslConfig) { + this.aaiSslConfig = aaiSslConfig; + } + + public String repairSelfLink(String selflink) { + + if (selflink == null) { + return selflink; + } + + UriBuilder builder = UriBuilder.fromPath(selflink).host(aaiRestConfig.getHost()) + .port(Integer.parseInt(aaiRestConfig.getPort())); + + switch (aaiRestConfig.getAuthenticationMode()) { + + case SSL_BASIC: + case SSL_CERT: { + builder.scheme(HTTPS_SCHEME); + break; + } + + default: { + builder.scheme(HTTP_SCHEME); + } + } + + return builder.build().toString(); + + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "ActiveInventoryConfig [aaiRestConfig=" + aaiRestConfig + ", aaiSslConfig=" + + aaiSslConfig + "]"; + } + + public URI getBaseUri() { + return UriBuilder.fromUri("https://" + aaiRestConfig.getHost() + ":" + aaiRestConfig.getPort() + + aaiRestConfig.getResourceBasePath()).build(); + } + +} diff --git 
a/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryRestConfig.java b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryRestConfig.java new file mode 100644 index 0000000..d609f16 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventoryRestConfig.java @@ -0,0 +1,283 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai.config; + +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import org.openecomp.sparky.dal.aai.enums.RestAuthenticationMode; +import org.openecomp.sparky.util.ConfigHelper; + +/** + * The Class ActiveInventoryRestConfig. + */ +public class ActiveInventoryRestConfig { + + private String host; + + private String port; + + private int connectTimeoutInMs; + + private int readTimeoutInMs; + + private int numRequestRetries; + + private int numResolverWorkers; + + private boolean useCacheOnly; + + private boolean cacheEnabled; + + private boolean cacheFailures; + + private String storageFolderOverride; + + int numCacheWorkers; + + private long maxTimeToLiveInMs; + + private String resourceBasePath; + + private List<String> shallowEntities; + + private RestAuthenticationMode authenticationMode; + + public List<String> getShallowEntities() { + return shallowEntities; + } + + /** + * Instantiates a new active inventory rest config. 
+ * + * @param props the props + */ + public ActiveInventoryRestConfig(Properties props) { + + if (props == null) { + return; + } + + Properties restProps = ConfigHelper.getConfigWithPrefix("aai.rest", props); + + resourceBasePath = restProps.getProperty("resourceBasePath", "/aai/v7"); + host = restProps.getProperty("host", "localhost"); + port = restProps.getProperty("port", "8443"); + numRequestRetries = Integer.parseInt(restProps.getProperty("numRequestRetries", "5")); + numResolverWorkers = Integer.parseInt(restProps.getProperty("numResolverWorkers", "15")); + + connectTimeoutInMs = Integer.parseInt(restProps.getProperty("connectTimeoutInMs", "5000")); + readTimeoutInMs = Integer.parseInt(restProps.getProperty("readTimeoutInMs", "10000")); + + String shallowEntitiesProperty = restProps.getProperty("shallowEntities", ""); + shallowEntities = Arrays.asList(shallowEntitiesProperty.split(",")); + + Properties cacheProps = ConfigHelper.getConfigWithPrefix("aai.rest.cache", props); + cacheEnabled = Boolean.parseBoolean(cacheProps.getProperty("enabled", "false")); + storageFolderOverride = cacheProps.getProperty("storageFolderOverride", null); + cacheFailures = Boolean.parseBoolean(cacheProps.getProperty("cacheFailures", "false")); + useCacheOnly = Boolean.parseBoolean(cacheProps.getProperty("useCacheOnly", "false")); + numCacheWorkers = Integer.parseInt(cacheProps.getProperty("numWorkers", "5")); + + + if (storageFolderOverride != null && storageFolderOverride.length() == 0) { + storageFolderOverride = null; + } + /* + * The expectation of this parameter is that if the value > 0, then the cached resources will be + * served back instead of dipping AAI/DataLayer as long as the current resource age from the + * cached instance is < maxTimeToLiveInMs. + */ + maxTimeToLiveInMs = Long.parseLong(cacheProps.getProperty("maxTimeToLiveInMs", "-1")); + authenticationMode = RestAuthenticationMode.getRestAuthenticationMode(restProps.getProperty("authenticationMode", RestAuthenticationMode.SSL_CERT.getAuthenticationModeLabel())); + + /* + * In any kind of error scenario, set the authentication mode to SSL_CERT as our default. + * This is an arbitrary default, but was chosen based on the way this code worked before + * introduction of the SSL Basic Auth settings. + */ + if ( authenticationMode == RestAuthenticationMode.UNKNOWN_MODE) { + authenticationMode = RestAuthenticationMode.SSL_CERT; + } + + } + + public RestAuthenticationMode getAuthenticationMode() { + return authenticationMode; + } + + public void setAuthenticationMode(RestAuthenticationMode authenticationMode) { + this.authenticationMode = authenticationMode; + } + + public int getNumCacheWorkers() { + return numCacheWorkers; + } + + public void setNumCacheWorkers(int numCacheWorkers) { + this.numCacheWorkers = numCacheWorkers; + } + + /** + * Should cache failures. + * + * @return true, if successful + */ + public boolean shouldCacheFailures() { + return cacheFailures; + } + + public void setShouldCacheFailures(boolean enabled) { + this.cacheFailures = enabled; + } + + /** + * Checks if is shallow entity. 
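The constructor above repeatedly pairs ConfigHelper.getConfigWithPrefix() with per-key defaults. A JDK-only stand-in for that helper pattern (behavior inferred from the call sites, not from ConfigHelper itself):

import java.util.Properties;

public class PrefixConfigDemo {
  // Returns the sub-properties under "prefix.", with the prefix stripped.
  static Properties withPrefix(String prefix, Properties props) {
    Properties out = new Properties();
    String dotted = prefix + ".";
    for (String name : props.stringPropertyNames()) {
      if (name.startsWith(dotted)) {
        out.setProperty(name.substring(dotted.length()), props.getProperty(name));
      }
    }
    return out;
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    props.setProperty("aai.rest.host", "aai.example.com"); // illustrative value
    Properties rest = withPrefix("aai.rest", props);
    System.out.println(rest.getProperty("host", "localhost")); // aai.example.com
    System.out.println(rest.getProperty("port", "8443"));      // default applies
  }
}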
+ * + * @param entityType the entity type + * @return true, if is shallow entity + */ + public boolean isShallowEntity(String entityType) { + if (entityType == null) { + return false; + } + + for (String entity : shallowEntities) { + if (entityType.equalsIgnoreCase(entity)) { + return true; + } + } + + return false; + } + + public boolean isUseCacheOnly() { + return useCacheOnly; + } + + public void setUseCacheOnly(boolean useCacheOnly) { + this.useCacheOnly = useCacheOnly; + } + + public int getNumResolverWorkers() { + return numResolverWorkers; + } + + public void setNumResolverWorkers(int numResolverWorkers) { + this.numResolverWorkers = numResolverWorkers; + } + + public long getMaxTimeToLiveInMs() { + return maxTimeToLiveInMs; + } + + public void setMaxTimeToLiveInMs(long maxTimeToLiveInMs) { + this.maxTimeToLiveInMs = maxTimeToLiveInMs; + } + + public boolean isCacheEnabled() { + return cacheEnabled; + } + + public void setCacheEnabled(boolean cacheEnabled) { + this.cacheEnabled = cacheEnabled; + } + + public String getStorageFolderOverride() { + return storageFolderOverride; + } + + public void setStorageFolderOverride(String storageFolderOverride) { + this.storageFolderOverride = storageFolderOverride; + } + + public String getHost() { + return host; + } + + public String getPort() { + return port; + } + + public String getResourceBasePath() { + return resourceBasePath; + } + + public void setHost(String host) { + this.host = host; + } + + public void setPort(String port) { + this.port = port; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + + + public void setResourceBasePath(String resourceBasePath) { + this.resourceBasePath = resourceBasePath; + } + + @Override + public String toString() { + return "ActiveInventoryRestConfig [host=" + host + ", port=" + port + ", connectTimeoutInMs=" + + connectTimeoutInMs + ", readTimeoutInMs=" + readTimeoutInMs + ", numRequestRetries=" + + numRequestRetries + ", numResolverWorkers=" + numResolverWorkers + ", useCacheOnly=" + + useCacheOnly + ", cacheEnabled=" + cacheEnabled + ", cacheFailures=" + cacheFailures + + ", storageFolderOverride=" + storageFolderOverride + ", numCacheWorkers=" + + numCacheWorkers + ", maxTimeToLiveInMs=" + maxTimeToLiveInMs + ", resourceBasePath=" + + resourceBasePath + ", shallowEntities=" + shallowEntities + ", authenticationMode=" + + authenticationMode + "]"; + } + + public int getConnectTimeoutInMs() { + return connectTimeoutInMs; + } + + public void setConnectTimeoutInMs(int connectTimeoutInMs) { + this.connectTimeoutInMs = connectTimeoutInMs; + } + + public int getReadTimeoutInMs() { + return readTimeoutInMs; + } + + public void setReadTimeoutInMs(int readTimeoutInMs) { + this.readTimeoutInMs = readTimeoutInMs; + } + + public int getNumRequestRetries() { + return numRequestRetries; + } + + public void setNumRequestRetries(int numRequestRetries) { + this.numRequestRetries = numRequestRetries; + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventorySslConfig.java b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventorySslConfig.java new file mode 100644 index 0000000..272e351 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/config/ActiveInventorySslConfig.java @@ -0,0 +1,217 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai.config; + +import java.util.Properties; + +import org.eclipse.jetty.util.security.Password; +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.util.Encryptor; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +/** + * The Class ActiveInventorySslConfig. + */ +public class ActiveInventorySslConfig { + + private Encryptor encryptor; + + private boolean enableSslDebug; + private boolean validateServerHostName; + private boolean validateServerCertificateChain; + + private String keystoreType; + private String keystoreFilename; + private String keystorePassword; + private String truststoreType; + private String truststoreFilename; + + private String basicAuthUsername; + private String basicAuthPassword; + + /** + * Instantiates a new active inventory ssl config. + * + * @param props the props + */ + public ActiveInventorySslConfig(Properties props, Encryptor encryptor) { + + if (props == null) { + return; + } + + Properties sslProps = ConfigHelper.getConfigWithPrefix("aai.ssl", props); + + enableSslDebug = Boolean.parseBoolean(sslProps.getProperty("enableDebug", "false")); + validateServerHostName = + Boolean.parseBoolean(sslProps.getProperty("validateServerHostName", "false")); + validateServerCertificateChain = + Boolean.parseBoolean(sslProps.getProperty("validateServerCertificateChain", "false")); + + if (enableSslDebug) { + System.setProperty("javax.net.debug", "ssl"); + } else { + System.setProperty("javax.net.debug", ""); + } + + this.encryptor = encryptor; + + + keystoreType = sslProps.getProperty("keystore.type", "pkcs12"); + + keystoreFilename = + TierSupportUiConstants.CONFIG_AUTH_LOCATION + sslProps.getProperty("keystore.filename"); + keystorePassword = encryptor.decryptValue(sslProps.getProperty("keystore.pass", "")); + truststoreType = sslProps.getProperty("truststore.type", "jks"); + + truststoreFilename = + TierSupportUiConstants.CONFIG_AUTH_LOCATION + sslProps.getProperty("truststore.filename"); + + basicAuthUsername = sslProps.getProperty("basicAuth.username"); + basicAuthPassword = decryptPassword(sslProps.getProperty("basicAuth.password")); + + } + + private String decryptPassword(String encryptedPassword) { + + try { + + if (encryptedPassword == null) { + return null; + } + + return Password.deobfuscate(encryptedPassword); + + } catch (Exception exc) { + + return encryptedPassword; + + } + + } + + public String getBasicAuthUsername() { + return basicAuthUsername; + } + + public void setBasicAuthUsername(String basicAuthUsername) { + this.basicAuthUsername = basicAuthUsername; + } + + public String getBasicAuthPassword() { 
+ return basicAuthPassword; + } + + public void setBasicAuthPassword(String basicAuthPassword) { + this.basicAuthPassword = basicAuthPassword; + } + + + public Encryptor getEncryptor() { + return encryptor; + } + + public void setEncryptor(Encryptor encryptor) { + this.encryptor = encryptor; + } + + public String getKeystoreType() { + return keystoreType; + } + + public void setKeystoreType(String keystoreType) { + this.keystoreType = keystoreType; + } + + public String getKeystoreFilename() { + return keystoreFilename; + } + + public void setKeystoreFilename(String keystoreFilename) { + this.keystoreFilename = keystoreFilename; + } + + public String getKeystorePassword() { + return keystorePassword; + } + + public void setKeystorePassword(String keystorePassword) { + this.keystorePassword = keystorePassword; + } + + public String getTruststoreType() { + return truststoreType; + } + + public void setTruststoreType(String truststoreType) { + this.truststoreType = truststoreType; + } + + public String getTruststoreFilename() { + return truststoreFilename; + } + + public void setTruststoreFilename(String truststoreFilename) { + this.truststoreFilename = truststoreFilename; + } + + public boolean isValidateServerHostName() { + return validateServerHostName; + } + + public void setValidateServerHostName(boolean validateServerHostName) { + this.validateServerHostName = validateServerHostName; + } + + public boolean isValidateServerCertificateChain() { + return validateServerCertificateChain; + } + + public void setValidateServerCertificateChain(boolean validateServerCertificateChain) { + this.validateServerCertificateChain = validateServerCertificateChain; + } + + public String getBasicAuthenticationCredentials() { + + String usernameAndPassword = getBasicAuthUsername() + ":" + + getBasicAuthPassword(); + return "Basic " + java.util.Base64.getEncoder().encodeToString(usernameAndPassword.getBytes()); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "ActiveInventorySslConfig [enableSslDebug=" + enableSslDebug + + ", validateServerHostName=" + validateServerHostName + ", validateServerCertificateChain=" + + validateServerCertificateChain + ", keystoreType=" + keystoreType + ", keystoreFilename=" + + keystoreFilename + ", truststoreType=" + truststoreType + ", truststoreFilename=" + + truststoreFilename + "]"; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/aai/enums/RestAuthenticationMode.java b/src/main/java/org/openecomp/sparky/dal/aai/enums/RestAuthenticationMode.java new file mode 100644 index 0000000..af2f884 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/aai/enums/RestAuthenticationMode.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
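getBasicAuthenticationCredentials() above produces a standard HTTP Basic header; as a standalone check (the credentials are made up):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class BasicAuthHeaderDemo {
  public static void main(String[] args) {
    String usernameAndPassword = "aaiui" + ":" + "secret"; // illustrative credentials
    String header = "Basic "
        + Base64.getEncoder().encodeToString(usernameAndPassword.getBytes(StandardCharsets.UTF_8));
    System.out.println("Authorization: " + header); // Authorization: Basic YWFpdWk6c2VjcmV0
  }
}

Passing the charset explicitly avoids platform-default surprises; the method above calls getBytes() without one.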
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.aai.enums; + +/** + * Authentication Modes: + * <li>HTTP_NOAUTH - intended to represent basic HTTP no authentication + * <li>SSL_BASIC - HTTP/S with username/password + * <li>SSL_CERT - HTTP/S with client cert + */ + +public enum RestAuthenticationMode { + HTTP_NOAUTH("HTTP_NO_AUTH"), + SSL_BASIC("SSL_BASIC"), + SSL_CERT("SSL_CERT"), + UNKNOWN_MODE("UNKNOWN_MODE"); + + private String authenticationModeLabel; + + private RestAuthenticationMode(String authModelLabel) { + this.authenticationModeLabel = authModelLabel; + } + + public String getAuthenticationModeLabel() { + return authenticationModeLabel; + } + + public static RestAuthenticationMode getRestAuthenticationMode(String authenticationMode) { + + RestAuthenticationMode mappedMode = RestAuthenticationMode.UNKNOWN_MODE; + + if (authenticationMode == null) { + return mappedMode; + } + + try { + mappedMode = RestAuthenticationMode.valueOf(authenticationMode); + } catch ( Exception exc) { + mappedMode = RestAuthenticationMode.UNKNOWN_MODE; + } + + return mappedMode; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/cache/EntityCache.java b/src/main/java/org/openecomp/sparky/dal/cache/EntityCache.java new file mode 100644 index 0000000..ccecc2d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/cache/EntityCache.java @@ -0,0 +1,63 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.cache; + +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Interface EntityCache. + * + * @author davea. + */ +public interface EntityCache { + + /** + * Gets the. + * + * @param entityKey the entity key + * @param link the link + * @return the operation result + */ + public OperationResult get(String entityKey, String link); + + /** + * Put. 
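+   *
+   * <p>A minimal cache-aside sketch (illustrative only; {@code fetchFromServer} below is a
+   * hypothetical helper, not part of this interface):
+   *
+   * <pre>
+   * EntityCache cache = ...; // any implementation (in-memory or persistent)
+   * OperationResult result = cache.get(entityKey, link);
+   * if (result == null) {
+   *   result = fetchFromServer(link);   // miss: resolve against the live source
+   *   cache.put(entityKey, result);     // remember it for subsequent lookups
+   * }
+   * </pre>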
+ * + * @param entityKey the entity key + * @param result the result + */ + public void put(String entityKey, OperationResult result); + + /** + * Shutdown. + */ + public void shutdown(); + + /** + * Clear. + */ + public void clear(); +} diff --git a/src/main/java/org/openecomp/sparky/dal/cache/InMemoryEntityCache.java b/src/main/java/org/openecomp/sparky/dal/cache/InMemoryEntityCache.java new file mode 100644 index 0000000..68378db --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/cache/InMemoryEntityCache.java @@ -0,0 +1,101 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.cache; + +import java.util.concurrent.ConcurrentHashMap; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; + +/** + * The Class InMemoryEntityCache. + * + * @author davea. + */ +public class InMemoryEntityCache implements EntityCache { + + private ConcurrentHashMap<String, OperationResult> cachedEntityData; + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(InMemoryEntityCache.class); + + /** + * Instantiates a new in memory entity cache. 
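+   *
+   * <p>The backing store is a {@link ConcurrentHashMap}, so a single instance can be shared
+   * across synchronizer worker threads without external locking.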
+   */
+  public InMemoryEntityCache() {
+    cachedEntityData = new ConcurrentHashMap<String, OperationResult>();
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.dal.cache.EntityCache#put(java.lang.String, org.openecomp.sparky.dal.rest.OperationResult)
+   */
+  @Override
+  public void put(String key, OperationResult data) {
+    if (data == null) {
+      return;
+    }
+
+    // putIfAbsent returns null when no prior entry existed, i.e. when this call
+    // actually cached the data, so that is the case worth logging
+    if (cachedEntityData.putIfAbsent(key, data) == null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug(AaiUiMsgs.DATA_CACHE_SUCCESS, key);
+      }
+    }
+
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.dal.cache.EntityCache#get(java.lang.String, java.lang.String)
+   */
+  @Override
+  public OperationResult get(String entityKey, String link) {
+
+    if (link != null) {
+      return cachedEntityData.get(link);
+    }
+
+    return null;
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.dal.cache.EntityCache#shutdown()
+   */
+  @Override
+  public void shutdown() {
+    // nothing to do for the in-memory cache
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.dal.cache.EntityCache#clear()
+   */
+  @Override
+  public void clear() {
+    cachedEntityData.clear();
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/dal/cache/PersistentEntityCache.java b/src/main/java/org/openecomp/sparky/dal/cache/PersistentEntityCache.java
new file mode 100644
index 0000000..1456b05
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/dal/cache/PersistentEntityCache.java
@@ -0,0 +1,305 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.dal.cache;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.LinkOption;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.synchronizer.task.PersistOperationResultToDisk;
+import org.openecomp.sparky.synchronizer.task.RetrieveOperationResultFromDisk;
+import org.openecomp.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * The Class PersistentEntityCache.
+ */
+public class PersistentEntityCache implements EntityCache {
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(PersistentEntityCache.class);
+
+  /*
+   * TODO: <li>implement time-to-live on the cache, maybe pull in one of Guava's eviction caches?
+   * <li>implement an abstract base cache to hold common cache concerns (like ttl)
+   */
+
+  private static final String DEFAULT_OUTPUT_PATH = "offlineEntityCache";
+  private ExecutorService persistentExecutor;
+  private ObjectMapper mapper;
+  private String storagePath;
+
+  /**
+   * Instantiates a new persistent entity cache.
+   */
+  public PersistentEntityCache() {
+    this(null, 10);
+  }
+
+  /**
+   * Instantiates a new persistent entity cache.
+   *
+   * @param numWorkers the num workers
+   */
+  public PersistentEntityCache(int numWorkers) {
+    this(null, numWorkers);
+  }
+
+  /**
+   * Instantiates a new persistent entity cache.
+   *
+   * @param storageFolderOverride the storage folder override
+   * @param numWorkers the num workers
+   */
+  public PersistentEntityCache(String storageFolderOverride, int numWorkers) {
+    persistentExecutor = NodeUtils.createNamedExecutor("PEC", numWorkers, LOG);
+    mapper = new ObjectMapper();
+
+    if (storageFolderOverride != null && storageFolderOverride.length() > 0) {
+      this.storagePath = storageFolderOverride;
+    } else {
+      this.storagePath = DEFAULT_OUTPUT_PATH;
+    }
+  }
+
+  /**
+   * Generate offline storage path from uri.
+   *
+   * @param link the link
+   * @return the string
+   */
+  private String generateOfflineStoragePathFromUri(String link) {
+
+    // NOTE: storage paths are assembled with Windows-style '\' separators throughout
+    // this class, which matters if the cache is ever run on a non-Windows host
+    try {
+      URI uri = new URI(link);
+
+      String modHost = uri.getHost().replace(".", "_");
+
+      String[] tokens = uri.getPath().split("\\/");
+      List<String> resourcePathAndDomain = new ArrayList<String>();
+
+      if (tokens.length >= 4) {
+
+        int numElements = 0;
+        for (String w : tokens) {
+
+          if (numElements > 3) {
+            break;
+          }
+
+          if (w.length() > 0) {
+            resourcePathAndDomain.add(w);
+            numElements++;
+          }
+
+        }
+      } else {
+        return this.storagePath + "\\";
+      }
+
+      return this.storagePath + "\\" + modHost + "\\"
+          + NodeUtils.concatArray(resourcePathAndDomain, "_") + "\\";
+
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.OFFLINE_STORAGE_PATH_ERROR, link, exc.getMessage());
+    }
+
+    return this.storagePath + "\\";
+
+  }
+
+  /**
+   * Creates the dirs.
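+   *
+   * <p>Quietly returns when the path is null or the directory already exists, so callers
+   * can invoke it before every read or write without extra checks.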
+ * + * @param directoryPath the directory path + */ + private void createDirs(String directoryPath) { + if (directoryPath == null) { + return; + } + + Path path = Paths.get(directoryPath); + // if directory exists? + if (!Files.exists(path)) { + try { + Files.createDirectories(path); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.DISK_CREATE_DIR_IO_ERROR, exc.getMessage()); + } + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.cache.EntityCache#get(java.lang.String, java.lang.String) + */ + @Override + public OperationResult get(String key, String link) { + + final String storagePath = generateOfflineStoragePathFromUri(link); + createDirs(storagePath); + final String persistentFileName = storagePath + "\\" + key + ".json"; + + CompletableFuture<OperationResult> task = supplyAsync( + new RetrieveOperationResultFromDisk(persistentFileName, mapper, LOG), persistentExecutor); + + try { + /* + * this will do a blocking get, but it will be blocking only on the thread that executed this + * method which should be one of the persistentWorker threads from the executor. + */ + return task.get(); + } catch (InterruptedException | ExecutionException exc) { + // TODO Auto-generated catch block + LOG.error(AaiUiMsgs.DISK_NAMED_DATA_READ_IO_ERROR, "txn", exc.getMessage()); + } + + return null; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.cache.EntityCache#put(java.lang.String, org.openecomp.sparky.dal.rest.OperationResult) + */ + @Override + public void put(String key, OperationResult data) { + + final String storagePath = generateOfflineStoragePathFromUri(data.getRequestLink()); + createDirs(storagePath); + final String persistentFileName = storagePath + "\\" + key + ".json"; + + Path persistentFilePath = Paths.get(persistentFileName); + + if (!Files.exists(persistentFilePath, LinkOption.NOFOLLOW_LINKS)) { + + supplyAsync(new PersistOperationResultToDisk(persistentFileName, data, mapper, LOG), + persistentExecutor).whenComplete((opResult, error) -> { + + if (error != null) { + LOG.error(AaiUiMsgs.DISK_DATA_WRITE_IO_ERROR, "entity", error.getMessage()); + } + + }); + } + + } + + /** + * The main method. + * + * @param args the arguments + * @throws URISyntaxException the URI syntax exception + */ + public static void main(String[] args) throws URISyntaxException { + + OperationResult or = new OperationResult(); + or.setResult("asdjashdkajsdhaksdj"); + or.setResultCode(200); + + String url1 = "https://aai-int1.dev.att.com:8443/aai/v8/search/nodes-query?" 
+ + "search-node-type=tenant&filter=tenant-id:EXISTS"; + + or.setRequestLink(url1); + + PersistentEntityCache pec = new PersistentEntityCache("e:\\my_special_folder", 5); + String k1 = NodeUtils.generateUniqueShaDigest(url1); + pec.put(k1, or); + + String url2 = + "https://aai-int1.dev.att.com:8443/aai/v8/network/vnfcs/vnfc/trial-vnfc?nodes-only"; + or.setRequestLink(url2); + String k2 = NodeUtils.generateUniqueShaDigest(url2); + pec.put(k2, or); + + String url3 = "https://1.2.3.4:8443/aai/v8/network/vnfcs/vnfc/trial-vnfc?nodes-only"; + or.setRequestLink(url3); + String k3 = NodeUtils.generateUniqueShaDigest(url3); + pec.put(k3, or); + + pec.shutdown(); + + /* + * URI uri1 = new URI(url1); + * + * System.out.println("schemea = " + uri1.getScheme()); System.out.println("host = " + + * uri1.getHost()); + * + * String host = uri1.getHost(); String[] tokens = host.split("\\."); + * System.out.println(Arrays.asList(tokens)); ArrayList<String> tokenList = new + * ArrayList(Arrays.asList(tokens)); //tokenList.remove(tokens.length-1); String + * hostAsPathElement = NodeUtils.concatArray(tokenList, "_"); + * + * System.out.println("hostAsPathElement = " + hostAsPathElement); + * + * + * System.out.println("port = " + uri1.getPort()); System.out.println("path = " + + * uri1.getPath()); System.out.println("query = " + uri1.getQuery()); System.out.println( + * "fragment = " + uri1.getFragment()); + */ + + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.cache.EntityCache#shutdown() + */ + @Override + public void shutdown() { + if (persistentExecutor != null) { + persistentExecutor.shutdown(); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.cache.EntityCache#clear() + */ + @Override + public void clear() { + /* + * do nothing for this one, as it is not clear if we we really want to clear on the on-disk + * cache or not + */ + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchAdapter.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchAdapter.java new file mode 100644 index 0000000..f2df3ab --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchAdapter.java @@ -0,0 +1,165 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.dal.elasticsearch; + +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.openecomp.sparky.dal.rest.RestfulDataAccessor; + +/** + * The Class ElasticSearchAdapter. + * + * @author davea. + */ +public class ElasticSearchAdapter implements ElasticSearchDataProvider { + + private static final String BULK_IMPORT_INDEX_TEMPLATE = + "{\"index\":{\"_index\":\"%s\",\"_type\":\"%s\",\"_id\":\"%s\", \"_version\":\"%s\"}}\n"; + + private final RestDataProvider restDataProvider; + private final ElasticSearchConfig esConfig; + + /** + * Instantiates a new elastic search adapter. + * + * @param provider the provider + */ + public ElasticSearchAdapter(RestDataProvider provider, ElasticSearchConfig esConfig) { + this.restDataProvider = provider; + this.esConfig = esConfig; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doGet(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doGet(String url, String acceptContentType) { + return restDataProvider.doGet(url, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doDelete(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doDelete(String url, String acceptContentType) { + return restDataProvider.doDelete(url, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPost(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPost(String url, String jsonPayload, String acceptContentType) { + return restDataProvider.doPost(url, jsonPayload, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPut(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPut(String url, String jsonPayload, String acceptContentType) { + return restDataProvider.doPut(url, jsonPayload, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPatch(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPatch(String url, String jsonPayload, String acceptContentType) { + return restDataProvider.doPatch(url, jsonPayload, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doHead(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doHead(String url, String acceptContentType) { + return restDataProvider.doHead(url, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#clearCache() + */ + @Override + public void clearCache() { + restDataProvider.clearCache(); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider#doBulkOperation(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doBulkOperation(String url, String payload) { + + return doRestfulOperation(HttpMethod.PUT, url, payload, + RestfulDataAccessor.APPLICATION_X_WWW_FORM_URL_ENCODED, + RestfulDataAccessor.APPLICATION_JSON); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider#shutdown() + */ + @Override + public void shutdown() { + restDataProvider.shutdown(); + } + + /* 
(non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doRestfulOperation(org.openecomp.sparky.dal.rest.HttpMethod, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doRestfulOperation(HttpMethod method, String url, String payload, + String payloadType, String acceptContentType) { + return restDataProvider.doRestfulOperation(method, url, payload, payloadType, + acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider#buildBulkImportOperationRequest(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public String buildBulkImportOperationRequest(String index, String type, String id, + String version, String payload) { + + StringBuilder requestPayload = new StringBuilder(128); + + requestPayload.append(String.format(BULK_IMPORT_INDEX_TEMPLATE, index, type, id, version)); + requestPayload.append(payload).append("\n"); + + return requestPayload.toString(); + + } + + @Override + public OperationResult retrieveEntityById(String entityId) throws Exception { + + String url = esConfig.getElasticFullUrl("/" +entityId); + return doGet( url, "application/json"); + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchDataProvider.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchDataProvider.java new file mode 100644 index 0000000..97c4f4d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchDataProvider.java @@ -0,0 +1,66 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.elasticsearch; + +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; + +/** + * The Interface ElasticSearchDataProvider. + */ +public interface ElasticSearchDataProvider extends RestDataProvider { + + /** + * Builds the bulk import operation request. + * + * @param index the index + * @param type the type + * @param id the id + * @param version the version + * @param payload the payload + * @return the string + */ + String buildBulkImportOperationRequest(String index, String type, String id, String version, + String payload); + + /** + * Do bulk operation. 
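+   *
+   * <p>The payload is expected in Elasticsearch bulk (NDJSON) form: an action line followed
+   * by a document line, as produced by
+   * {@link #buildBulkImportOperationRequest(String, String, String, String, String)}.
+   * A sketch, using the default index/type names; the id, version and document fields
+   * below are made up for illustration:
+   *
+   * <pre>
+   * {"index":{"_index":"entitySearchIndex","_type":"aaiEntities","_id":"abc123", "_version":"1"}}
+   * {"entityType":"tenant","entityPrimaryKeyValue":"tenant-1"}
+   * </pre>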
+ * + * @param url the url + * @param payload the payload + * @return the operation result + */ + OperationResult doBulkOperation(String url, String payload); + + OperationResult retrieveEntityById(String entityId) throws Exception; + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() + */ + @Override + void shutdown(); + +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java new file mode 100644 index 0000000..6194027 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/ElasticSearchEntityStatistics.java @@ -0,0 +1,274 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.elasticsearch; + +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class ElasticSearchEntityStatistics. + */ +public class ElasticSearchEntityStatistics { + + private static final String TOTAL = "Total"; + private static final String CREATED = "Created"; + private static final String MODIFIED = "Modified"; + private static final String OTHERSUCCESS = "OTHERSUCCESS"; + private static final String DELETED = "DELETED"; + private static final String ERROR = "ERROR"; + + private Map<String, HashMap<String, AtomicInteger>> entityStatistics; + private OxmModelLoader loader; + + /** + * Creates the entity op stats. 
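+   *
+   * <p>Seeds one zeroed counter per outcome bucket: Total, Created, Modified, OTHERSUCCESS,
+   * DELETED and ERROR.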
+ * + * @return the hash map + */ + private HashMap<String, AtomicInteger> createEntityOpStats() { + + HashMap<String, AtomicInteger> opStats = new HashMap<String, AtomicInteger>(); + + opStats.put(TOTAL, new AtomicInteger()); + opStats.put(CREATED, new AtomicInteger()); + opStats.put(MODIFIED, new AtomicInteger()); + opStats.put(OTHERSUCCESS, new AtomicInteger()); + opStats.put(DELETED, new AtomicInteger()); + opStats.put(ERROR, new AtomicInteger()); + + return opStats; + + } + + /* + * private void createActiveInventoryEntityStatistics() { + * + * Map<String,OxmEntityDescriptor> descriptors = loader.getSearchableEntityDescriptors(); + * + * if(descriptors == null) { return; } + * + * OxmEntityDescriptor d = null; for ( String key : descriptors.keySet() ) { d = + * descriptors.get(key); entityStatistics.put(d.getEntityName(), createEntityOpStats()); } + * + * } + */ + + /** + * Initializecreate active inventory entity statistics. + */ + private void initializecreateActiveInventoryEntityStatistics() { + Set<String> keys = entityStatistics.keySet(); + + Set<String> opStatKeySet = null; + Map<String, AtomicInteger> opStats = null; + + for (String k : keys) { + + opStats = entityStatistics.get(k); + + opStatKeySet = opStats.keySet(); + + for (String opStatKey : opStatKeySet) { + opStats.get(opStatKey).set(0); + } + } + } + + /** + * Instantiates a new elastic search entity statistics. + * + * @param loader the loader + */ + public ElasticSearchEntityStatistics(OxmModelLoader loader) { + this.loader = loader; + entityStatistics = new HashMap<String, HashMap<String, AtomicInteger>>(); + // createActiveInventoryEntityStatistics(); + reset(); + } + + /** + * Initialize counters from oxm entity descriptors. + * + * @param descriptors the descriptors + */ + public void initializeCountersFromOxmEntityDescriptors( + Map<String, OxmEntityDescriptor> descriptors) { + + if (descriptors == null) { + return; + } + + OxmEntityDescriptor descriptor = null; + for (String key : descriptors.keySet()) { + descriptor = descriptors.get(key); + entityStatistics.put(descriptor.getEntityName(), createEntityOpStats()); + } + } + + /** + * Reset. + */ + public void reset() { + initializecreateActiveInventoryEntityStatistics(); + } + + /** + * Gets the result code. + * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + + if (txn == null) { + return -1; + } + + OperationResult or = txn.getOperationResult(); + + if (or == null) { + return -1; + } + + return or.getResultCode(); + + } + + /** + * Update elastic search entity counters. 
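+   *
+   * <p>PUT results are bucketed as Created (201), Modified (200), OTHERSUCCESS (202-299)
+   * or ERROR; DELETE results as DELETED (2xx) or ERROR. The lookup assumes the
+   * transaction's entity type was registered beforehand via
+   * {@code initializeCountersFromOxmEntityDescriptors}; an unregistered type would
+   * surface here as a null counter map.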
+ * + * @param txn the txn + */ + private void updateElasticSearchEntityCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + Map<String, AtomicInteger> entityOpStats = entityStatistics.get(txn.getEntityType()); + + int resultCode = getResultCode(txn); + + if (txn.getOperationType() == HttpMethod.PUT) { + + entityOpStats.get(TOTAL).incrementAndGet(); + + if (resultCode == 201) { + entityOpStats.get(CREATED).incrementAndGet(); + } else if (resultCode == 200) { + entityOpStats.get(MODIFIED).incrementAndGet(); + } else if (202 <= resultCode && resultCode <= 299) { + entityOpStats.get(OTHERSUCCESS).incrementAndGet(); + } else { + entityOpStats.get(ERROR).incrementAndGet(); + } + + } else if (txn.getOperationType() == HttpMethod.DELETE) { + + entityOpStats.get(TOTAL).incrementAndGet(); + + if (200 <= resultCode && resultCode <= 299) { + entityOpStats.get(DELETED).incrementAndGet(); + } else { + entityOpStats.get(ERROR).incrementAndGet(); + } + } + + } + + /** + * Update counters. + * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + updateElasticSearchEntityCounters(txn); + + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + /* + * sort entities, then sort nested op codes + */ + + TreeMap<String, HashMap<String, AtomicInteger>> elasticEntitySortedTreeMap = + new TreeMap<String, HashMap<String, AtomicInteger>>(new Comparator<String>() { + + @Override + public int compare(String o1, String o2) { + return o1.toLowerCase().compareTo(o2.toLowerCase()); + } + }); + + elasticEntitySortedTreeMap.putAll(entityStatistics); + + for (String counterEntityKey : elasticEntitySortedTreeMap.keySet()) { + + HashMap<String, AtomicInteger> entityCounters = + elasticEntitySortedTreeMap.get(counterEntityKey); + + AtomicInteger total = entityCounters.get(TOTAL); + AtomicInteger created = entityCounters.get(CREATED); + AtomicInteger modified = entityCounters.get(MODIFIED); + AtomicInteger otherSuccess = entityCounters.get(OTHERSUCCESS); + AtomicInteger deleted = entityCounters.get(DELETED); + AtomicInteger error = entityCounters.get(ERROR); + + int totalValue = (total == null) ? 0 : total.get(); + int createdValue = (created == null) ? 0 : created.get(); + int modifiedValue = (modified == null) ? 0 : modified.get(); + int otherSuccessValue = (otherSuccess == null) ? 0 : otherSuccess.get(); + int deletedValue = (deleted == null) ? 0 : deleted.get(); + int errorValue = (error == null) ? 0 : error.get(); + + sb.append("\n ") + .append(String.format( + "%-30s TOTAL: %-12d CREATED: %-12d MODIFIED:" + + " %-12d OTHER_2XX: %-12d DELETED: %-12d ERROR: %-12d", + counterEntityKey, totalValue, createdValue, modifiedValue, otherSuccessValue, + deletedValue, errorValue)); + } + return sb.toString(); + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/HashQueryResponse.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/HashQueryResponse.java new file mode 100644 index 0000000..a376add --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/HashQueryResponse.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.dal.elasticsearch; + +import org.json.JSONObject; +import org.openecomp.sparky.dal.rest.OperationResult; + +public class HashQueryResponse { + private String jsonPayload = null; + private OperationResult opResult = null; + + public HashQueryResponse() { + this(null, null); + } + + public HashQueryResponse(String jsonPayload, OperationResult opResult) { + this.jsonPayload = jsonPayload; + this.opResult = opResult; + } + + public String getJsonPayload() { + return jsonPayload; + } + public void setJsonPayload(String jsonPayload) { + this.jsonPayload = jsonPayload; + } + public OperationResult getOpResult() { + return opResult; + } + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + @Override + public String toString() { + return "HashQueryResponse [jsonPayload=" + jsonPayload + ", opResult=" + opResult + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/SearchAdapter.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/SearchAdapter.java new file mode 100644 index 0000000..9479a8f --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/SearchAdapter.java @@ -0,0 +1,122 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */
+
+package org.openecomp.sparky.dal.elasticsearch;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.restclient.client.Headers;
+import org.openecomp.restclient.client.RestClient;
+import org.openecomp.restclient.enums.RestAuthenticationMode;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.dal.sas.config.SearchServiceConfig;
+import org.openecomp.sparky.util.Encryptor;
+import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants;
+import org.slf4j.MDC;
+
+/**
+ * The Class SearchAdapter.
+ */
+public class SearchAdapter {
+
+  private static final Logger LOG = LoggerFactory.getInstance().getLogger(SearchAdapter.class);
+
+  private RestClient client;
+
+  private Map<String, List<String>> commonHeaders;
+  private SearchServiceConfig sasConfig;
+
+  /**
+   * Instantiates a new search adapter.
+   *
+   * @throws Exception if the adapter or its configuration cannot be initialized
+   */
+  public SearchAdapter() throws Exception {
+    sasConfig = SearchServiceConfig.getConfig();
+    Encryptor encryptor = new Encryptor();
+    client = new RestClient().authenticationMode(RestAuthenticationMode.SSL_CERT)
+        .validateServerHostname(false).validateServerCertChain(false)
+        .clientCertFile(TierSupportUiConstants.CONFIG_AUTH_LOCATION + sasConfig.getCertName())
+        .clientCertPassword(encryptor.decryptValue(sasConfig.getKeystorePassword()))
+        .trustStore(TierSupportUiConstants.CONFIG_AUTH_LOCATION + sasConfig.getKeystore());
+
+    commonHeaders = new HashMap<String, List<String>>();
+    commonHeaders.put("Accept", Arrays.asList("application/json"));
+    commonHeaders.put(Headers.FROM_APP_ID, Arrays.asList("AAI-UI"));
+  }
+
+  public SearchServiceConfig getSasConfig() {
+    return sasConfig;
+  }
+
+  public void setSasConfig(SearchServiceConfig sasConfig) {
+    this.sasConfig = sasConfig;
+  }
+
+  public OperationResult doPost(String url, String jsonPayload, String acceptContentType) {
+    org.openecomp.restclient.client.OperationResult or = client.post(url, jsonPayload,
+        getTxnHeader(), MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+    return new OperationResult(or.getResultCode(), or.getResult());
+  }
+
+  public OperationResult doGet(String url, String acceptContentType) {
+    org.openecomp.restclient.client.OperationResult or =
+        client.get(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE);
+    return new OperationResult(or.getResultCode(), or.getResult());
+  }
+
+  public OperationResult doPut(String url, String payload, String acceptContentType) {
+    org.openecomp.restclient.client.OperationResult or = client.put(url, payload, getTxnHeader(),
+        MediaType.APPLICATION_JSON_TYPE, MediaType.APPLICATION_JSON_TYPE);
+    return new OperationResult(or.getResultCode(), or.getResult());
+  }
+
+  public OperationResult doDelete(String url, String acceptContentType) {
+
+    org.openecomp.restclient.client.OperationResult or =
+        client.delete(url, getTxnHeader(), MediaType.APPLICATION_JSON_TYPE);
+    return new OperationResult(or.getResultCode(), or.getResult());
+  }
+
+  public Map<String, List<String>> getTxnHeader() {
+    // start from the common headers, then add the per-transaction MDC-derived values
+    Map<String, List<String>> headers = new HashMap<String, List<String>>();
+    headers.putAll(this.commonHeaders);
+    headers.put("X-TransactionId", Arrays.asList(MDC.get(MdcContext.MDC_REQUEST_ID)));
+    headers.put("X-FromAppId",
Arrays.asList(MDC.get(MdcContext.MDC_PARTNER_NAME))); + return headers; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/elasticsearch/config/ElasticSearchConfig.java b/src/main/java/org/openecomp/sparky/dal/elasticsearch/config/ElasticSearchConfig.java new file mode 100644 index 0000000..3f2cf7a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/elasticsearch/config/ElasticSearchConfig.java @@ -0,0 +1,543 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.elasticsearch.config; + +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.IOException; +import java.util.Properties; + +import org.openecomp.sparky.dal.exception.ElasticSearchOperationException; +import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig; +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + + +/** + * The Class ElasticSearchConfig. 
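+ *
+ * <p>Typical lookup flow (a sketch; values come from elasticsearch.properties, falling
+ * back to the defaults declared below):
+ *
+ * <pre>
+ * ElasticSearchConfig config = ElasticSearchConfig.getConfig();
+ * // with the defaults: http://localhost:9200/entitySearchIndex/aaiEntities/_search
+ * String searchUrl = config.getElasticFullUrl("/_search");
+ * </pre>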
+ */ +public class ElasticSearchConfig { + + public static final String CONFIG_FILE = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "elasticsearch.properties"; + + private static ElasticSearchConfig instance; + + private String ipAddress; + + private String httpPort; + + private String javaApiPort; + + private String indexName; + + private String type; + + private String clusterName; + + private String mappingsFileName; + + private String settingsFileName; + + private int syncAdapterMaxConcurrentWorkers; + + private String auditIndexName; + + private String topographicalSearchIndex; + + private String entityCountHistoryIndex; + + private String autosuggestIndexname; + + private String entityCountHistoryMappingsFileName; + + private String autoSuggestSettingsFileName; + + private String autoSuggestMappingsFileName; + + private String dynamicMappingsFileName; + + private static final String IP_ADDRESS_DEFAULT = "localhost"; + + private static final String HTTP_PORT_DEFAULT = "9200"; + + private static final String JAVA_API_PORT_DEFAULT = "9300"; + + private static final String TYPE_DEFAULT = "aaiEntities"; + + private static final String CLUSTER_NAME_DEFAULT = "elasticsearch"; + + private static final String INDEX_NAME_DEFAULT = "entitySearchIndex"; + + private static final String AUDIT_INDEX_NAME_DEFAULT = "auditdataindex"; + + private static final String TOPOGRAPHICAL_INDEX_NAME_DEFAULT = "topographicalSearchIndex"; + + private static final String ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT = "entityCountHistory"; + + private static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT = + TierSupportUiConstants.ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT; + + private static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT = + TierSupportUiConstants.ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT; + + private static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT = + TierSupportUiConstants.ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT; + + private static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT = + TierSupportUiConstants.ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT; + + private static final String BULK_API = "_bulk"; + + private TaskProcessorConfig processorConfig; + + public TaskProcessorConfig getProcessorConfig() { + return processorConfig; + } + + public void setProcessorConfig(TaskProcessorConfig processorConfig) { + this.processorConfig = processorConfig; + } + + public static ElasticSearchConfig getConfig() throws Exception { + + if (instance == null) { + instance = new ElasticSearchConfig(); + instance.initializeProperties(); + } + + return instance; + } + + public static void setConfig(ElasticSearchConfig config) { + /* + * Explicitly allow setting the configuration singleton. This will be useful for automation. + */ + + ElasticSearchConfig.instance = config; + } + + /** + * Instantiates a new elastic search config. 
+ */ + public ElasticSearchConfig() { + // test method + } + + public String getElasticFullUrl(String resourceUrl, String indexName, String indexType) + throws Exception { + final String host = getIpAddress(); + final String port = getHttpPort(); + return String.format("http://%s:%s/%s/%s%s", host, port, indexName, indexType, resourceUrl); + } + + public String getElasticFullUrl(String resourceUrl, String indexName) throws Exception { + final String host = getIpAddress(); + final String port = getHttpPort(); + return String.format("http://%s:%s/%s/%s%s", host, port, indexName, + ElasticSearchConfig.getConfig().getType(), resourceUrl); + } + + public String getElasticFullUrl(String resourceUrl) throws Exception { + final String host = getIpAddress(); + final String port = getHttpPort(); + final String indexName = getIndexName(); + return String.format("http://%s:%s/%s/%s%s", host, port, indexName, getType(), resourceUrl); + } + + /** + * Initialize properties. + */ + private void initializeProperties() { + Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); + + ipAddress = props.getProperty("elasticsearch.ipAddress", IP_ADDRESS_DEFAULT); + httpPort = props.getProperty("elasticsearch.httpPort", "" + HTTP_PORT_DEFAULT); + javaApiPort = props.getProperty("elasticsearch.javaApiPort", "" + JAVA_API_PORT_DEFAULT); + type = props.getProperty("elasticsearch.type", TYPE_DEFAULT); + clusterName = props.getProperty("elasticsearch.clusterName", CLUSTER_NAME_DEFAULT); + indexName = props.getProperty("elasticsearch.indexName", INDEX_NAME_DEFAULT); + mappingsFileName = props.getProperty("elasticsearch.mappingsFileName"); + settingsFileName = props.getProperty("elasticsearch.settingsFileName"); + auditIndexName = props.getProperty("elasticsearch.auditIndexName", AUDIT_INDEX_NAME_DEFAULT); + topographicalSearchIndex = + props.getProperty("elasticsearch.topographicalIndexName", TOPOGRAPHICAL_INDEX_NAME_DEFAULT); + entityCountHistoryIndex = props.getProperty("elasticsearch.entityCountHistoryIndexName", + ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT); + entityCountHistoryMappingsFileName = + props.getProperty("elasticsearch.entityCountHistoryMappingsFileName"); + + autosuggestIndexname = props.getProperty("elasticsearch.autosuggestIndexname", + ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT); + autoSuggestSettingsFileName = props.getProperty("elasticsearch.autosuggestSettingsFileName", + ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT); + autoSuggestMappingsFileName = props.getProperty("elasticsearch.autosuggestMappingsFileName", + ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT); + dynamicMappingsFileName = props.getProperty("elasticsearch.dynamicMappingsFileName", + ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT); + + syncAdapterMaxConcurrentWorkers = + Integer.parseInt(props.getProperty("elasticsearch.syncAdapter.maxConcurrentWorkers", "5")); + + processorConfig = new TaskProcessorConfig(); + processorConfig.initializeFromProperties( + ConfigHelper.getConfigWithPrefix("elasticsearch.taskProcessor", props)); + + } + + public String getIpAddress() { + return ipAddress; + } + + public void setIpAddress(String ipAddress) { + this.ipAddress = ipAddress; + } + + public String getHttpPort() { + return httpPort; + } + + public void setHttpPort(String httpPort) { + this.httpPort = httpPort; + } + + public String getJavaApiPort() { + return javaApiPort; + } + + public void setJavaApiPort(String javaApiPort) { + this.javaApiPort = javaApiPort; + } + + public String getIndexName() { + return indexName; + } + + public void 
setIndexName(String indexName) { + this.indexName = indexName; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getMappingsFileName() { + return mappingsFileName; + } + + public void setMappingsFileName(String mappingsFileName) { + this.mappingsFileName = mappingsFileName; + } + + public String getSettingsFileName() { + return settingsFileName; + } + + public int getSyncAdapterMaxConcurrentWorkers() { + return syncAdapterMaxConcurrentWorkers; + } + + public void setSyncAdapterMaxConcurrentWorkers(int syncAdapterMaxConcurrentWorkers) { + this.syncAdapterMaxConcurrentWorkers = syncAdapterMaxConcurrentWorkers; + } + + public void setSettingsFileName(String settingsFileName) { + this.settingsFileName = settingsFileName; + } + + public String getAuditIndexName() { + return auditIndexName; + } + + public void setAuditIndexName(String auditIndexName) { + this.auditIndexName = auditIndexName; + } + + public String getTopographicalSearchIndex() { + return topographicalSearchIndex; + } + + public void setTopographicalSearchIndex(String topographicalSearchIndex) { + this.topographicalSearchIndex = topographicalSearchIndex; + } + + public String getEntityCountHistoryIndex() { + return entityCountHistoryIndex; + } + + public void setEntityCountHistoryIndex(String entityCountHistoryIndex) { + this.entityCountHistoryIndex = entityCountHistoryIndex; + } + + + public String getEntityCountHistoryMappingsFileName() { + return entityCountHistoryMappingsFileName; + } + + public void setEntityCountHistoryMappingsFileName(String entityCountHistoryMappingsFileName) { + this.entityCountHistoryMappingsFileName = entityCountHistoryMappingsFileName; + } + + public String getBulkUrl() { + String url = this.getIpAddress(); + String port = this.getHttpPort(); + return String.format("http://%s:%s/%s", url, port, BULK_API); + } + + public String getConfigAsString(String configItem, String configFileName) + throws ElasticSearchOperationException { + String indexConfig = null; + + try { + indexConfig = ConfigHelper.getFileContents(configFileName); + } catch (IOException exc) { + throw new ElasticSearchOperationException( + "Failed to read index " + configItem + " from file = " + configFileName + ".", exc); + } + + if (indexConfig == null) { + throw new ElasticSearchOperationException( + "Failed to load index " + configItem + " with filename = " + configFileName + "."); + } + return indexConfig; + } + + public String getElasticSearchSettings() throws ElasticSearchOperationException { + return getConfigAsString("settings", + TierSupportUiConstants.getConfigPath(this.getSettingsFileName())); + } + + public String getDynamicMappings() throws ElasticSearchOperationException{ + return getConfigAsString("mapping", + TierSupportUiConstants.getConfigPath(this.getDynamicMappingsFileName())); + } + public String getElasticSearchMappings() throws ElasticSearchOperationException { + return getConfigAsString("mapping", + TierSupportUiConstants.getConfigPath(this.getMappingsFileName())); + } + + public String getElasticSearchEntityCountHistoryMappings() + throws ElasticSearchOperationException { + return getConfigAsString("mapping", + TierSupportUiConstants.getConfigPath(this.getEntityCountHistoryMappingsFileName())); + } + + public String getAutosuggestIndexSettings() throws 
ElasticSearchOperationException { + return getConfigAsString("setting", + TierSupportUiConstants.getConfigPath(this.getAutoSuggestSettingsFileName())); + } + + public String getAutosuggestIndexMappings() throws ElasticSearchOperationException { + return getConfigAsString("mapping", + TierSupportUiConstants.getConfigPath(this.getAutoSuggestMappingsFileName())); + } + + public String getAutosuggestIndexname() { + return autosuggestIndexname; + } + + public void setAutosuggestIndexname(String autosuggestIndexname) { + this.autosuggestIndexname = autosuggestIndexname; + } + + public String getAutoSuggestSettingsFileName() { + return autoSuggestSettingsFileName; + } + + public void setAutoSuggestSettingsFileName(String autoSuggestSettingsFileName) { + this.autoSuggestSettingsFileName = autoSuggestSettingsFileName; + } + + public String getAutoSuggestMappingsFileName() { + return autoSuggestMappingsFileName; + } + + public void setAutoSuggestMappingsFileName(String autoSuggestMappingsFileName) { + this.autoSuggestMappingsFileName = autoSuggestMappingsFileName; + } + + public String getDynamicMappingsFileName() { + return dynamicMappingsFileName; + } + + public void setDynamicMappingsFileName(String dynamicMappingsFileName) { + this.dynamicMappingsFileName = dynamicMappingsFileName; + } + + /** + * Builds the elastic search table config. + * + * @return the string + * @throws ElasticSearchOperationException the elastic search operation exception + */ + public String buildElasticSearchTableConfig() throws ElasticSearchOperationException { + + JsonNode esSettingsNode; + JsonNode esMappingsNodes; + ObjectMapper mapper = new ObjectMapper(); + + try { + esSettingsNode = mapper.readTree(getElasticSearchSettings()); + esMappingsNodes = mapper.readTree(getElasticSearchMappings()); + } catch (IOException e1) { + throw new ElasticSearchOperationException("Caught an exception building initial ES index"); + } + + ObjectNode esConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); + ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); + + esConfig.set("mappings", mappings); + + try { + return mapper.writeValueAsString(esConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + /** + * Builds the elastic search entity count history table config. 
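+   *
+   * <p>Produces the same <code>{"settings": ..., "mappings": {type: ...}}</code> envelope
+   * as {@link #buildElasticSearchTableConfig()}, with the entity-count-history mappings
+   * file supplying the mapping body.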
+ * + * @return the string + * @throws ElasticSearchOperationException the elastic search operation exception + */ + public String buildElasticSearchEntityCountHistoryTableConfig() + throws ElasticSearchOperationException { + + JsonNode esSettingsNode; + JsonNode esMappingsNodes; + ObjectMapper mapper = new ObjectMapper(); + + try { + esSettingsNode = mapper.readTree(getElasticSearchSettings()); + esMappingsNodes = mapper.readTree(getElasticSearchEntityCountHistoryMappings()); + } catch (IOException e1) { + throw new ElasticSearchOperationException("Caught an exception building initial ES index"); + } + + ObjectNode esConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); + ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); + + esConfig.set("mappings", mappings); + + try { + return mapper.writeValueAsString(esConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + public String buildAggregationTableConfig() throws ElasticSearchOperationException { + + JsonNode esMappingsNodes; + ObjectMapper mapper = new ObjectMapper(); + + try { + esMappingsNodes = mapper.readTree(this.getDynamicMappings()); + } catch (IOException e1) { + throw new ElasticSearchOperationException( + "Caught an exception building Aggreagation ES index"); + } + + ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); + + ObjectNode indexConfig = (ObjectNode) mapper.createObjectNode().set("mappings", mappings); + + try { + return mapper.writeValueAsString(indexConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + public String buildAutosuggestionTableConfig() throws ElasticSearchOperationException { + + JsonNode esSettingsNode; + JsonNode esMappingsNodes; + ObjectMapper mapper = new ObjectMapper(); + + try { + esSettingsNode = mapper.readTree(this.getAutosuggestIndexSettings()); + esMappingsNodes = mapper.readTree(this.getAutosuggestIndexMappings()); + } catch (IOException e1) { + throw new ElasticSearchOperationException( + "Caught an exception building Autosuggestion ES index"); + } + + ObjectNode indexConfig = (ObjectNode) mapper.createObjectNode().set("settings", esSettingsNode); + ObjectNode mappings = (ObjectNode) mapper.createObjectNode().set(getType(), esMappingsNodes); + + indexConfig.set("mappings", mappings); + + try { + return mapper.writeValueAsString(indexConfig); + } catch (JsonProcessingException exc) { + throw new ElasticSearchOperationException("Error getting object node as string", exc); + } + + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "ElasticSearchConfig [ipAddress=" + ipAddress + ", httpPort=" + httpPort + + ", javaApiPort=" + javaApiPort + ", indexName=" + indexName + ", type=" + type + + ", clusterName=" + clusterName + ", mappingsFileName=" + mappingsFileName + + ", settingsFileName=" + settingsFileName + ", syncAdapterMaxConcurrentWorkers=" + + syncAdapterMaxConcurrentWorkers + ", auditIndexName=" + auditIndexName + + ", topographicalSearchIndex=" + topographicalSearchIndex + ", entityCountHistoryIndex=" + + entityCountHistoryIndex + ", autosuggestIndexname=" + autosuggestIndexname + + ", entityCountHistoryMappingsFileName=" + entityCountHistoryMappingsFileName + + ", autoSuggestSettingsFileName=" + 
autoSuggestSettingsFileName + + ", autoSuggestMappingsFileName=" + autoSuggestMappingsFileName + + ", dynamicMappingsFileName=" + dynamicMappingsFileName + ", processorConfig=" + + processorConfig + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/dal/exception/ElasticSearchOperationException.java b/src/main/java/org/openecomp/sparky/dal/exception/ElasticSearchOperationException.java new file mode 100644 index 0000000..cb477d0 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/exception/ElasticSearchOperationException.java @@ -0,0 +1,54 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.exception; + +/** + * The Class ElasticSearchOperationException. + */ +public class ElasticSearchOperationException extends Exception { + + private static final long serialVersionUID = -7689309913743200670L; + + /** + * Instantiates a new elastic search operation exception. + * + * @param message the message + * @param exc the exc + */ + public ElasticSearchOperationException(String message, Exception exc) { + super(message, exc); + } + + /** + * Instantiates a new elastic search operation exception. + * + * @param message the message + */ + public ElasticSearchOperationException(String message) { + super(message); + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/HttpMethod.java b/src/main/java/org/openecomp/sparky/dal/rest/HttpMethod.java new file mode 100644 index 0000000..6a7c3db --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/HttpMethod.java @@ -0,0 +1,34 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
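Editor's note: the two-argument constructor defined above lets callers keep the root cause attached when wrapping lower-level failures, which is what the builder methods earlier now do. A minimal sketch (the parsing context is illustrative; 'mapper' and 'rawIndexConfig' are assumed in scope):

try {
  mapper.readTree(rawIndexConfig);
} catch (IOException ioe) {
  // chaining preserves the original stack trace for the caller
  throw new ElasticSearchOperationException("Failed to parse index config", ioe);
}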
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + + +/** + * The Enum HttpMethod. + */ +public enum HttpMethod { + GET, PUT, POST, DELETE, PATCH, HEAD +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/OperationResult.java b/src/main/java/org/openecomp/sparky/dal/rest/OperationResult.java new file mode 100644 index 0000000..fcceb2b --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/OperationResult.java @@ -0,0 +1,198 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + +/** + * The Class OperationResult. + */ +public class OperationResult { + + private String result; + + private String objectId; + private String requestLink; + private String requestPayload; + + private int resultCode; + + private boolean resolvedLinkFromCache; + + private boolean resolvedLinkFromServer; + + private boolean resolvedLinkFailure; + + private int numRequestRetries; + + private long responseTimeInMs; + + /** + * Reset. + */ + public void reset() { + this.objectId = null; + this.result = null; + this.requestLink = null; + this.requestPayload = null; + this.resultCode = -1; + this.resolvedLinkFailure = false; + this.resolvedLinkFromServer = false; + this.resolvedLinkFromCache = false; + this.responseTimeInMs = 0; + this.numRequestRetries = 0; + } + + public String getObjectId() { + return objectId; + } + + public void setObjectId(String objectId) { + this.objectId = objectId; + } + + public boolean isResolvedLinkFromCache() { + return resolvedLinkFromCache; + } + + /** + * Was successful. 
+ * + * @return true, if successful + */ + public boolean wasSuccessful() { + return (resultCode > 199 && resultCode < 300); + } + + public String getRequestLink() { + return requestLink; + } + + public void setRequestLink(String requestLink) { + this.requestLink = requestLink; + } + + public String getRequestPayload() { + return requestPayload; + } + + public void setRequestPayload(String requestPayload) { + this.requestPayload = requestPayload; + } + + public void setResolvedLinkFromCache(boolean resolvedLinkFromCache) { + this.resolvedLinkFromCache = resolvedLinkFromCache; + } + + public boolean isResolvedLinkFromServer() { + return resolvedLinkFromServer; + } + + public void setResolvedLinkFromServer(boolean resolvedLinkFromServer) { + this.resolvedLinkFromServer = resolvedLinkFromServer; + } + + public boolean isResolvedLinkFailure() { + return resolvedLinkFailure; + } + + public void setResolvedLinkFailure(boolean resolvedLinkFailure) { + this.resolvedLinkFailure = resolvedLinkFailure; + } + + public String getResult() { + return result; + } + + public int getResultCode() { + return resultCode; + } + + public void setResultCode(int resultCode) { + this.resultCode = resultCode; + } + + public void setResult(String result) { + this.result = result; + } + + /** + * Sets the result. + * + * @param resultCode the result code + * @param result the result + */ + public void setResult(int resultCode, String result) { + this.resultCode = resultCode; + this.result = result; + } + + /** + * Instantiates a new operation result. + */ + public OperationResult() { + super(); + } + + /** + * Instantiates a new operation result. + * + * @param resultCode the result code + * @param result the result + */ + public OperationResult(int resultCode, String result) { + super(); + this.resultCode = resultCode; + this.result = result; + } + + public long getResponseTimeInMs() { + return responseTimeInMs; + } + + public void setResponseTimeInMs(long responseTimeInMs) { + this.responseTimeInMs = responseTimeInMs; + } + + public int getNumRequestRetries() { + return numRequestRetries; + } + + public void setNumRequestRetries(int numRequestRetries) { + this.numRequestRetries = numRequestRetries; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "OperationResult [result=" + result + ", resultCode=" + resultCode + + ", resolvedLinkFromCache=" + resolvedLinkFromCache + ", resolvedLinkFromServer=" + + resolvedLinkFromServer + ", resolvedLinkFailure=" + resolvedLinkFailure + + ", numRequestRetries=" + numRequestRetries + ", responseTimeInMs=" + responseTimeInMs + + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/RestClientBuilder.java b/src/main/java/org/openecomp/sparky/dal/rest/RestClientBuilder.java new file mode 100644 index 0000000..267061a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/RestClientBuilder.java @@ -0,0 +1,148 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
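Editor's note: a hedged sketch of how a caller typically consumes an OperationResult; the provider stands in for any RestDataProvider implementation (RestfulDataAccessor, defined later in this commit) and the URL is illustrative:

RestDataProvider provider = new RestfulDataAccessor(new RestClientBuilder());
OperationResult result = provider.doGet("http://localhost:9200/_cluster/health", "application/json");
if (result.wasSuccessful()) { // true for any 2xx result code
  System.out.println(result.getResult());
} else {
  System.err.println("request failed, code=" + result.getResultCode()
      + " after " + result.getNumRequestRetries() + " retries");
}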
* You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.config.ClientConfig; +import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.client.urlconnection.HTTPSProperties; + +import javax.net.ssl.HostnameVerifier; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSession; + +import org.openecomp.sparky.security.SecurityContextFactory; +import org.openecomp.sparky.security.SecurityContextFactoryImpl; + +/** + * A generic REST client builder with flexible security validation. Server certificate chain + * validation and hostname validation can be disabled to work around lab issues, while complete + * validation (client certificate plus hostname plus server certificate chain) remains available. + * The implementation is based on the ModelLoader REST client, merged with the TSUI client, which + * also validates the server hostname and server certificate chain. + * + * @author DAVEA + */ +public class RestClientBuilder { + + /* + * TODO: implement fluent interface? + */ + + private boolean useHttps; + private boolean validateServerHostname; + private int connectTimeoutInMs; + private int readTimeoutInMs; + protected SecurityContextFactory sslContextFactory; + + /** + * Instantiates a new rest client builder.
+ */ + public RestClientBuilder() { + validateServerHostname = false; + connectTimeoutInMs = 60000; + readTimeoutInMs = 60000; + useHttps = true; + sslContextFactory = new SecurityContextFactoryImpl(); + } + + public SecurityContextFactory getSslContextFactory() { + return sslContextFactory; + } + + public void setSslContextFactory(SecurityContextFactory sslContextFactory) { + this.sslContextFactory = sslContextFactory; + } + + public boolean isUseHttps() { + return useHttps; + } + + public void setUseHttps(boolean useHttps) { + this.useHttps = useHttps; + } + + public int getConnectTimeoutInMs() { + return connectTimeoutInMs; + } + + public void setConnectTimeoutInMs(int connectTimeoutInMs) { + this.connectTimeoutInMs = connectTimeoutInMs; + } + + public int getReadTimeoutInMs() { + return readTimeoutInMs; + } + + public void setReadTimeoutInMs(int readTimeoutInMs) { + this.readTimeoutInMs = readTimeoutInMs; + } + + public boolean isValidateServerHostname() { + return validateServerHostname; + } + + public void setValidateServerHostname(boolean validateServerHostname) { + this.validateServerHostname = validateServerHostname; + } + + public Client getClient() throws Exception { + + Client client = null; + ClientConfig clientConfig = new DefaultClientConfig(); + + if (useHttps) { + SSLContext sslContext = sslContextFactory.getSecureContext(); + + if (validateServerHostname) { + + clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, + new HTTPSProperties(null, sslContext)); + + } else { + clientConfig.getProperties().put(HTTPSProperties.PROPERTY_HTTPS_PROPERTIES, + new HTTPSProperties(new HostnameVerifier() { + @Override + public boolean verify(String string, SSLSession sslSession) { + return true; + } + }, sslContext)); + + } + } + + client = Client.create(clientConfig); + + client.setConnectTimeout(connectTimeoutInMs); + client.setReadTimeout(readTimeoutInMs); + + return client; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/RestDataProvider.java b/src/main/java/org/openecomp/sparky/dal/rest/RestDataProvider.java new file mode 100644 index 0000000..15dad28 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/RestDataProvider.java @@ -0,0 +1,112 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + +/** + * The Interface RestDataProvider. + */ +public interface RestDataProvider { + + /** + * Do get. 
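Editor's note: a hedged usage sketch of the builder above for a lab environment where hostname validation is relaxed, as the class comment describes; timeout values are illustrative:

RestClientBuilder clientBuilder = new RestClientBuilder();
clientBuilder.setUseHttps(true);
clientBuilder.setValidateServerHostname(false); // lab work-around only; keep true in production
clientBuilder.setConnectTimeoutInMs(30000);
clientBuilder.setReadTimeoutInMs(30000);
Client restClient = clientBuilder.getClient(); // may throw if the SSL context cannot be built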
+ * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doGet(String url, String acceptContentType); + + /** + * Do delete. + * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doDelete(String url, String acceptContentType); + + /** + * Do post. + * + * @param url the url + * @param jsonPayload the json payload + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doPost(String url, String jsonPayload, String acceptContentType); + + /** + * Do put. + * + * @param url the url + * @param jsonPayload the json payload + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doPut(String url, String jsonPayload, String acceptContentType); + + /** + * Do patch. + * + * @param url the url + * @param jsonPayload the json payload + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doPatch(String url, String jsonPayload, String acceptContentType); + + /** + * Do head. + * + * @param url the url + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doHead(String url, String acceptContentType); + + /** + * Do restful operation. + * + * @param method the method + * @param url the url + * @param payload the payload + * @param payloadType the payload type + * @param acceptContentType the accept content type + * @return the operation result + */ + OperationResult doRestfulOperation(HttpMethod method, String url, String payload, + String payloadType, String acceptContentType); + + /** + * Shutdown. + */ + void shutdown(); + + /** + * Clear cache. + */ + void clearCache(); +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/RestOperationalStatistics.java b/src/main/java/org/openecomp/sparky/dal/rest/RestOperationalStatistics.java new file mode 100644 index 0000000..7b0ca48 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/RestOperationalStatistics.java @@ -0,0 +1,256 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + +import org.openecomp.sparky.analytics.AbstractStatistics; +import org.openecomp.sparky.dal.NetworkTransaction; + +/** + * The Class RestOperationalStatistics. 
+ */ +public class RestOperationalStatistics extends AbstractStatistics { + + private static final String GET_1XX = "GET_1XX"; + private static final String GET_2XX = "GET_2XX"; + private static final String GET_3XX = "GET_3XX"; + private static final String GET_4XX = "GET_4XX"; + private static final String GET_5XX = "GET_5XX"; + private static final String GET_6XX = "GET_6XX"; + + private static final String PUT_1XX = "PUT_1XX"; + private static final String PUT_2XX = "PUT_2XX"; + private static final String PUT_3XX = "PUT_3XX"; + private static final String PUT_4XX = "PUT_4XX"; + private static final String PUT_5XX = "PUT_5XX"; + private static final String PUT_6XX = "PUT_6XX"; + + private static final String POST_1XX = "POST_1XX"; + private static final String POST_2XX = "POST_2XX"; + private static final String POST_3XX = "POST_3XX"; + private static final String POST_4XX = "POST_4XX"; + private static final String POST_5XX = "POST_5XX"; + private static final String POST_6XX = "POST_6XX"; + + private static final String DELETE_1XX = "DELETE_1XX"; + private static final String DELETE_2XX = "DELETE_2XX"; + private static final String DELETE_3XX = "DELETE_3XX"; + private static final String DELETE_4XX = "DELETE_4XX"; + private static final String DELETE_5XX = "DELETE_5XX"; + private static final String DELETE_6XX = "DELETE_6XX"; + + /** + * Creates the counters. + */ + private void createCounters() { + + addCounter(GET_1XX); + addCounter(GET_2XX); + addCounter(GET_3XX); + addCounter(GET_4XX); + addCounter(GET_5XX); + addCounter(GET_6XX); + + addCounter(PUT_1XX); + addCounter(PUT_2XX); + addCounter(PUT_3XX); + addCounter(PUT_4XX); + addCounter(PUT_5XX); + addCounter(PUT_6XX); + + addCounter(POST_1XX); + addCounter(POST_2XX); + addCounter(POST_3XX); + addCounter(POST_4XX); + addCounter(POST_5XX); + addCounter(POST_6XX); + + addCounter(DELETE_1XX); + addCounter(DELETE_2XX); + addCounter(DELETE_3XX); + addCounter(DELETE_4XX); + addCounter(DELETE_5XX); + addCounter(DELETE_6XX); + + + } + + /** + * Gets the result code. + * + * @param txn the txn + * @return the result code + */ + private int getResultCode(NetworkTransaction txn) { + + if (txn == null) { + return -1; + } + + if (txn.getOperationResult() == null) { + return -1; + } + + return txn.getOperationResult().getResultCode(); + + } + + /** + * Update counters. 
+ * + * @param txn the txn + */ + public void updateCounters(NetworkTransaction txn) { + + if (txn == null) { + return; + } + + int rc = getResultCode(txn); + + switch (txn.getOperationType()) { + + case GET: { + + if (100 <= rc && rc <= 199) { + pegCounter(GET_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(GET_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(GET_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(GET_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(GET_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(GET_6XX); + } + + break; + } + + case PUT: { + + if (100 <= rc && rc <= 199) { + pegCounter(PUT_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(PUT_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(PUT_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(PUT_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(PUT_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(PUT_6XX); + } + + break; + } + + case POST: { + + if (100 <= rc && rc <= 199) { + pegCounter(POST_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(POST_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(POST_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(POST_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(POST_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(POST_6XX); + } + + break; + } + + case DELETE: { + + if (100 <= rc && rc <= 199) { + pegCounter(DELETE_1XX); + } else if (200 <= rc && rc <= 299) { + pegCounter(DELETE_2XX); + } else if (300 <= rc && rc <= 399) { + pegCounter(DELETE_3XX); + } else if (400 <= rc && rc <= 499) { + pegCounter(DELETE_4XX); + } else if (500 <= rc && rc <= 599) { + pegCounter(DELETE_5XX); + } else if (600 <= rc && rc <= 699) { + pegCounter(DELETE_6XX); + } + + break; + } + + default: { + // not expecting anything else yet + } + + } + + } + + /** + * Instantiates a new rest operational statistics. 
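Editor's note: the per-method if/else ladders above all implement the same century bucketing. A compact equivalent, shown only to make the mapping explicit (this helper is not part of the commit):

static String bucketFor(HttpMethod method, int resultCode) {
  if (resultCode < 100 || resultCode > 699) {
    return null; // the ladders above peg nothing outside 100-699
  }
  return method + "_" + (resultCode / 100) + "XX"; // e.g. a GET returning 503 -> "GET_5XX"
}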
+ */ + public RestOperationalStatistics() { + createCounters(); + } + + public String getStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n ") + .append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", + HttpMethod.DELETE, getCounterValue(DELETE_1XX), getCounterValue(DELETE_2XX), + getCounterValue(DELETE_3XX), getCounterValue(DELETE_4XX), getCounterValue(DELETE_5XX), + getCounterValue(DELETE_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.PUT, + getCounterValue(PUT_1XX), getCounterValue(PUT_2XX), getCounterValue(PUT_3XX), + getCounterValue(PUT_4XX), getCounterValue(PUT_5XX), getCounterValue(PUT_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.POST, + getCounterValue(POST_1XX), getCounterValue(POST_2XX), getCounterValue(POST_3XX), + getCounterValue(POST_4XX), getCounterValue(POST_5XX), getCounterValue(POST_6XX))); + + sb.append("\n ").append(String.format( + "%-12s 1XX: %-12d 2XX: %-12d 3XX: %-12d 4XX: %-12d 5XX: %-12d 6XX: %-12d ", HttpMethod.GET, + getCounterValue(GET_1XX), getCounterValue(GET_2XX), getCounterValue(GET_3XX), + getCounterValue(GET_4XX), getCounterValue(GET_5XX), getCounterValue(GET_6XX))); + + return sb.toString(); + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/rest/RestfulDataAccessor.java b/src/main/java/org/openecomp/sparky/dal/rest/RestfulDataAccessor.java new file mode 100644 index 0000000..1c2fb07 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/rest/RestfulDataAccessor.java @@ -0,0 +1,357 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.rest; + +import java.security.SecureRandom; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.cache.EntityCache; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.util.NodeUtils; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.WebResource.Builder; + +/** + * The Class RestfulDataAccessor. 
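Editor's note: the format strings above render one row per HTTP method; an illustrative fragment of the report (the counts are invented):

//  DELETE       1XX: 0            2XX: 12           3XX: 0            4XX: 1            5XX: 0            6XX: 0
//  PUT          1XX: 0            2XX: 340          3XX: 0            4XX: 2            5XX: 1            6XX: 0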
+ */ +public class RestfulDataAccessor implements RestDataProvider { + + protected SecureRandom txnIdGenerator; + + protected RestClientBuilder clientBuilder; + + protected EntityCache entityCache; + private boolean cacheEnabled; + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(RestfulDataAccessor.class); + + private boolean resourceNotFoundErrorsSurpressed; + + public static final String APPLICATION_JSON = "application/json"; + public static final String APPLICATION_MERGE_PATCH_JSON = "application/merge-patch+json"; + public static final String APPLICATION_X_WWW_FORM_URL_ENCODED = + "application/x-www-form-urlencoded"; + + + /** + * Instantiates a new restful data accessor. + * + * @param clientBuilder the client builder + */ + public RestfulDataAccessor(RestClientBuilder clientBuilder) { + this.clientBuilder = clientBuilder; + txnIdGenerator = new SecureRandom(); + resourceNotFoundErrorsSurpressed = false; + cacheEnabled = false; + entityCache = null; + } + + protected boolean isCacheEnabled() { + return cacheEnabled; + } + + public void setCacheEnabled(boolean cacheEnabled) { + this.cacheEnabled = cacheEnabled; + } + + protected EntityCache getEntityCache() { + return entityCache; + } + + public void setEntityCache(EntityCache entityCache) { + this.entityCache = entityCache; + } + + /** + * Cache result. + * + * @param result the result + */ + private void cacheResult(OperationResult result) { + if (cacheEnabled && entityCache != null) { + final String id = + NodeUtils.generateUniqueShaDigest(result.getRequestLink(), result.getRequestPayload()); + entityCache.put(id, result); + } + } + + /** + * Populate operation result. + * + * @param response the response + * @param opResult the op result + */ + protected void populateOperationResult(ClientResponse response, OperationResult opResult) { + + if (response == null) { + opResult.setResult(500, "Client response was null"); + return; + } + + int statusCode = response.getStatus(); + String payload = response.getEntity(String.class); + + opResult.setResult(statusCode, payload); + + } + + /** + * Gets the cached data. + * + * @param link the link + * @param payload the payload + * @return the cached data + */ + private OperationResult getCachedData(String link, String payload) { + if (cacheEnabled && entityCache != null) { + final String id = NodeUtils.generateUniqueShaDigest(link, payload); + return entityCache.get(id, link); + } + return null; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doRestfulOperation(org.openecomp.sparky.dal.rest.HttpMethod, java.lang.String, java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doRestfulOperation(HttpMethod method, String url, String payload, + String payloadType, String acceptContentType) { + + ClientResponse clientResponse = null; + + long startTimeInMs = System.currentTimeMillis(); + Client client = null; + Builder builder = null; + + OperationResult operationResult = null; + + /* + * Attempt to get cached data for the requested URL. We don't currently cache the other + * operations. 
+ */ + + operationResult = getCachedData(url, payload); + + if (operationResult != null) { + + /* + * cache-hit, return what we found + */ + + return operationResult; + } + + /* + * else cache miss / cache disabled (default operation) + */ + + operationResult = new OperationResult(); + operationResult.setRequestLink(url); + + try { + + client = clientBuilder.getClient(); + + switch (method) { + case GET: { + builder = setClientDefaults(client, url, null, acceptContentType); + clientResponse = builder.get(ClientResponse.class); + break; + } + + case PUT: { + builder = setClientDefaults(client, url, payloadType, acceptContentType); + clientResponse = builder.put(ClientResponse.class, payload); + break; + } + + case POST: { + builder = setClientDefaults(client, url, payloadType, acceptContentType); + clientResponse = builder.post(ClientResponse.class, payload); + break; + } + + case DELETE: { + builder = setClientDefaults(client, url, null, acceptContentType); + clientResponse = builder.delete(ClientResponse.class); + break; + } + + case PATCH: { + builder = setClientDefaults(client, url, payloadType, acceptContentType); + builder = builder.header("X-HTTP-Method-Override", "PATCH"); + clientResponse = builder.post(ClientResponse.class, payload); + break; + } + + case HEAD: { + builder = setClientDefaults(client, url, null, acceptContentType); + clientResponse = builder.head(); + break; + } + + + default: { + operationResult.setResult(500, "Unhandled HTTP Method operation = " + method); + return operationResult; + } + + } + + } catch (Exception ex) { + LOG.error(AaiUiMsgs.RESTFULL_OP_ERROR_VERBOSE, url, ex.getLocalizedMessage()); + operationResult.setResult(500, + String.format("Error retrieving link = '%s' from restful endpoint due to error = '%s'", + url, ex.getLocalizedMessage())); + return operationResult; + } + + populateOperationResult(clientResponse, operationResult); + + if (operationResult.getResultCode() != 404 || !isResourceNotFoundErrorsSurpressed()) { + LOG.info(AaiUiMsgs.RESTFULL_OP_COMPLETE, method.toString(), + String.valueOf(System.currentTimeMillis() - startTimeInMs), url, + String.valueOf(operationResult.getResultCode())); + } + + cacheResult(operationResult); + + return operationResult; + + } + + public boolean isResourceNotFoundErrorsSurpressed() { + return resourceNotFoundErrorsSurpressed; + } + + public void setResourceNotFoundErrorsSurpressed(boolean resourceNotFoundErrorsSurpressed) { + this.resourceNotFoundErrorsSurpressed = resourceNotFoundErrorsSurpressed; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doGet(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doGet(String url, String acceptContentType) { + return doRestfulOperation(HttpMethod.GET, url, null, null, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doDelete(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doDelete(String url, String acceptContentType) { + return doRestfulOperation(HttpMethod.DELETE, url, null, null, acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPost(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPost(String url, String jsonPayload, String
acceptContentType) { + return doRestfulOperation(HttpMethod.POST, url, jsonPayload, APPLICATION_JSON, + acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPut(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPut(String url, String jsonPayload, String acceptContentType) { + return doRestfulOperation(HttpMethod.PUT, url, jsonPayload, APPLICATION_JSON, + acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doPatch(java.lang.String, java.lang.String, java.lang.String) + */ + @Override + public OperationResult doPatch(String url, String jsonPayload, String acceptContentType) { + return doRestfulOperation(HttpMethod.PATCH, url, jsonPayload, APPLICATION_MERGE_PATCH_JSON, + acceptContentType); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#doHead(java.lang.String, java.lang.String) + */ + @Override + public OperationResult doHead(String url, String acceptContentType) { + return doRestfulOperation(HttpMethod.HEAD, url, null, null, acceptContentType); + } + + /** + * Sets the client defaults. + * + * @param client the client + * @param url the url + * @param payloadContentType the payload content type + * @param acceptContentType the accept content type + * @return the builder + */ + protected Builder setClientDefaults(Client client, String url, String payloadContentType, + String acceptContentType) { + WebResource resource = client.resource(url); + Builder builder = resource.accept(acceptContentType); + + if (payloadContentType != null) { + builder = builder.header("Content-Type", payloadContentType); + } + + return builder; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#shutdown() + */ + @Override + public void shutdown() { + + if (entityCache != null) { + entityCache.shutdown(); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.dal.rest.RestDataProvider#clearCache() + */ + @Override + public void clearCache() { + if (cacheEnabled && entityCache != null) { + entityCache.clear(); + } + + } + +} diff --git a/src/main/java/org/openecomp/sparky/dal/sas/config/SearchServiceConfig.java b/src/main/java/org/openecomp/sparky/dal/sas/config/SearchServiceConfig.java new file mode 100644 index 0000000..1ff2001 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/sas/config/SearchServiceConfig.java @@ -0,0 +1,224 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
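Editor's note: a hedged usage sketch of the accessor above; note that doPatch() is tunneled as a POST carrying an X-HTTP-Method-Override header and the merge-patch content type, per doRestfulOperation(). The URL and payload are illustrative:

RestfulDataAccessor accessor = new RestfulDataAccessor(new RestClientBuilder());
OperationResult patchResult = accessor.doPatch(
    "https://aai-host:8443/aai/v8/cloud-infrastructure/pservers/pserver/server-1",
    "{\"in-maint\":true}", "application/json");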
* ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.sas.config; + +import java.util.Properties; + +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +/** + * The Class SearchServiceConfig. + */ +public class SearchServiceConfig { + + public static final String CONFIG_FILE = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "search-service.properties"; + + private static SearchServiceConfig instance; + + private String ipAddress; + + private String httpPort; + + private String indexName; + + private String auditIndexName; + + private String topographicalSearchIndex; + + private String entityCountHistoryIndex; + + private String version; + + private String type; + + private String certName; + + private String keystorePassword; + + private String keystore; + + private static final String IP_ADDRESS_DEFAULT = "localhost"; + + private static final String HTTP_PORT_DEFAULT = "9509"; + + private static final String INDEX_NAME_DEFAULT = "entitySearchIndex-localhost"; + + private static final String AUDIT_INDEX_NAME_DEFAULT = "di-violations"; + + private static final String TOPOGRAPHICAL_INDEX_NAME_DEFAULT = + "topographicalsearchindex-localhost"; + + private static final String ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT = + "entitycounthistoryindex-localhost"; + + private static final String VERSION_DEFAULT = "v1"; + + public static SearchServiceConfig getConfig() throws Exception { + + if (instance == null) { + instance = new SearchServiceConfig(); + instance.initializeProperties(); + } + + return instance; + } + + public static void setConfig(SearchServiceConfig config) { + SearchServiceConfig.instance = config; + } + + /** + * Instantiates a new search service config. + */ + public SearchServiceConfig() { + // default constructor + } + + /** + * Initialize properties.
+ */ + private void initializeProperties() { + Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); + + Properties sasProps = ConfigHelper.getConfigWithPrefix("search-service", props); + + ipAddress = sasProps.getProperty("ipAddress", IP_ADDRESS_DEFAULT); + httpPort = sasProps.getProperty("httpPort", "" + HTTP_PORT_DEFAULT); + version = sasProps.getProperty("version", "" + VERSION_DEFAULT); + indexName = sasProps.getProperty("indexName", INDEX_NAME_DEFAULT); + auditIndexName = sasProps.getProperty("auditIndexName", AUDIT_INDEX_NAME_DEFAULT); + topographicalSearchIndex = sasProps.getProperty("topographicalIndexName", + TOPOGRAPHICAL_INDEX_NAME_DEFAULT); + entityCountHistoryIndex = sasProps.getProperty("entityCountHistoryIndexName", + ENTITY_COUNT_HISTORY_INDEX_NAME_DEFAULT); + certName = + sasProps.getProperty("ssl.cert-name", "aai-client-cert.p12"); + keystorePassword = sasProps.getProperty("ssl.keystore-password", + "OBF:1i9a1u2a1unz1lr61wn51wn11lss1unz1u301i6o"); + keystore = sasProps.getProperty("ssl.keystore", "tomcat_keystore"); + } + + public String getIpAddress() { + return ipAddress; + } + + public void setIpAddress(String ipAddress) { + this.ipAddress = ipAddress; + } + + public String getHttpPort() { + return httpPort; + } + + public void setHttpPort(String httpPort) { + this.httpPort = httpPort; + } + + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getAuditIndexName() { + return auditIndexName; + } + + public void setAuditIndexName(String auditIndexName) { + this.auditIndexName = auditIndexName; + } + + public String getTopographicalSearchIndex() { + return topographicalSearchIndex; + } + + public void setTopographicalSearchIndex(String topographicalSearchIndex) { + this.topographicalSearchIndex = topographicalSearchIndex; + } + + public String getEntityCountHistoryIndex() { + return entityCountHistoryIndex; + } + + public void setEntityCountHistoryIndex(String entityCountHistoryIndex) { + this.entityCountHistoryIndex = entityCountHistoryIndex; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + + public String getCertName() { + return certName; + } + + public void setCertName(String certName) { + this.certName = certName; + } + + public String getKeystorePassword() { + return keystorePassword; + } + + public void setKeystorePassword(String keystorePassword) { + this.keystorePassword = keystorePassword; + } + + public String getKeystore() { + return keystore; + } + + public void setKeystore(String keystore) { + this.keystore = keystore; + } + + @Override + public String toString() { + return "SearchServiceConfig [ipAddress=" + ipAddress + ", httpPort=" + httpPort + ", indexName=" + + indexName + ", auditIndexName=" + auditIndexName + ", topographicalSearchIndex=" + + topographicalSearchIndex + ", entityCountHistoryIndex=" + entityCountHistoryIndex + + ", version=" + version + ", type=" + type + "]"; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/dal/servlet/ResettableStreamHttpServletRequest.java b/src/main/java/org/openecomp/sparky/dal/servlet/ResettableStreamHttpServletRequest.java new file mode 100644 index 0000000..d7b1e6b --- /dev/null +++ b/src/main/java/org/openecomp/sparky/dal/servlet/ResettableStreamHttpServletRequest.java @@ -0,0 
+1,129 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.dal.servlet; + +import com.google.common.primitives.Bytes; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; + +import javax.servlet.ReadListener; +import javax.servlet.ServletInputStream; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; + +/** + * The Class ResettableStreamHttpServletRequest. + */ +public class ResettableStreamHttpServletRequest extends HttpServletRequestWrapper { + + private byte[] requestBody = new byte[0]; + private boolean bufferFilled = false; + + /** + * Constructs a request object wrapping the given request. + * + * @param request The request to wrap + * @throws IllegalArgumentException if the request is null + */ + public ResettableStreamHttpServletRequest(HttpServletRequest request) { + super(request); + } + + /** + * Get request body. + * + * @return Bytes with the request body contents. + * @throws IOException In case stream reading fails. + */ + public byte[] getRequestBody() throws IOException { + if (bufferFilled) { + return Arrays.copyOf(requestBody, requestBody.length); + } + + InputStream inputStream = super.getInputStream(); + + byte[] buffer = new byte[102400]; + + int bytesRead; + while ((bytesRead = inputStream.read(buffer)) != -1) { + requestBody = Bytes.concat(this.requestBody, Arrays.copyOfRange(buffer, 0, bytesRead)); + } + + bufferFilled = true; + + return requestBody; + } + + @Override + public ServletInputStream getInputStream() throws IOException { + return new CustomServletInputStream(getRequestBody()); + } + + /** + * The Class CustomServletInputStream. + */ + private static class CustomServletInputStream extends ServletInputStream { + + private ByteArrayInputStream buffer; + + /** + * Instantiates a new custom servlet input stream.
+ * + * @param contents the contents + */ + public CustomServletInputStream(byte[] contents) { + this.buffer = new ByteArrayInputStream(contents); + } + + /* (non-Javadoc) + * @see java.io.InputStream#read() + */ + @Override + public int read() throws IOException { + return buffer.read(); + } + + @Override + public boolean isFinished() { + return buffer.available() == 0; + } + + @Override + public boolean isReady() { + return true; + } + + @Override + public void setReadListener(ReadListener arg0) { + throw new RuntimeException("Not implemented"); + } + + } + +} diff --git a/src/main/java/org/openecomp/sparky/inventory/EntityHistoryQueryBuilder.java b/src/main/java/org/openecomp/sparky/inventory/EntityHistoryQueryBuilder.java new file mode 100644 index 0000000..534af4a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/inventory/EntityHistoryQueryBuilder.java @@ -0,0 +1,144 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.inventory; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +/** + * The Class EntityHistoryQueryBuilder. + */ +public class EntityHistoryQueryBuilder { + + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + /** + * Gets the query. + * + * @param type the type + * @return the query + */ + public static JsonObject getQuery(String type) { + if (type.equalsIgnoreCase(TABLE)) { + return createTableQuery(); + } else if (type.equalsIgnoreCase(GRAPH)) { + return createGraphQuery(); + } else { + return null; + } + } + + /** + * Creates the graph query. + * + * @return the json object + */ + public static JsonObject createGraphQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("group_by_date", + Json.createObjectBuilder().add("date_histogram", createDateHistogram()) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the table query. 
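Editor's note: the wrapper above exists so a request body can be read more than once. A hedged sketch of a servlet filter using it (the filter itself is an assumption, not part of this commit):

public class BodyCaptureFilter implements javax.servlet.Filter {
  @Override
  public void doFilter(javax.servlet.ServletRequest req, javax.servlet.ServletResponse res,
      javax.servlet.FilterChain chain) throws java.io.IOException, javax.servlet.ServletException {
    ResettableStreamHttpServletRequest wrapped =
        new ResettableStreamHttpServletRequest((javax.servlet.http.HttpServletRequest) req);
    byte[] body = wrapped.getRequestBody(); // buffer the payload; inspect or audit 'body' here
    chain.doFilter(wrapped, res); // downstream readers still see the full body
  }

  @Override
  public void init(javax.servlet.FilterConfig config) {}

  @Override
  public void destroy() {}
}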
+ * + * @return the json object + */ + public static JsonObject createTableQuery() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("aggs", + Json.createObjectBuilder().add("group_by_entityType", + Json.createObjectBuilder() + .add("terms", Json.createObjectBuilder().add("field", "entityType").add("size", 0)) + .add("aggs", Json.createObjectBuilder().add("sort_by_date", + Json.createObjectBuilder().add("top_hits", createTopHitsBlob()))))); + jsonBuilder.add("size", 0); + + return jsonBuilder.build(); + } + + /** + * Creates the date histogram. + * + * @return the json object + */ + private static JsonObject createDateHistogram() { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + jsonBuilder.add("field", "timestamp"); + jsonBuilder.add("min_doc_count", 1); + jsonBuilder.add("interval", "day"); + jsonBuilder.add("format", "epoch_millis"); + + return jsonBuilder.build(); + } + + /** + * Creates the top hits blob. + * + * @return the json object + */ + private static JsonObject createTopHitsBlob() { + JsonObjectBuilder builder = Json.createObjectBuilder(); + builder.add("size", 1); + builder.add("sort", getSortCriteria()); + return builder.build(); + } + + public static JsonArray getSortCriteria() { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add("timestamp", + Json.createObjectBuilder().add("order", "desc"))); + + return jsonBuilder.build(); + } + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + System.out.println("TABLE-QUERY: " + createTableQuery().toString()); + System.out.println("GRAPH_QUERY: " + createGraphQuery().toString()); + } + +} diff --git a/src/main/java/org/openecomp/sparky/inventory/entity/GeoIndexDocument.java b/src/main/java/org/openecomp/sparky/inventory/entity/GeoIndexDocument.java new file mode 100644 index 0000000..7ea1e44 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/inventory/entity/GeoIndexDocument.java @@ -0,0 +1,322 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
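Editor's note: for reference, a hedged sketch of the aggregation body that createGraphQuery() assembles from the helpers above (whitespace added, fragments abbreviated):

// {
//   "aggs": { "group_by_entityType": {
//     "terms": { "field": "entityType", "size": 0 },
//     "aggs": { "group_by_date": {
//       "date_histogram": { "field": "timestamp", "min_doc_count": 1,
//                           "interval": "day", "format": "epoch_millis" },
//       "aggs": { "sort_by_date": { "top_hits": {
//         "size": 1, "sort": [ { "timestamp": { "order": "desc" } } ] } } }
//     } }
//   } },
//   "size": 0
// }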
+ */ + +package org.openecomp.sparky.inventory.entity; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import java.io.Serializable; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.List; + +import javax.json.Json; +import javax.json.JsonObject; + +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.synchronizer.entity.IndexDocument; +import org.openecomp.sparky.util.NodeUtils; + +/** + * The Class GeoIndexDocument. + */ +public class GeoIndexDocument implements Serializable, IndexDocument { + + private static final long serialVersionUID = -5188479658230319058L; + + protected String entityType; + protected String entityPrimaryKeyValue; + protected String entityPrimaryKeyName; + protected String latitude; + protected String longitude; + protected String selfLink; + protected OxmModelLoader loader; + protected ObjectMapper mapper = new ObjectMapper(); + // generated, SHA-256 digest + protected String id; + + /** + * Instantiates a new geo index document. + * + * @param loader the loader + */ + public GeoIndexDocument(OxmModelLoader loader) { + this(); + this.loader = loader; + } + + /** + * Convert bytes to hex string. + * + * @param bytesToConvert the bytes to convert + * @return the string + */ + private static String convertBytesToHexString(byte[] bytesToConvert) { + StringBuffer hexString = new StringBuffer(); + for (int i = 0; i < bytesToConvert.length; i++) { + hexString.append(Integer.toHexString(0xFF & bytesToConvert[i])); + } + return hexString.toString(); + } + + + + public boolean isValidGeoDocument() { + + boolean isValid = true; + + isValid &= (this.getEntityType() != null); + isValid &= (this.getLatitude() != null); + isValid &= (this.getLongitude() != null); + isValid &= (this.getId() != null); + isValid &= (this.getSelfLink() != null); + + isValid &= NodeUtils.isNumeric(this.getLatitude()); + isValid &= NodeUtils.isNumeric(this.getLongitude()); + + return isValid; + } + + /** + * Concat array. + * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + private static String concatArray(List<String> list, char delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + int listSize = list.size(); + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. The + * best we can hope for is identification of resources by generated Id until the Identity-Service + * UUID is tagged against all resources, then we can use that instead. + */ + + /** + * Generate unique sha digest. 
+ * + * @param entityType the entity type + * @param fieldName the field name + * @param fieldValue the field value + * @return the string + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public static String generateUniqueShaDigest(String entityType, String fieldName, + String fieldValue) throws NoSuchAlgorithmException { + + /* + * Basically SHA-256 will result in an identity with a guaranteed uniqueness compared to just a + * java hashcode value. + */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new geo index document. + */ + public GeoIndexDocument() {} + + /* + * (non-Javadoc) + * + * @see com.att.queryrouter.dao.DocumentStoreDataEntity#getAsJson() + */ + @Override + public String getIndexDocumentJson() { + + JsonObject obj = null; + + if (latitude != null && longitude != null) { + obj = + Json.createObjectBuilder().add("entityType", entityType) + .add("pkey", entityPrimaryKeyValue) + .add("location", + Json.createObjectBuilder().add("lat", latitude).add("lon", longitude)) + .add("selfLink", selfLink).build(); + + } else { + obj = Json.createObjectBuilder().add("entityType", entityType) + .add("pkey", entityPrimaryKeyValue).add("selfLink", selfLink).build(); + } + + return obj.toString(); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. 
+     */
+
+    OxmEntityDescriptor descriptor = loader.getEntityDescriptor(entityType);
+    String entityPrimaryKeyName = NodeUtils.concatArray(
+        descriptor.getPrimaryKeyAttributeName(), "/");
+
+    this.id =
+        NodeUtils.generateUniqueShaDigest(entityType, entityPrimaryKeyName, entityPrimaryKeyValue);
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "GeoIndexDocument [" + ("entityType=" + entityType + ", ")
+        + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ")
+        + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ")
+        + ("selfLink=" + selfLink) + "]";
+  }
+
+  @Override
+  public String getId() {
+    return this.id;
+  }
+
+  public String getEntityType() {
+    return entityType;
+  }
+
+  public void setEntityType(String entityType) {
+    this.entityType = entityType;
+  }
+
+  public String getEntityPrimaryKeyValue() {
+    return entityPrimaryKeyValue;
+  }
+
+  public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) {
+    this.entityPrimaryKeyValue = entityPrimaryKeyValue;
+  }
+
+  public String getEntityPrimaryKeyName() {
+    return entityPrimaryKeyName;
+  }
+
+  public void setEntityPrimaryKeyName(String entityPrimaryKeyName) {
+    this.entityPrimaryKeyName = entityPrimaryKeyName;
+  }
+
+  public String getLatitude() {
+    return latitude;
+  }
+
+  public void setLatitude(String latitude) {
+    this.latitude = latitude;
+  }
+
+  public String getLongitude() {
+    return longitude;
+  }
+
+  public void setLongitude(String longitude) {
+    this.longitude = longitude;
+  }
+
+  public String getSelfLink() {
+    return selfLink;
+  }
+
+  public void setSelfLink(String selfLink) {
+    this.selfLink = selfLink;
+  }
+
+  public static long getSerialversionuid() {
+    return serialVersionUID;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public ObjectNode getBulkImportEntity() {
+    return buildImportDataObject(this.entityType, this.entityPrimaryKeyValue, this.selfLink,
+        this.latitude, this.longitude);
+  }
+
+  /**
+   * Builds the import data object.
+   *
+   * @param entityType the entity type
+   * @param entityPrimaryKeyValue the entity primary key value
+   * @param selfLink the self link
+   * @param latitude the latitude
+   * @param longitude the longitude
+   * @return the object node
+   */
+  @SuppressWarnings("deprecation")
+  protected ObjectNode buildImportDataObject(String entityType, String entityPrimaryKeyValue,
+      String selfLink, String latitude, String longitude) {
+    ObjectNode childNode = mapper.createObjectNode();
+    childNode.put("lat", latitude);
+    childNode.put("lon", longitude);
+    ObjectNode parentNode = mapper.createObjectNode();
+
+    parentNode.put("entityType", entityType);
+    parentNode.put("pkey", entityPrimaryKeyValue);
+    parentNode.put("selfLink", selfLink);
+    parentNode.put("location", childNode);
+
+    return parentNode;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/inventory/entity/TopographicalEntity.java b/src/main/java/org/openecomp/sparky/inventory/entity/TopographicalEntity.java
new file mode 100644
index 0000000..da52728
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/inventory/entity/TopographicalEntity.java
@@ -0,0 +1,221 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved. 
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.inventory.entity;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+
+import javax.json.Json;
+import javax.json.JsonObject;
+
+/**
+ * The Class TopographicalEntity.
+ */
+public class TopographicalEntity implements Serializable {
+
+  private static final long serialVersionUID = -5188479658230319058L;
+
+  protected String entityType;
+  protected String entityPrimaryKeyValue;
+  protected String entityPrimaryKeyName;
+  protected String latitude;
+  protected String longitude;
+  protected String selfLink;
+
+  // generated, SHA-256 digest
+  protected String id;
+
+  /**
+   * Convert bytes to hex string.
+   *
+   * @param bytesToConvert the bytes to convert
+   * @return the string
+   */
+  private static String convertBytesToHexString(byte[] bytesToConvert) {
+    StringBuilder hexString = new StringBuilder();
+    for (int i = 0; i < bytesToConvert.length; i++) {
+      // zero-pad each byte so every byte maps to exactly two hex characters;
+      // otherwise distinct digests could collapse to the same string form
+      hexString.append(String.format("%02x", 0xFF & bytesToConvert[i]));
+    }
+    return hexString.toString();
+  }
+
+  /**
+   * Concat array.
+   *
+   * @param list the list
+   * @param delimiter the delimiter
+   * @return the string
+   */
+  private static String concatArray(List<String> list, char delimiter) {
+
+    if (list == null || list.size() == 0) {
+      return "";
+    }
+
+    StringBuilder result = new StringBuilder(64);
+
+    boolean firstValue = true;
+
+    for (String item : list) {
+
+      if (firstValue) {
+        result.append(item);
+        firstValue = false;
+      } else {
+        result.append(delimiter).append(item);
+      }
+
+    }
+
+    return result.toString();
+
+  }
+
+  /*
+   * We'll try to create a unique identity key that we can use for differencing the previously
+   * imported record sets, as we won't have granular control of what is created/removed and when.
+   * The best we can hope for is identification of resources by generated id until the
+   * Identity-Service UUID is tagged against all resources; then we can use that instead.
+   */
+
+  /**
+   * Generate unique sha digest.
+   *
+   * @param entityType the entity type
+   * @param fieldName the field name
+   * @param fieldValue the field value
+   * @return the string
+   * @throws NoSuchAlgorithmException the no such algorithm exception
+   */
+  public static String generateUniqueShaDigest(String entityType, String fieldName,
+      String fieldValue) throws NoSuchAlgorithmException {
+
+    /*
+     * SHA-256 gives us an effectively collision-free identity, which a plain 32-bit Java
+     * hash code cannot. 
+ */ + MessageDigest digest = MessageDigest.getInstance("SHA-256"); + digest.update(String.format("%s.%s.%s", entityType, fieldName, fieldValue).getBytes()); + return convertBytesToHexString(digest.digest()); + } + + /** + * Instantiates a new topographical entity. + */ + public TopographicalEntity() {} + + /* + * (non-Javadoc) + * + * @see com.att.queryrouter.dao.DocumentStoreDataEntity#getAsJson() + */ + public String getAsJson() throws IOException { + + JsonObject obj = + Json.createObjectBuilder().add("entityType", entityType).add("pkey", entityPrimaryKeyValue) + .add("location", Json.createObjectBuilder().add("lat", latitude).add("lon", longitude)) + .add("selfLink", selfLink).build(); + + return obj.toString(); + } + + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "TopographicalEntity [" + ("entityType=" + entityType + ", ") + + ("entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", ") + + ("latitude=" + latitude + ", ") + ("longitude=" + longitude + ", ") + ("ID=" + id + ", ") + + ("selfLink=" + selfLink) + "]"; + } + + public String getId() { + return this.id; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + public String getEntityPrimaryKeyName() { + return entityPrimaryKeyName; + } + + public void setEntityPrimaryKeyName(String entityPrimaryKeyName) { + this.entityPrimaryKeyName = entityPrimaryKeyName; + } + + public String getLatitude() { + return latitude; + } + + public void setLatitude(String latitude) { + this.latitude = latitude; + } + + public String getLongitude() { + return longitude; + } + + public void setLongitude(String longitude) { + this.longitude = longitude; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public static long getSerialversionuid() { + return serialVersionUID; + } + + public void setId(String id) { + this.id = id; + } + +} diff --git a/src/main/java/org/openecomp/sparky/inventory/servlet/EntityCountHistoryServlet.java b/src/main/java/org/openecomp/sparky/inventory/servlet/EntityCountHistoryServlet.java new file mode 100644 index 0000000..c3d96cf --- /dev/null +++ b/src/main/java/org/openecomp/sparky/inventory/servlet/EntityCountHistoryServlet.java @@ -0,0 +1,376 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.inventory.servlet; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; + +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.ByteBuffer; +import java.security.SecureRandom; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.TreeMap; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.json.JSONArray; +import org.json.JSONObject; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestClientBuilder; +import org.openecomp.sparky.inventory.EntityHistoryQueryBuilder; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.util.ServletUtils; +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; + +/** + * The Class EntityCountHistoryServlet. + */ +public class EntityCountHistoryServlet extends HttpServlet { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(EntityCountHistoryServlet.class); + + private static final long serialVersionUID = 1L; + + private SearchAdapter search = null; + private ElasticSearchConfig elasticConfig = null; + private VisualizationConfig visualConfig = null; + private ObjectMapper mapper; + + private static final String SEARCH_STRING = "_search"; + private static final String TABLE = "table"; + private static final String GRAPH = "graph"; + + private List<String> vnfEntityTypesToSummarize; + private boolean summarizevnf = false; + + /** + * Instantiates a new entity count history servlet. 
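+   * <p>Construction delegates to {@link #init()}, which resolves the ElasticSearch and
+   * visualization configurations and builds the SearchAdapter used for the history queries.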
+   *
+   * @throws ServletException the servlet exception
+   */
+  public EntityCountHistoryServlet() throws ServletException {
+    init();
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.GenericServlet#init()
+   */
+  @Override
+  public void init() throws ServletException {
+    super.init();
+    try {
+      if (elasticConfig == null) {
+        elasticConfig = ElasticSearchConfig.getConfig();
+      }
+      if (visualConfig == null) {
+        visualConfig = VisualizationConfig.getConfig();
+        vnfEntityTypesToSummarize =
+            Arrays.asList(visualConfig.getVnfEntityTypes().toLowerCase().split("[\\s,]+"));
+        summarizevnf = visualConfig.getEntityTypesToSummarize().toLowerCase().contains("vnf");
+      }
+      if (search == null) {
+        search = new SearchAdapter();
+      }
+      this.mapper = new ObjectMapper();
+      this.mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
+    } catch (Exception exc) {
+      // propagate configuration failures to the container instead of silently swallowing them
+      throw new ServletException(
+          "Caught an exception while getting an instance of servlet configuration.", exc);
+    }
+  }
+
+  public void setSearch(SearchAdapter search) {
+    this.search = search;
+  }
+
+  public void setElasticConfig(ElasticSearchConfig elasticConfig) {
+    this.elasticConfig = elasticConfig;
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
+   */
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    String txnID = request.getHeader("X-TransactionId");
+    if (txnID == null) {
+      txnID = NodeUtils.getRandomTxnId();
+    }
+
+    String partnerName = request.getHeader("X-FromAppId");
+    if (partnerName == null) {
+      partnerName = "Browser";
+    }
+
+    MdcContext.initialize(txnID, "AAI-UI", "", partnerName,
+        request.getRemoteAddr());
+
+    @SuppressWarnings("unused")
+    OperationResult operationResult = null;
+    if (request.getParameter("type") != null
+        && (request.getParameter("type").equalsIgnoreCase(TABLE)
+            || request.getParameter("type").equalsIgnoreCase(GRAPH))) {
+      try {
+        operationResult = getResults(response, request.getParameter("type"));
+      } catch (Exception exc) {
+        LOG.error(AaiUiMsgs.ERROR_SERVLET_PROCESSSING, exc);
+      }
+    } else {
+      ServletUtils.setServletResponse(LOG, true, 501, response,
          ServletUtils.generateJsonErrorResponse("Unsupported request"));
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
+   */
+  @Override
+  protected void doPost(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException {
+
+  }
+
+  /**
+   * Gets the results. 
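+   * <p>Builds the query with EntityHistoryQueryBuilder.getQuery(type), POSTs it to the
+   * configured entity-count-history index's _search endpoint, and reshapes the response
+   * into either the table or the line-graph JSON form.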
+   *
+   * @param response the response
+   * @param type the type
+   * @return the results
+   * @throws Exception the exception
+   */
+  private OperationResult getResults(HttpServletResponse response, String type) throws Exception {
+    OperationResult operationResult = new OperationResult();
+
+    String requestString =
+        String.format("/%s/%s?pretty", elasticConfig.getEntityCountHistoryIndex(), SEARCH_STRING);
+
+    String reqPayload = EntityHistoryQueryBuilder.getQuery(type).toString();
+
+    try {
+      final String fullUrlStr = ServletUtils.getFullUrl(elasticConfig, requestString);
+      OperationResult opResult =
+          ServletUtils.executePostQuery(LOG, search, response, fullUrlStr, reqPayload);
+
+      JSONObject finalOutput = null;
+      if (type.equalsIgnoreCase(TABLE)) {
+        finalOutput = formatTableOutput(opResult.getResult());
+      } else if (type.equalsIgnoreCase(GRAPH)) {
+        finalOutput = formatLineGraphOutput(opResult.getResult());
+      }
+
+      if (finalOutput != null) {
+        response.setContentType("application/json");
+        PrintWriter out = response.getWriter();
+        out.println(finalOutput);
+        out.close();
+      }
+
+    } catch (JsonProcessingException exc) {
+      ServletUtils.handleSearchServletErrors(LOG, "Unable to map JSON payload", exc, response);
+    }
+
+    return operationResult;
+  }
+
+  /**
+   * Format table output.
+   *
+   * @param results the results
+   * @return the JSON object
+   * @throws JsonProcessingException the json processing exception
+   */
+  private JSONObject formatTableOutput(String results) throws JsonProcessingException {
+    JsonNode resultNode = null;
+
+    JSONObject finalResult = new JSONObject();
+    JSONArray entitiesArr = new JSONArray();
+
+    Map<String, Long> entityCountInTable = initializeEntityMap();
+
+    long vnfCount = 0;
+
+    try {
+      resultNode = mapper.readTree(results);
+
+      final JsonNode bucketsNode = getBucketsNode(resultNode);
+      if (bucketsNode.isArray()) {
+
+        for (final JsonNode entityNode : bucketsNode) {
+          String entityType = entityNode.get("key").asText();
+          boolean isAVnf = vnfEntityTypesToSummarize.contains(entityType);
+          long countValue = 0;
+
+          if (isAVnf || entityCountInTable.get(entityType) != null) {
+            final JsonNode hitsBucketNode = entityNode.get("sort_by_date").get("hits").get("hits");
+            if (hitsBucketNode.isArray()) {
+              // the first hit is the latest
+              final JsonNode hitNode = hitsBucketNode.get(0);
+
+              countValue = hitNode.get("_source").get("count").asLong();
+
+              /*
+               * Special case: Add all the VNF types together to get aggregate count
+               */
+              if (summarizevnf && isAVnf) {
+                vnfCount += countValue;
+                countValue = vnfCount;
+                entityType = "vnf";
+              }
+
+              entityCountInTable.replace(entityType, countValue);
+            }
+          }
+
+        }
+      }
+      for (Entry<String, Long> entry : entityCountInTable.entrySet()) {
+        JSONObject entityType = new JSONObject();
+        entityType.put("key", entry.getKey());
+        entityType.put("doc_count", entry.getValue());
+        entitiesArr.put(entityType);
+      }
+
+      finalResult.put("result", entitiesArr);
+
+    } catch (Exception exc) {
+      LOG.warn(AaiUiMsgs.ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, exc.getLocalizedMessage());
+    }
+
+    return finalResult;
+  }
+
+
+  /**
+   * Format line graph output. 
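+   * <p>Returns, for example (values illustrative): {"result":[{"date":1497225600000,
+   * "count":42},{"date":1497312000000,"count":45}]}, one entry per date bucket, sorted
+   * by epoch timestamp.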
+ * + * @param results the results + * @return the JSON object + * @throws JsonProcessingException the json processing exception + */ + private JSONObject formatLineGraphOutput(String results) throws JsonProcessingException { + Map<Long, Long> countByDateMap = new HashMap<Long, Long>(); + + JsonNode resultNode = null; + + JSONObject finalResult = new JSONObject(); + JSONArray finalResultArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode bucketsNode = getBucketsNode(resultNode); + + if (bucketsNode.isArray()) { + + for (final JsonNode entityNode : bucketsNode) { + final JsonNode dateBucketNode = entityNode.get("group_by_date").get("buckets"); + if (dateBucketNode.isArray()) { + for (final JsonNode dateBucket : dateBucketNode) { + Long date = dateBucket.get("key").asLong(); + final JsonNode countBucketNode = + dateBucket.get("sort_by_date").get("hits").get("hits"); + + if (countBucketNode.isArray()) { + final JsonNode latestEntityNode = countBucketNode.get(0); + + long currentCount = latestEntityNode.get("_source").get("count").asLong(); + if (countByDateMap.containsKey(date)) { + // add to the value if map already contains this date + currentCount += countByDateMap.get(date); + } + + countByDateMap.put(date, currentCount); + } + } + + } + } + } + /* + * Sort the map by epoch timestamp + */ + Map<Long, Long> sortedMap = new TreeMap<Long, Long>(countByDateMap); + for (Entry<Long, Long> entry : sortedMap.entrySet()) { + JSONObject dateEntry = new JSONObject(); + dateEntry.put("date", entry.getKey()); + dateEntry.put("count", entry.getValue()); + finalResultArr.put(dateEntry); + } + + } catch (Exception exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult.put("result", finalResultArr); + } + + /** + * Gets the buckets node. + * + * @param node the node + * @return the buckets node + * @throws Exception the exception + */ + private JsonNode getBucketsNode(JsonNode node) throws Exception { + if (node.get("aggregations").get("group_by_entityType").get("buckets") != null) { + return node.get("aggregations").get("group_by_entityType").get("buckets"); + } else { + throw new Exception("Failed to map JSON response"); + } + } + + /** + * Initialize entity map. + * + * @return the map + */ + private Map<String, Long> initializeEntityMap() { + Map<String, Long> entityMap = new HashMap<String, Long>(); + String[] entityTypes = visualConfig.getEntityTypesToSummarize().split(","); + for (String entity : entityTypes) { + entityMap.put(entity, (long) 0); + } + + return entityMap; + } + +} diff --git a/src/main/java/org/openecomp/sparky/inventory/servlet/GeoVisualizationServlet.java b/src/main/java/org/openecomp/sparky/inventory/servlet/GeoVisualizationServlet.java new file mode 100644 index 0000000..cf6e0f2 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/inventory/servlet/GeoVisualizationServlet.java @@ -0,0 +1,223 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.inventory.servlet; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.IOException; +import java.io.PrintWriter; +import java.nio.ByteBuffer; +import java.security.SecureRandom; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.json.JSONArray; +import org.json.JSONObject; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestClientBuilder; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.util.ServletUtils; + +/** + * The Class GeoVisualizationServlet. + */ +public class GeoVisualizationServlet extends HttpServlet { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(GeoVisualizationServlet.class); + + private static final long serialVersionUID = 1L; + + private SearchAdapter search = null; + private ElasticSearchConfig elasticConfig = null; + private ObjectMapper mapper; + + private static final String SEARCH_STRING = "_search"; + + private static final String SEARCH_PARAMETER = + "?filter_path=hits.hits._source&_source=location&size=5000&q=entityType:"; + + /** + * Instantiates a new geo visualization servlet. 
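+   * <p>Construction delegates to {@link #init()}, which resolves the ElasticSearch
+   * configuration and builds the SearchAdapter used for the topographical queries.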
+   *
+   * @throws ServletException the servlet exception
+   */
+  public GeoVisualizationServlet() throws ServletException {
+    init();
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.GenericServlet#init()
+   */
+  @Override
+  public void init() throws ServletException {
+    super.init();
+    try {
+      if (elasticConfig == null) {
+        elasticConfig = ElasticSearchConfig.getConfig();
+      }
+      if (search == null) {
+        search = new SearchAdapter();
+      }
+      this.mapper = new ObjectMapper();
+    } catch (Exception exc) {
+      // propagate configuration failures to the container instead of silently swallowing them
+      throw new ServletException(
+          "Caught an exception while getting an instance of servlet configuration.", exc);
+    }
+  }
+
+  public void setSearch(SearchAdapter search) {
+    this.search = search;
+  }
+
+  public void setElasticConfig(ElasticSearchConfig elasticConfig) {
+    this.elasticConfig = elasticConfig;
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
+   */
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    String txnID = request.getHeader("X-TransactionId");
+    if (txnID == null) {
+      txnID = NodeUtils.getRandomTxnId();
+    }
+
+    String partnerName = request.getHeader("X-FromAppId");
+    if (partnerName == null) {
+      partnerName = "Browser";
+    }
+
+    MdcContext.initialize(txnID, "AAI-UI", "", partnerName,
+        request.getRemoteAddr());
+
+    OperationResult operationResult = null;
+    try {
+      operationResult = getGeoVisualizationResults(response, request.getParameter("entity"));
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_PROCESSING_REQUEST, exc);
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
+   */
+  @Override
+  protected void doPost(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException {
+
+  }
+
+  /**
+   * Gets the geo visualization results.
+   *
+   * @param response the response
+   * @param entityType the entity type
+   * @return the geo visualization results
+   * @throws Exception the exception
+   */
+  protected OperationResult getGeoVisualizationResults(HttpServletResponse response,
+      String entityType) throws Exception {
+    OperationResult operationResult = new OperationResult();
+
+    String parameters = SEARCH_PARAMETER + entityType;
+    String requestString = String.format("/%s/%s/%s", elasticConfig.getTopographicalSearchIndex(),
+        SEARCH_STRING, parameters);
+
+    try {
+      final String fullUrlStr = ServletUtils.getFullUrl(elasticConfig, requestString);
+      OperationResult opResult = ServletUtils.executeGetQuery(LOG, search, response, fullUrlStr);
+
+      JSONObject finalOutputJson = formatOutput(opResult.getResult());
+
+      if (finalOutputJson != null) {
+        response.setContentType("application/json");
+        PrintWriter out = response.getWriter();
+        out.println(finalOutputJson);
+        out.close();
+      }
+
+    } catch (JsonProcessingException exc) {
+      ServletUtils.handleSearchServletErrors(LOG, "Unable to map JSON payload", exc, response);
+    }
+
+    return operationResult;
+  }
+
+  /**
+   * Format output. 
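+   * <p>Extracts each hit's location and returns, for example (values illustrative):
+   * {"plotPoints":[{"longitude":"-73.97","latitude":"40.75"}]}. Hits with missing or
+   * non-numeric lat/lon values are skipped.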
+ * + * @param results the results + * @return the JSON object + */ + private JSONObject formatOutput(String results) { + JsonNode resultNode = null; + JSONObject finalResult = new JSONObject(); + JSONArray entitiesArr = new JSONArray(); + + try { + resultNode = mapper.readTree(results); + + final JsonNode hitsNode = resultNode.get("hits").get("hits"); + if (hitsNode.isArray()) { + + for (final JsonNode arrayNode : hitsNode) { + JsonNode sourceNode = arrayNode.get("_source"); + if (sourceNode.get("location") != null) { + JsonNode locationNode = sourceNode.get("location"); + if (NodeUtils.isNumeric(locationNode.get("lon").asText()) + && NodeUtils.isNumeric(locationNode.get("lat").asText())) { + JSONObject location = new JSONObject(); + location.put("longitude", locationNode.get("lon").asText()); + location.put("latitude", locationNode.get("lat").asText()); + + entitiesArr.put(location); + } + + } + } + } + finalResult.put("plotPoints", entitiesArr); + + } catch (IOException exc) { + LOG.warn(AaiUiMsgs.ERROR_BUILDING_SEARCH_RESPONSE, exc.getLocalizedMessage()); + } + + return finalResult; + } +} diff --git a/src/main/java/org/openecomp/sparky/logging/AaiUiMsgs.java b/src/main/java/org/openecomp/sparky/logging/AaiUiMsgs.java new file mode 100644 index 0000000..a06559c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/logging/AaiUiMsgs.java @@ -0,0 +1,422 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.logging; + +import com.att.eelf.i18n.EELFResourceManager; + +import org.openecomp.cl.eelf.LogMessageEnum; + +/** + * The Enum AaiUiMsgs. + */ +public enum AaiUiMsgs implements LogMessageEnum { + /** Arguments: {0} = Exception/error. */ + FAILURE_TO_PROCESS_REQUEST, + /** Arguments: {0} = Message and or error body. */ + FAILED_TO_DETERMINE, + /** Arguments: {0} = Exception/error. */ + UNKNOWN_SERVER_ERROR, + /** Arguments: {0} = Message and or error body. */ + FAILED_TO_ANALYZE, + /** Arguments: {0} = Exception/error. */ + FAILED_TO_GET_NODES_QUERY_RESULT, + /** Arguments: {0} = Expected link count, {1} = Actual link count. */ + UNEXPECTED_NUMBER_OF_LINKS, + /** Arguments: {0} = Reason. */ + DANGLING_NODE_WARNING, + /** Arguments: {0} = Node count, {1} = Link count. */ + VISUALIZATION_GRAPH_OUTPUT, + /** Arguments: {0} = JsonNode. */ + ITEM_TYPE_NULL, + /** Arguments: {0} = Filter property. */ + UNEXPECTED_TOKEN_COUNT, + /** Arguments: {0} = Error/exception message. 
*/ + ADD_SEARCH_TARGET_ATTRIBUTES_FAILED, + /** No argument */ + MAX_EVALUATION_ATTEMPTS_EXCEEDED, + /** Arguments: {0} = Error/exception message. */ + VISUALIZATION_OUTPUT_ERROR, + /** Arguments: {0} = Total resolve time, {1} = Total links retrieved, {2} = Op time. */ + ALL_TRANSACTIONS_RESOLVED, + /** Arguments: {0} = Error/exception message. */ + PROCESSING_LOOP_INTERUPTED, + /** Arguments: {0} = Node ID. */ + IGNORING_SKELETON_NODE, + /** Arguments: {0} = Node count. */ + OUTSTANDING_WORK_PENDING_NODES, + /** Arguments: {0} = Reason. */ + FAILED_TO_ADD_SKELETON_NODE, + /** Arguments: {0} = Reason. */ + FAILED_TO_PROCESS_SKELETON_NODE, + INVALID_RESOLVE_STATE_DURING_INIT, + /** Arguments: {0} = Reason. */ + FAILED_TO_PROCESS_INITIAL_STATE, + /** Arguments: {0} = Relationship. */ + SKIPPING_RELATIONSHIP, + /** Arguments: {0} = Failure reason. */ + FAILED_TO_DETERMINE_NODE_ID, + /** Arguments: {0} = Error/exception message. */ + EXTRACTION_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_NODE_PARSE_ERROR, + /** Arguments: {0} = Node ID. */ + ROOT_NODE_DISCOVERED, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_PROCESS_NEIGHBORS_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_JSON_PARSE_ERROR, + /** Arguments: {0} = Error/exception message. */ + SELF_LINK_PROCESSING_ERROR, + /** Arguments: {0} = Entity type. */ + UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE, + /** Arguments: {0} = Attribute group. */ + ATTRIBUTE_GROUP_FAILURE, + /** Arguments: {0} = Situational description, {1} = Exception message. */ + EXCEPTION_CAUGHT, + /** Arguments: {0} = Operation name, {1} = Operation time. */ + OPERATION_TIME, + /** Arguments: {0} = Error message. */ + SEARCH_SERVLET_ERROR, + /** Arguments: {0} = Exception message. */ + SEARCH_RESPONSE_BUILDING_EXCEPTION, + /** Arguments: {0} = Error message, {1} = Error message. */ + SEARCH_TAG_ANNOTATION_ERROR, + /** Arguments: {0} = App type. */ + QUERY_FAILED_UNHANDLED_APP_TYPE, + /** Arguments: {0} = Entity type. */ + ENTITY_NOT_FOUND_IN_OXM, + /** Arguments: {0} = JSON conversion type, {1} = Error thrown. */ + JSON_CONVERSION_ERROR, + /** Arguments: {0} = Node ID */ + NO_RELATIONSHIP_DISCOVERED, + /** No argument */ + SELF_LINK_NULL_EMPTY_RESPONSE, + /** Arguments: {0} = Error message. */ + SELF_LINK_RELATIONSHIP_LIST_ERROR, + /** Arguments: {0} = AIN id, {1} = old depth, {2} = new depth. */ + ACTIVE_INV_NODE_CHANGE_DEPTH, + /** Arguments: {0} = Node ID, {1} = Current state, {2} = New state {3} = Triggering action */ + ACTIVE_INV_NODE_CHANGE_STATE, + /** Arguments: {0} = Current state, {1} = New state {2} = Triggering action */ + ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID, + /** Arguments: {0} = Count Key {1} = Aggregation Key. */ + AGGREGATION_KEY_ERROR, + /** Arguments: {0} Configuration */ + CONFIGURATION_ERROR, + /** Arguments: {0} = Source. */ + ERROR_PARSING_JSON_PAYLOAD_NONVERBOSE, + /** Arguments: {0} = Payload. */ + ERROR_PARSING_JSON_PAYLOAD_VERBOSE, + /** Arguments: {0} = Key {1} = JSON Blob. */ + ERROR_FETCHING_JSON_VALUE, + /** Arguments: {0} = Error. */ + ERROR_PARSING_PARAMS, + /** No argument */ + INVALID_REQUEST_PARAMS, + /** Arguments: {0} = Key. 
*/ + ERROR_SORTING_VIOLATION_DATA, + /** Arguments: {0} = exception */ + ERROR_SERVLET_PROCESSSING, + /** Arguments: {0} = exception */ + ERROR_BUILDING_RESPONSE_FOR_TABLE_QUERY, + /** Arguments: {0} = exception */ + ERROR_BUILDING_SEARCH_RESPONSE, + /** No argument */ + ERROR_CSP_CONFIG_FILE, + /** Arguments: {0} = exception */ + ERROR_SHUTDOWN_EXECUTORS, + /** No argument */ + ERROR_LOADING_OXM, + /** Arguments: {0} = exception */ + ERROR_GETTING_DATA_FROM_AAI, + /** No argument */ + WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED, + /** Arguments: {0} = Entity Type */ + MISSING_ENTITY_DESCRIPTOR, + /** Arguments: {0} = Error */ + SELF_LINK_GET, + /** Arguments: {0} = Error */ + ES_FAILED_TO_CONSTRUCT_QUERY, + /** Arguments: {0} = Error */ + ES_RETRIEVAL_FAILED, + /** Arguments: {0} = Error */ + ES_LINK_UPSERT, + /** Arguments: {0} = Element */ + ES_SIMPLE_PUT, + /** Arguments: {0} = Value {1} = Element {2} = Error */ + ES_ABORT_CROSS_ENTITY_REF_SYNC, + /** Arguments: {0} Return Code */ + ES_OPERATION_RETURN_CODE, + /** Arguments: {0} = Error */ + ES_CROSS_ENTITY_REF_PUT, + /** No argument */ + ES_CROSS_REF_SYNC_VERSION_CONFLICT, + /** Arguments: {0} Result Code {1} = Error */ + ES_CROSS_REF_SYNC_FAILURE, + /** Arguments: {0} = Error */ + ES_FAILED_TO_CONSTRUCT_URI, + /** No argument */ + ES_RETRIEVAL_FAILED_RESYNC, + /** Arguments: {0} = Entity */ + ES_CROSS_ENTITY_RESYNC_LIMIT, + /** Arguments: {0} Entity Name */ + ES_PKEYVALUE_NULL, + /** Arguments: {0} = Error */ + ES_STORE_FAILURE, + /** Arguments: {0} Index Name {1} = Error */ + ES_PRE_SYNC_FAILURE, + /** Arguments: {0} Index Name */ + ES_SYNC_CLEAN_UP, + /** Arguments: {0} Index Name {1} Size before clean up {2} = Size after clean up */ + ES_SYNC_CLEAN_UP_SIZE, + /** Arguments: {0} Index Name {1} Index Type {2} = Size before delete */ + ES_SYNC_SELECTIVE_DELETE, + /** Arguments: {0} Index Name {1} Number of records */ + ES_BULK_DELETE, + /** Arguments: {0} Index name {1} = Error */ + ES_BULK_DELETE_ERROR, + /** Arguments: {0} Type of retrieval {1} Completion Time */ + COLLECT_TIME_WITH_ERROR, + /** Arguments: {0} Type of retrieval {1} Completion Time */ + COLLECT_TIME_WITH_SUCCESS, + /** Arguments: {0} Type of retrieval {1} Number of records */ + COLLECT_TOTAL, + /** Arguments: {0} Number of required fetches */ + SYNC_NUMBER_REQ_FETCHES, + /** Arguments: {0} Number of total fetches {1} Number of available records*/ + SYNC_NUMBER_TOTAL_FETCHES, + /** Arguments: {0} Completion Time */ + COLLECT_TOTAL_TIME, + /** Arguments: {0} = Error */ + ES_SCROLL_CONTEXT_ERROR, + /** No argument */ + ES_BULK_DELETE_SKIP, + /** Arguments: {0} = Number of docs */ + ES_BULK_DELETE_START, + /** No argument */ + SELF_LINK_CROSS_REF_SYNC, + /** Arguments: {0} = message */ + ERROR_GENERIC, + /** Arguments: {0} = error */ + JSON_PROCESSING_ERROR, + /** Arguments: {0} = exception */ + ERROR_PROCESSING_REQUEST, + /** Arguments: {0} = Self Link */ + SELF_LINK_GET_NO_RESPONSE, + /** Arguments: {0} = error */ + HISTORICAL_COLLECT_ERROR, + /** Arguments: {0} = Time */ + HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING, + /** No argument */ + HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED, + /** Arguments: {0} = Controller {1} = Time */ + HISTORICAL_SYNC_DURATION, + /** No argument */ + HISTORICAL_SYNC_PENDING, + /** Arguments: {0} = Time */ + HISTORICAL_SYNC_TO_BEGIN, + /** Arguments: {0} = message */ + DEBUG_GENERIC, + /** Arguments: {0} = message */ + INFO_GENERIC, + /** Arguments: {0} = message */ + WARN_GENERIC, + /** Arguments: {0} = context {1} = Exception*/ + 
INTERRUPTED, + /** Arguments: {0} = Entity Type {1} Entity */ + GEO_SYNC_IGNORING_ENTITY, + /** Arguments: {0} = type */ + OXM_FAILED_RETRIEVAL, + /** Arguments: {0} = Directory. */ + OXM_FILE_NOT_FOUND, + /** No argument */ + OXM_READ_ERROR_NONVERBOSE, + /** Arguments: {0} = OXM File name */ + OXM_READ_ERROR_VERBOSE, + /** No argument */ + OXM_PARSE_ERROR_NONVERBOSE, + /** Arguments: {0} = OXM File name {1} = Exception*/ + OXM_PARSE_ERROR_VERBOSE, + /** No argument */ + OXM_LOAD_SUCCESS, + /** Arguments: {0} = Entity {1} = Found property-value*/ + OXM_PROP_DEF_ERR_CROSS_ENTITY_REF, + /** Arguments: {0} = Sequence Number */ + ETAG_RETRY_SEQ, + /** Arguments: {0} = Reason */ + ETAG_WAIT_INTERRUPTION, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_SEQ, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_DONE_SEQ, + /** Arguments: {0} = Reason */ + QUERY_AAI_WAIT_INTERRUPTION, + /** Arguments: {0} = URL {1} = Sequence Number */ + QUERY_AAI_RETRY_FAILURE_WITH_SEQ, + /** Arguments: {0} = URL */ + QUERY_AAI_RETRY_MAXED_OUT, + /** Arguments: {0} = Reason */ + PEGGING_ERROR, + /** Arguments: {0} = Key */ + DATA_CACHE_SUCCESS, + /** Arguments: {0} = URL {1} = Sequence Number */ + EXECUTOR_SERV_EXCEPTION, + /** Arguments: {0} = Exception */ + DISK_CACHE_READ_IO_ERROR, + /** Arguments: {0} = Exception */ + DISK_CREATE_DIR_IO_ERROR, + /** Arguments: {0} = Exception */ + DISK_DATA_WRITE_IO_ERROR, + /** Arguments: {0} = Data Item {1} = Exception */ + DISK_NAMED_DATA_WRITE_IO_ERROR, + /** Arguments: {0} = Data Item {1} = Exception */ + DISK_NAMED_DATA_READ_IO_ERROR, + /** No argument */ + OFFLINE_STORAGE_PATH_ERROR, + /** Arguments: {0} = URL {1} = Error */ + RESTFULL_OP_ERROR_VERBOSE, + /** Arguments: {0} = Method {1} = Time {2} = URL {3} = Result Code */ + RESTFULL_OP_COMPLETE, + /** No argument */ + INITIALIZE_OXM_MODEL_LOADER, + /** Arguments: {0} = Exception */ + AAI_RETRIEVAL_FAILED_GENERIC, + /** Arguments: {0} = Self Link */ + AAI_RETRIEVAL_FAILED_FOR_SELF_LINK, + /** Arguments: {0} = Cookie */ + COOKIE_FOUND, + /** No argument */ + COOKIE_NOT_FOUND, + /** Arguments: {0} = Message */ + INVALID_REQUEST, + /** Arguments: {0} = User ID */ + USER_AUTHORIZATION_FILE_UNAVAILABLE, + /** Arguments: {0} = URL {1} = Cause */ + INVALID_URL_VERBOSE, + /** Arguments: {0} = Row ID */ + DI_DATA_NOT_FOUND_NONVERBOSE, + /** Arguments: {0} = Row ID {1} Attempt count */ + DI_DATA_NOT_FOUND_VERBOSE, + /** Arguments: {0} = Time in ms {1} Status */ + DI_MS_TIME_FOR_DATA_FETCH, + /** Arguments: {0} = Number of Entity Links */ + ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, + /** Arguments: {0} = Message */ + ERROR_EXTRACTING_FROM_RESPONSE, + /** No argument */ + ERROR_LOADING_OXM_SEARCHABLE_ENTITIES, + /** Arguments: {0} = Message */ + ES_SEARCHABLE_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Message */ + FAILED_TO_REGISTER_DUE_TO_NULL, + /** Arguments: {0} = File Path */ + FAILED_TO_RESTORE_TXN_FILE_MISSING, + /** Arguments: {0} = Index Name */ + INDEX_ALREADY_EXISTS, + /** Arguments: {0} = Index Name */ + INDEX_EXISTS, + /** Arguments: {0} = Index Name {1} = Operation Result */ + INDEX_INTEGRITY_CHECK_FAILED, + /** Arguments: {0} = Index Name */ + INDEX_NOT_EXIST, + /** Arguments: {0} = Index Name */ + INDEX_RECREATED, + /** Arguments: {0} = Time */ + SEARCH_ENGINE_SYNC_STARTED, + /** Arguments: {0} = Time */ + SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH, + /** Arguments: {0} = Message */ + SYNC_DURATION, + /** Arguments: {0} = Entity Type */ + ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, + 
/** Arguments: {0} = AAI Query Result */ + ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION, + /** Arguments: {0} = Message */ + ENTITY_SYNC_FAILED_QUERY_ERROR, + /** Arguments: {0} = Self Link Query */ + SELF_LINK_DETERMINATION_FAILED_GENERIC, + /** Arguments: {0} = Number of Entity Links */ + SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS, + /** Arguments: {1} = Query {2} = Operation Result Code {3} = Operation Result */ + SELF_LINK_RETRIEVAL_FAILED, + /** Arguments: {0} = Controller {1} = Synchronizer Current Internal State {2} = New State {3} = Caused By Action */ + SYNC_INTERNAL_STATE_CHANGED, + /** Arguments: {0} = Message */ + SYNC_INVALID_CONFIG_PARAM, + /** Arguments: {0} = Synchronizer Current Internal State */ + SYNC_NOT_VALID_STATE_DURING_REQUEST, + /** No argument */ + SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED, + /** No argument */ + SYNC_START_TIME, + /** Arguments: {0} = Controller {1} = Time */ + SYNC_TO_BEGIN, + /** Arguments: {0} = File Path */ + WILL_RETRIEVE_TXN, + /** Arguments: {0} = Configuration file name {1} = Exception */ + CONFIG_NOT_FOUND_VERBOSE, + /** Arguments: {0} = File name */ + FILE_NOT_FOUND, + /** Arguments: {0} = File name */ + FILE_READ_IN_PROGRESS, + ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES, + /** Arguments: {0} = Error message */ + ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Error message */ + ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, + /** Arguments: {0} = Error message. */ + ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, + /** Arguments: {0} = Error message */ + SEARCH_ADAPTER_ERROR, + /** Arguments: {0} = Decoding exception message */ + UNSUPPORTED_URL_ENCODING, + /** Arguments: {0} = Invalid URL */ + INVALID_REDIRECT_URL, + /** Arguments: {0} = Valid login URL */ + VALID_REDIRECT_URL, + /** Arguments: {0} = Query Parameter Self-Link Extraction Error */ + QUERY_PARAM_EXTRACTION_ERROR, + /** Arguments: {0} = Info message */ + LOGIN_FILTER_INFO, + /** Arguments: {0} = Debug message */ + LOGIN_FILTER_DEBUG, + /** Arguments: {0} = URL to extract parameter from */ + ERROR_REMOVING_URL_PARAM, + /** Arguments: {0} = Hash value */ + ERROR_INVALID_HASH, + ERROR_HASH_NOT_FOUND, + ERROR_READING_HTTP_REQ_PARAMS, + /** Arguments: {0} = Exception */ + ERROR_D3_GRAPH_VISUALIZATION, + /** Arguments: {0} = Exception */ + ERROR_AAI_QUERY_WITH_RETRY; + + /** + * Static initializer to ensure the resource bundles for this class are loaded... + */ + static { + EELFResourceManager.loadMessageBundle("logging/AAIUIMsgs"); + } +} diff --git a/src/main/java/org/openecomp/sparky/logging/util/LoggingUtils.java b/src/main/java/org/openecomp/sparky/logging/util/LoggingUtils.java new file mode 100644 index 0000000..13f2337 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/logging/util/LoggingUtils.java @@ -0,0 +1,44 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.logging.util; + +/** + * The Class LoggingUtils. + */ +public class LoggingUtils { + + /** + * Sets the duration. + * + * @param startTime the start time + * @param stopTime the stop time + * @return the string + */ + public static String setDuration(long startTime, long stopTime) { + return String.valueOf(stopTime - startTime); + } + +} diff --git a/src/main/java/org/openecomp/sparky/search/EntityTypeSummary.java b/src/main/java/org/openecomp/sparky/search/EntityTypeSummary.java new file mode 100644 index 0000000..aa79f3d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/EntityTypeSummary.java @@ -0,0 +1,53 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sparky.search; + +import java.util.ArrayList; +import java.util.List; + +public class EntityTypeSummary { + private int totalChartHits; + private List<EntityTypeSummaryBucket> buckets = new ArrayList<>(); + + public int getTotalChartHits() { + return totalChartHits; + } + + public List<EntityTypeSummaryBucket> getBuckets() { + return buckets; + } + + public void setTotalChartHits(int totalChartHits) { + this.totalChartHits = totalChartHits; + } + + public void setBuckets(List<EntityTypeSummaryBucket> buckets) { + this.buckets = buckets; + } + + public void addBucket(EntityTypeSummaryBucket bucket) { + this.buckets.add(bucket); + } +} diff --git a/src/main/java/org/openecomp/sparky/search/EntityTypeSummaryBucket.java b/src/main/java/org/openecomp/sparky/search/EntityTypeSummaryBucket.java new file mode 100644 index 0000000..540b300 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/EntityTypeSummaryBucket.java @@ -0,0 +1,46 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.search; + +public class EntityTypeSummaryBucket { + private int count; + private String key; + + public int getCount() { + return count; + } + + public String getKey() { + return key; + } + + public void setCount(int count) { + this.count = count; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/src/main/java/org/openecomp/sparky/search/SearchEntityProperties.java b/src/main/java/org/openecomp/sparky/search/SearchEntityProperties.java new file mode 100644 index 0000000..bcf46f9 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/SearchEntityProperties.java @@ -0,0 +1,49 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.search; + +import java.util.HashMap; +import java.util.Map; + +public class SearchEntityProperties { + private String type; + private Map<String, String> fields = new HashMap<>(); + + public String getType() { + return type; + } + + public Map<String, String> getFields() { + return fields; + } + + public void setType(String type) { + this.type = type; + } + + public void setFields(Map<String, String> field) { + this.fields = field; + } +} diff --git a/src/main/java/org/openecomp/sparky/search/Suggestion.java b/src/main/java/org/openecomp/sparky/search/Suggestion.java new file mode 100644 index 0000000..79eb240 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/Suggestion.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sparky.search; + +public class Suggestion { + private String entityType; + private String searchTags; + private SearchEntityProperties properties; + + public Suggestion(SearchEntityProperties properties) { + this.properties = properties; + } + + public String getEntityType() { + return entityType; + } + + public String getSearchTags() { + return searchTags; + } + + public SearchEntityProperties getProperties() { + return properties; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public void setSearchTags(String searchTags) { + this.searchTags = searchTags; + } + + public void setProperties(SearchEntityProperties properties) { + this.properties = properties; + } +} diff --git a/src/main/java/org/openecomp/sparky/search/SuggestionList.java b/src/main/java/org/openecomp/sparky/search/SuggestionList.java new file mode 100644 index 0000000..cd56099 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/SuggestionList.java @@ -0,0 +1,72 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.search; + +import java.util.LinkedList; +import java.util.List; + +public class SuggestionList { + // TODO: verify which data type these fields should be + private Long processingTimeInMs; + private Long totalFound; + private Long numReturned; + private List<Suggestion> suggestions = new LinkedList<>(); + + public void addSuggestion(Suggestion suggestion) { + suggestions.add(suggestion); + } + + public List<Suggestion> getSuggestions() { + return suggestions; + } + + public void setSuggestions(List<Suggestion> suggestions) { + this.suggestions = suggestions; + } + + public Long getProcessingTimeInMs() { + return processingTimeInMs; + } + + public Long getTotalFound() { + return totalFound; + } + + public Long getNumReturned() { + return numReturned; + } + + public void setProcessingTimeInMs(Long processingTimeInMs) { + this.processingTimeInMs = processingTimeInMs; + } + + public void setTotalFound(Long totalFound) { + this.totalFound = totalFound; + } + + public void setNumReturned(Long numReturned) { + this.numReturned = numReturned; + } +}
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/search/VnfSearchQueryBuilder.java b/src/main/java/org/openecomp/sparky/search/VnfSearchQueryBuilder.java new file mode 100644 index 0000000..55d003e --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/VnfSearchQueryBuilder.java @@ -0,0 +1,200 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.search; + +import java.util.Date; +import java.util.Map; + +import javax.json.Json; +import javax.json.JsonArray; +import javax.json.JsonArrayBuilder; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; + +/** + * Build a JSON payload to send to elastic search to get vnf search data. + */ + +public class VnfSearchQueryBuilder { + + /** + * Creates the suggestions query. 
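+   * <p>The result is the ElasticSearch completion-suggest payload sketched in the
+   * block comment below, with "text" and "size" taken from the arguments.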
+ * + * @param maxResults maximum number of suggestions to fetch + * @param queryStr query string + * @return the json object + */ + + /* + * { "vnfs" : { "text" : "VNFs", "completion" : { "field" : "entity_suggest", "size": 1 } } } + */ + public static JsonObject createSuggestionsQuery(String maxResults, String queryStr) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + + JsonObjectBuilder completionBlob = Json.createObjectBuilder(); + completionBlob.add("field", "entity_suggest"); + completionBlob.add("size", maxResults); + + JsonObjectBuilder jsonAllBuilder = Json.createObjectBuilder(); + jsonAllBuilder.add("text", queryStr); + jsonAllBuilder.add("completion", completionBlob); + + jsonBuilder.add("vnfs", jsonAllBuilder.build()); + return jsonBuilder.build(); + } + + public static JsonObject getTermBlob(String key, String value) { + JsonObjectBuilder termBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder().add(key, value); + return termBlobBuilder.add("term", jsonBuilder.build()).build(); + } + + public static void getSummaryAggsBlob(JsonObjectBuilder aggsBlobBuilder, String aggsKey, + int resultSize) { + JsonObjectBuilder fieldBuilder = + Json.createObjectBuilder().add("field", aggsKey).add("size", resultSize); + JsonObject aggsFieldBlob = fieldBuilder.build(); + JsonObjectBuilder defaultBlobBuilder = Json.createObjectBuilder().add("terms", aggsFieldBlob); + JsonObject defaultBlob = defaultBlobBuilder.build(); + aggsBlobBuilder.add("default", defaultBlob); + } + + public static void buildSingleTermCountQuery(JsonObjectBuilder jsonBuilder, String key, + String value) { + jsonBuilder.add("query", getTermBlob(key, value)); + } + + public static void buildSingleTermSummaryQuery(JsonObjectBuilder jsonBuilder, String key, + String value, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + + queryBlobBuilder.add("constant_score", + Json.createObjectBuilder().add("filter", getTermBlob(key, value))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermSummaryQuery(JsonObjectBuilder jsonBuilder, + Map<String, String> attributes, String groupByKey) { + JsonObjectBuilder queryBlobBuilder = Json.createObjectBuilder(); + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + JsonArrayBuilder mustBlobBuilder = Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + JsonArray mustBlob = mustBlobBuilder.build(); + + queryBlobBuilder.add("constant_score", Json.createObjectBuilder().add("filter", + Json.createObjectBuilder().add("bool", Json.createObjectBuilder().add("must", mustBlob)))); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("query", queryBlobBuilder.build()); + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildZeroTermSummaryQuery(JsonObjectBuilder jsonBuilder, String groupByKey) { + JsonObjectBuilder aggsBlobBuilder = Json.createObjectBuilder(); + + getSummaryAggsBlob(aggsBlobBuilder, groupByKey, 0); + + jsonBuilder.add("aggs", aggsBlobBuilder.build()); + } + + public static void buildMultiTermCountQuery(JsonObjectBuilder jsonBuilder, + Map<String, String> attributes) { + JsonArrayBuilder mustBlobBuilder = 
Json.createArrayBuilder(); + for (String key : attributes.keySet()) { + mustBlobBuilder.add(getTermBlob(key, attributes.get(key))); + } + jsonBuilder.add("query", Json.createObjectBuilder().add("bool", + Json.createObjectBuilder().add("must", mustBlobBuilder))); + } + + + + public static JsonObject createSummaryByEntityTypeQuery(Map<String, String> attributes, + String groupByKey) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("size", "0"); // avoid source data + if (attributes.size() == 0) { + buildZeroTermSummaryQuery(jsonBuilder, groupByKey); + } else if (attributes.size() == 1) { + Map.Entry<String, String> entry = attributes.entrySet().iterator().next(); + buildSingleTermSummaryQuery(jsonBuilder, entry.getKey(), entry.getValue(), groupByKey); + } else { + buildMultiTermSummaryQuery(jsonBuilder, attributes, groupByKey); + } + return jsonBuilder.build(); + } + + public static JsonObject createEntityCountsQuery(Map<String, String> attributes) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + if (attributes.size() == 1) { + Map.Entry<String, String> entry = attributes.entrySet().iterator().next(); + buildSingleTermCountQuery(jsonBuilder, entry.getKey(), entry.getValue()); + } else { + buildMultiTermCountQuery(jsonBuilder, attributes); + } + return jsonBuilder.build(); + } + + public static JsonArray getSortCriteria(String sortFieldName, String sortOrder) { + JsonArrayBuilder jsonBuilder = Json.createArrayBuilder(); + jsonBuilder.add(Json.createObjectBuilder().add(sortFieldName, + Json.createObjectBuilder().add("order", sortOrder))); + + return jsonBuilder.build(); + } + + + /** + * The main method. + * + * @param args the arguments + */ + public static void main(String[] args) { + Date start = new Date(System.currentTimeMillis() - Integer.MAX_VALUE); + Date end = new Date(); + String timezone = "-05:00"; + // JsonObject arr = createDateHistogramQuery(start, end, timezone); + + // System.out.println(arr.toString()); + + + // JsonObject table = createTableQuery(start, end, timezone, 0, 25); + // JsonObject aggre = createAuditQuery(start, end, timezone, "entityType", null, null); + + // System.out.println(arr.toString()); + // System.out.println(table.toString()); + // System.out.println(aggre.toString()); + + + } +} diff --git a/src/main/java/org/openecomp/sparky/search/VnfSearchService.java b/src/main/java/org/openecomp/sparky/search/VnfSearchService.java new file mode 100644 index 0000000..1cef43c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/search/VnfSearchService.java @@ -0,0 +1,348 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sparky.search;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+import javax.json.Json;
+import javax.json.JsonObject;
+import javax.json.JsonObjectBuilder;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.MediaType;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.dal.elasticsearch.HashQueryResponse;
+import org.openecomp.sparky.dal.elasticsearch.SearchAdapter;
+import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.suggestivesearch.SuggestionEntity;
+import org.openecomp.sparky.util.NodeUtils;
+import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants;
+import org.openecomp.sparky.viewandinspect.entity.QuerySearchEntity;
+
+
+/**
+ * Creates the VNF search query for the document store from the given HTTP request, and processes
+ * the document store response.
+ */
+
+public class VnfSearchService {
+
+  private static final String APP_JSON = MediaType.APPLICATION_JSON;
+
+  private static ElasticSearchConfig esConfig = null;
+
+  private static final Logger LOG = LoggerFactory.getInstance().getLogger(VnfSearchService.class);
+
+  private static SearchAdapter search = null;
+  private static final String ES_SUGGEST_API = TierSupportUiConstants.ES_SUGGEST_API;
+  private static final String ES_COUNT_API = TierSupportUiConstants.ES_COUNT_API;
+  private static final String ES_SEARCH_API = TierSupportUiConstants.ES_SEARCH_API;
+
+  private static final String ENTITY_TYPE = "generic-vnf";
+
+  /**
+   * Gets the full URL for the given document store API, using the Elasticsearch configuration.
+   *
+   * @param indexName the index to address
+   * @param api the api
+   * @return the full url
+   */
+  private static String getFullUrl(String indexName, String api) {
+
+    final String host = esConfig.getIpAddress();
+    final String port = esConfig.getHttpPort();
+    return String.format("http://%s:%s/%s/%s", host, port, indexName, api);
+  }
+
+  /**
+   * Processes the document store operation result into the servlet response.
+   *
+   * @param apiKey the api that was invoked (suggest, count or search)
+   * @param response the response
+   * @param opResult the op result
+   * @throws IOException Signals that an I/O exception has occurred.
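+   * <p>The FE payload shapes assembled below, shown for illustration: for {@code ES_COUNT_API}
+   * the ES {@code _shards} block is stripped from the count response, leaving e.g.
+   * <pre> {"count": 42} </pre>
+   * for {@code ES_SEARCH_API} the aggregation buckets are repackaged as
+   * <pre> {"groupby_aggregation": {"totalChartHits": 42, "buckets": [...]}} </pre>
+   * and for {@code ES_SUGGEST_API} the suggestion options are returned as
+   * <pre> {"totalFound": 2, "suggestions": [...]} </pre>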
+   */
+  private static void buildVnfQuerySearchResponse(String apiKey, HttpServletResponse response,
+      OperationResult opResult) throws IOException {
+    int responseCode = opResult.getResultCode();
+    String result = opResult.getResult();
+
+    if (responseCode > 300) {
+      setServletResponse(true, responseCode, response, result);
+      return;
+    }
+
+    if (result != null) {
+      JSONObject finalOutputToFe = new JSONObject();
+      JSONObject responseJson = new JSONObject(result);
+
+      if (apiKey.equalsIgnoreCase(ES_SUGGEST_API)) { // process suggestion results
+        try {
+          String suggestionsKey = "vnfs";
+          int total = 0;
+          JSONArray suggestionsArray = new JSONArray();
+          JSONArray suggestions = responseJson.getJSONArray(suggestionsKey);
+          if (suggestions.length() > 0) {
+            suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options");
+            for (int i = 0; i < suggestionsArray.length(); i++) {
+              suggestionsArray.getJSONObject(i).remove("score"); // FE doesn't like this noise: 'score'
+            }
+
+            total = suggestionsArray.length();
+          }
+          finalOutputToFe.put("totalFound", total);
+          finalOutputToFe.put("suggestions", suggestionsArray);
+        } catch (Exception e) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC,
+              "Error parsing response from suggestions index. Response: " + result);
+        }
+      } else if (apiKey.equalsIgnoreCase(ES_COUNT_API)) {
+        try {
+          String shardsKey = "_shards";
+          responseJson.remove(shardsKey);
+          finalOutputToFe = responseJson;
+        } catch (Exception e) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC,
+              "Error fetching total count response from aggregation index. Response: " + result);
+        }
+      } else if (apiKey.equalsIgnoreCase(ES_SEARCH_API)) {
+        try {
+          JSONArray bucketsArray = (responseJson.getJSONObject("aggregations")
+              .getJSONObject("default").getJSONArray("buckets"));
+          int count = 0;
+          for (int i = 0; i < bucketsArray.length(); i++) {
+            count += bucketsArray.getJSONObject(i).getInt("doc_count");
+          }
+          JSONObject content = new JSONObject();
+          content.put("totalChartHits", count);
+          content.put("buckets", bucketsArray);
+          finalOutputToFe.put("groupby_aggregation", content);
+        } catch (Exception e) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC,
+              "Error fetching group-by query response from aggregation index. Response: " + result);
+        }
+      }
+
+      setServletResponse(false, responseCode, response, finalOutputToFe.toString());
+    }
+  }
+
+  /**
+   * Sets the servlet response.
+   *
+   * @param isError true if the payload represents an error
+   * @param responseCode the response code
+   * @param response the response
+   * @param postPayload the post payload
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public static void setServletResponse(boolean isError, int responseCode,
+      HttpServletResponse response, String postPayload) throws IOException {
+
+    if (isError) {
+      LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, postPayload);
+    }
+
+    response.setStatus(responseCode);
+
+    if (postPayload != null) {
+      response.setContentType(APP_JSON);
+      PrintWriter out = response.getWriter();
+      out.println(postPayload);
+      out.close();
+    }
+  }
+
+  /**
+   * Instantiates a new VNF search service.
+   */
+  public VnfSearchService() {
+    try {
+      if (esConfig == null) {
+        esConfig = ElasticSearchConfig.getConfig();
+      }
+
+      if (search == null) {
+        search = new SearchAdapter();
+      }
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, "Search");
+    }
+  }
+
+
+  /**
+   * Gets the suggestions results.
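+   * <p>Illustrative usage (a sketch only; it assumes the Elasticsearch configuration and the
+   * autosuggest index are already initialized, and that {@code searchEntity} is a populated
+   * {@link QuerySearchEntity} carrying the user's query string):
+   * <pre>
+   * VnfSearchService vnfSearchService = new VnfSearchService();
+   * List&lt;SuggestionEntity&gt; suggestions =
+   *     vnfSearchService.getSuggestionsResults(searchEntity, 5);
+   * </pre>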
+   *
+   * @param querySearchEntity the query search entity carrying the user's query string
+   * @param resultCountLimit the maximum number of suggestions to return
+   * @return the suggestions results
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public List<SuggestionEntity> getSuggestionsResults(QuerySearchEntity querySearchEntity,
+      int resultCountLimit) throws IOException {
+    List<SuggestionEntity> returnList = new ArrayList<SuggestionEntity>();
+
+    /* Create suggestions query */
+    JsonObject vnfSearch = VnfSearchQueryBuilder.createSuggestionsQuery(
+        String.valueOf(resultCountLimit), querySearchEntity.getQueryStr());
+
+    /* Parse suggestions response */
+    OperationResult opResult = search.doPost(
+        getFullUrl(esConfig.getAutosuggestIndexname(), ES_SUGGEST_API), vnfSearch.toString(),
+        APP_JSON);
+
+    String result = opResult.getResult();
+
+    if (!opResult.wasSuccessful()) {
+      LOG.error(AaiUiMsgs.ERROR_PARSING_JSON_PAYLOAD_VERBOSE, result);
+      return returnList;
+    }
+
+    JSONObject responseJson = new JSONObject(result);
+    String suggestionsKey = "vnfs";
+    JSONArray suggestionsArray = new JSONArray();
+    JSONArray suggestions = responseJson.getJSONArray(suggestionsKey);
+    if (suggestions.length() > 0) {
+      suggestionsArray = suggestions.getJSONObject(0).getJSONArray("options");
+      for (int i = 0; i < suggestionsArray.length(); i++) {
+        JSONObject querySuggestion = suggestionsArray.getJSONObject(i);
+        if (querySuggestion != null) {
+          SuggestionEntity responseSuggestion = new SuggestionEntity();
+          responseSuggestion.setText(querySuggestion.getString("text"));
+          responseSuggestion.setRoute("vnfSearch"); // TODO -> Read route from suggestive-search.properties instead of hard coding
+          responseSuggestion.setHashId(
+              NodeUtils.generateUniqueShaDigest(querySuggestion.getString("text")));
+          returnList.add(responseSuggestion);
+        }
+      }
+    }
+    return returnList;
+  }
+
+
+  /**
+   * Sets the servlet response for the case where the ES lookup returns a count of 0.
+   * TODO: Change the response code to appropriate when FE-BE contract is finalized
+   * @param response
+   */
+  public void setZeroCountResponse(HttpServletResponse response) throws IOException {
+    JSONObject payload = new JSONObject();
+    payload.put("count", 0);
+    setServletResponse(false, 200, response, payload.toString());
+  }
+
+  /**
+   * Sets the servlet response for the case where the ES aggregation lookup returns no results.
+   * TODO: Change the response code to appropriate when FE-BE contract is finalized
+   * @param response
+   */
+  public void setEmptyAggResponse(HttpServletResponse response) throws IOException {
+    JSONObject aggPayload = new JSONObject();
+    aggPayload.put("totalChartHits", 0);
+    aggPayload.put("buckets", new JSONArray());
+    JSONObject payload = new JSONObject();
+    // put (rather than append) keeps the value a JSON object, matching the shape that
+    // buildVnfQuerySearchResponse produces for non-empty aggregations
+    payload.put("groupby_aggregation", aggPayload);
+    setServletResponse(false, 200, response, payload.toString());
+  }
+
+  public HashQueryResponse getJSONPayloadFromHash(String hashId) {
+
+    HashQueryResponse hashQueryResponse = new HashQueryResponse();
+    JsonObjectBuilder hashSearch = Json.createObjectBuilder();
+    VnfSearchQueryBuilder.buildSingleTermCountQuery(hashSearch, "_id", hashId);
+    String hashSearchQuery = hashSearch.build().toString();
+    OperationResult opResult = search.doPost(
+        getFullUrl(esConfig.getAutosuggestIndexname(), ES_SEARCH_API),
+        hashSearchQuery, APP_JSON);
+    hashQueryResponse.setOpResult(opResult);
+
+    if (opResult != null && opResult.wasSuccessful()) {
+      String result = opResult.getResult();
+      if (result != null) {
+        JSONObject responseJson = new JSONObject(result);
+        JSONArray hits = responseJson.getJSONObject("hits").getJSONArray("hits");
+        if (hits != null && hits.length() > 0) {
+          hashQueryResponse.setJsonPayload(hits.getJSONObject(0).getJSONObject("_source")
+              .getJSONObject("entity_suggest").toString());
+        }
+      }
+    }
+    return hashQueryResponse;
+  }
+
+  public void getEntityCountResults(HttpServletResponse response, Map<String, String> attributes)
+      throws IOException {
+    // Create entity counts query
+    JsonObject vnfSearch = VnfSearchQueryBuilder.createEntityCountsQuery(attributes);
+
+    // Parse response for entity counts query
+    OperationResult opResult = search.doPost(
+        getFullUrl(TierSupportUiConstants.getAggregationIndexName(ENTITY_TYPE), ES_COUNT_API),
+        vnfSearch.toString(), APP_JSON);
+    buildVnfQuerySearchResponse(ES_COUNT_API, response, opResult);
+  }
+
+  public void getSummaryByEntityType(HttpServletResponse response, Map<String, String> attributes,
+      String groupByKey) throws IOException {
+    // Create query for summary by entity type
+    JsonObject vnfSearch =
+        VnfSearchQueryBuilder.createSummaryByEntityTypeQuery(attributes, groupByKey);
+
+    // Parse response for summary by entity type query
+    OperationResult opResult = search.doPost(
+        getFullUrl(TierSupportUiConstants.getAggregationIndexName(ENTITY_TYPE), ES_SEARCH_API),
+        vnfSearch.toString(), APP_JSON);
+    buildVnfQuerySearchResponse(ES_SEARCH_API, response, opResult);
+  }
+
+  public SearchAdapter getSearch() {
+    return search;
+  }
+
+  public void setSearch(SearchAdapter search) {
+    VnfSearchService.search = search;
+  }
+
+  public static ElasticSearchConfig getEsConfig() {
+    return esConfig;
+  }
+
+  public static void setEsConfig(ElasticSearchConfig esConfig) {
+    VnfSearchService.esConfig = esConfig;
+  }
+
+  public static void main(String[] args) {
+    VnfSearchService vnfs = new VnfSearchService();
+    Date start = new Date();
+    Date end = start;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/search/config/SuggestionConfig.java b/src/main/java/org/openecomp/sparky/search/config/SuggestionConfig.java
new file mode 100644
index 0000000..c9dbc6e
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/search/config/SuggestionConfig.java
@@ -0,0 +1,143 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */ +package org.openecomp.sparky.search.config; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; + +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +public class SuggestionConfig { + public static final String CONFIG_FILE = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "suggestive-search.properties"; + + private static SuggestionConfig config; + private static final String INDEX_SEARCH_MAPPER_DEFAULT = "elasticsearch.autosuggestIndexname:SearchServiceWrapper,elasticsearch.indexName:VnfSearchService"; + + private Map<String, String> searchIndexToSearchService; + + private static final String CALLED_PAIRING_KEY_DEFAULT = "volume-group-id,volume-group-name,physical-location-id,data-center-code,complex-name,tenant-id,tenant-name,vserver-id,vserver-name,vserver-name2,hostname,pserver-name2,pserver-id,global-customer-id,subscriber-name,service-instance-id,service-instance-name,link-name,vpn-id,vpn-name,vpe-id,vnf-id,vnf-name,vnf-name2,vnfc-name,network-id,network-name,network-policy-id,vf-module-id,vf-module-name,vnf-id2,pnf-name,circuit-id"; + private static final String CALLED_PAIRING_VALUE_DEFAULT = "called"; + private static final String AT_PAIRING_KEY_DEFAULT = "street1,street2,postal-code,ipv4-oam-address,network-policy-fqdn"; + private static final String AT_PAIRING_VALUE_DEFAULT = "at"; + private static final String DEFAULT_PAIRING_DEFAULT_VALUE = "with"; + private String conjunctionForAt; + Map<String, String> pairingList; + private Collection<String> stopWords; + private String defaultPairingValue; + + + private SuggestionConfig() {} + + /** + * Returns initialized instance as per singleton pattern. 
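+   * <p>Note that the lazy initialization below is not synchronized, so this accessor is expected
+   * to be driven from a single thread during startup. Typical usage:
+   * <pre> SuggestionConfig config = SuggestionConfig.getConfig(); </pre>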
+   *
+   * @return initialized SuggestionConfig instance
+   */
+  public static SuggestionConfig getConfig() {
+    if (config == null) {
+      config = new SuggestionConfig();
+      config.initializeConfigProperties();
+    }
+    return config;
+  }
+
+  public void initializeConfigProperties() {
+
+    Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE);
+    Properties suggestionProps = ConfigHelper.getConfigWithPrefix("suggestion", props);
+
+    String indexSearchMapper = suggestionProps.getProperty("routing", INDEX_SEARCH_MAPPER_DEFAULT);
+    String[] indexesToSearchClassesArray = indexSearchMapper.split(",");
+    searchIndexToSearchService = new HashMap<String, String>();
+    for (String pair : indexesToSearchClassesArray) {
+      String[] subPair = pair.split(":");
+      searchIndexToSearchService.put(subPair[0], subPair[1]);
+    }
+
+    defaultPairingValue = suggestionProps.getProperty("pairing.default.value", DEFAULT_PAIRING_DEFAULT_VALUE);
+    String calledValue = suggestionProps.getProperty("pairing.called.value", CALLED_PAIRING_VALUE_DEFAULT);
+    String[] calledPairingArray = suggestionProps.getProperty("pairing.called.key", CALLED_PAIRING_KEY_DEFAULT).split(",");
+    pairingList = new HashMap<String, String>();
+    for (String calledField : calledPairingArray) {
+      pairingList.put(calledField, calledValue);
+    }
+
+    this.conjunctionForAt = suggestionProps.getProperty("pairing.at.value", AT_PAIRING_VALUE_DEFAULT);
+    String[] atPairingArray = suggestionProps.getProperty("pairing.at.key", AT_PAIRING_KEY_DEFAULT).split(",");
+    for (String atField : atPairingArray) {
+      pairingList.put(atField, conjunctionForAt);
+    }
+
+    stopWords = Arrays.asList(suggestionProps.getProperty("stopwords", "").split(","));
+
+  }
+
+  public void setSearchIndexToSearchService(Map<String, String> searchIndexToSearchService) {
+    this.searchIndexToSearchService = searchIndexToSearchService;
+  }
+
+  public Map<String, String> getSearchIndexToSearchService() {
+    return searchIndexToSearchService;
+  }
+
+  public Collection<String> getStopWords() {
+    return stopWords;
+  }
+
+  public void setStopWords(Collection<String> stopWords) {
+    this.stopWords = stopWords;
+  }
+
+  public Map<String, String> getPairingList() {
+    return pairingList;
+  }
+
+  public void setPairingList(Map<String, String> pairingList) {
+    this.pairingList = pairingList;
+  }
+
+  public String getDefaultPairingValue() {
+    return defaultPairingValue;
+  }
+
+  public void setDefaultPairingValue(String defaultPairingValue) {
+    this.defaultPairingValue = defaultPairingValue;
+  }
+
+  public String getConjunctionForAt() {
+    return conjunctionForAt;
+  }
+
+  public void setConjunctionForAt(String conjunctionForAt) {
+    this.conjunctionForAt = conjunctionForAt;
+  }
+
+
+}
diff --git a/src/main/java/org/openecomp/sparky/security/EcompSso.java b/src/main/java/org/openecomp/sparky/security/EcompSso.java
new file mode 100644
index 0000000..a008066
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/security/EcompSso.java
@@ -0,0 +1,160 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sparky.security;
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.security.portal.config.PortalAuthenticationConfig;
+import org.openecomp.portalsdk.core.onboarding.util.CipherUtil;
+
+
+/**
+ * Provides authentication services for onboarded ECOMP applications.
+ */
+public class EcompSso {
+
+  public static final String EP_SERVICE = "EPService";
+  public static final String CSP_COOKIE_NAME = "csp_cookie_name";
+  public static final String CSP_GATE_KEEPER_PROD_KEY = "csp_gate_keeper_prod_key";
+  public static final String ONAP_ENABLED = "ONAP_ENABLED";
+  private static final Logger LOG = LoggerFactory.getInstance().getLogger(EcompSso.class);
+
+  /**
+   * Searches the request for a cookie with the specified name.
+   *
+   * @param request
+   * @param cookieName
+   * @return Cookie, or null if not found.
+   */
+  public static Cookie getCookie(HttpServletRequest request, String cookieName) {
+    Cookie[] cookies = request.getCookies();
+    if (cookies != null) {
+      for (Cookie cookie : cookies) {
+        if (cookie.getName().equals(cookieName)) {
+          return cookie;
+        }
+      }
+    }
+
+    return null;
+  }
+
+  /**
+   * Answers whether the ECOMP Portal service cookie is present in the specified request.
+   *
+   * @param request
+   * @return true if the cookie is found, else false.
+   */
+  private static boolean isEPServiceCookiePresent(HttpServletRequest request) {
+    Cookie ep = getCookie(request, EP_SERVICE);
+    return (ep != null);
+  }
+
+  /**
+   * Validates whether the ECOMP Portal sign-on process has completed, which relies on the AT&T
+   * Global Log On single sign-on process. Checks for the ECOMP cookie (see {@link #EP_SERVICE}).
+   * If found, then searches for a CSP cookie; if not found, for a WebJunction header.
+   *
+   * @param request
+   * @return ATT UID if the ECOMP cookie is present and the sign-on process established an ATT UID;
+   *         else null.
+   */
+  public static String validateEcompSso(HttpServletRequest request) {
+    boolean isOnapEnabled = PortalAuthenticationConfig.getInstance().getIsOnapEnabled();
+    if (isOnapEnabled) {
+      if (isEPServiceCookiePresent(request)) {
+        /* This is a "temporary" fix until proper separation
+         * between closed source and open source code is reached */
+        return ONAP_ENABLED;
+      }
+      return null;
+    } else {
+      return getLoginIdFromCookie(request);
+    }
+  }
+
+  /**
+   * Searches the specified request for the CSP cookie, decodes it and gets the ATT UID.
+   *
+   * @param request
+   * @return ATTUID if the cookie is present in the request and can be decoded successfully (expired
+   *         cookies do not decode); else null.
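+   * <p>As implemented in {@link #getCspData}, the decrypted CSP cookie is a pipe-delimited
+   * record, and the ATT UID is read from field index 5.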
+ */ + private static String getLoginIdFromCookie(HttpServletRequest request) { + String attuid = null; + try { + String[] cspFields = getCspData(request); + if (cspFields != null && cspFields.length > 5) + attuid = cspFields[5]; + } catch (Throwable t) { + LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, + "getLoginIdFromCookie failed " + t.getLocalizedMessage()); + } + return attuid; + } + + /** + * Searches the specified request for the CSP cookie, decodes it and parses it to a String array. + * + * @param request + * @return Array of String as parsed from the cookie; null if the cookie is not present; empty + * array if the cookie could not be decoded. + */ + private static String[] getCspData(HttpServletRequest request) { + final String cookieName = PortalApiProperties.getProperty(CSP_COOKIE_NAME); + if (cookieName == null) { + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, + "getCspData: Failed to get property " + CSP_COOKIE_NAME); + return null; + } + Cookie csp = getCookie(request, cookieName); + if (csp == null) { + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "getCspData failed to get cookie " + cookieName); + return null; + } + final String cspCookieEncrypted = csp.getValue(); + + String gateKeeperProdKey = PortalApiProperties.getProperty(CSP_GATE_KEEPER_PROD_KEY); + if (gateKeeperProdKey == null) { + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, + "getCspData: failed to get property " + CSP_GATE_KEEPER_PROD_KEY); + } + + String cspCookieDecrypted = ""; + try { + cspCookieDecrypted = CipherUtil.decrypt(cspCookieEncrypted,""); + } catch (Exception e) { + LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, + "decrypting cookie failed " + e.getLocalizedMessage()); + } + + String[] cspData = cspCookieDecrypted.split("\\|"); + return cspData; + } +} diff --git a/src/main/java/org/openecomp/sparky/security/SecurityContextFactory.java b/src/main/java/org/openecomp/sparky/security/SecurityContextFactory.java new file mode 100644 index 0000000..3144dee --- /dev/null +++ b/src/main/java/org/openecomp/sparky/security/SecurityContextFactory.java @@ -0,0 +1,79 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.security; + +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; + +import javax.net.ssl.SSLContext; + +/** + * A factory for creating SecurityContext objects. + */ +public interface SecurityContextFactory { + + public String getSslAlgorithm(); + + public void setSslAlgorithm(String sslAlgorithm); + + public String getKeyManagerAlgortihm(); + + public void setKeyManagerAlgortihm(String keyManagerAlgortihm); + + public String getKeyStoreType(); + + public void setKeyStoreType(String keyStoreType); + + public boolean isServerCertificationChainValidationEnabled(); + + public void setServerCertificationChainValidationEnabled( + boolean serverCertificationChainValidationEnabled); + + public String getTrustStoreFileName(); + + public void setTrustStoreFileName(String filename); + + public String getClientCertPassword(); + + public void setClientCertPassword(String password); + + public void setClientCertFileInputStream(FileInputStream fis); + + public void setClientCertFileName(String filename) throws IOException; + + public FileInputStream getClientCertFileInputStream(); + + public SSLContext getSecureContext() + throws KeyManagementException, NoSuchAlgorithmException, FileNotFoundException, + KeyStoreException, CertificateException, IOException, UnrecoverableKeyException; + +} diff --git a/src/main/java/org/openecomp/sparky/security/SecurityContextFactoryImpl.java b/src/main/java/org/openecomp/sparky/security/SecurityContextFactoryImpl.java new file mode 100644 index 0000000..1fb03a7 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/security/SecurityContextFactoryImpl.java @@ -0,0 +1,206 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.security; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; + +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; + +/** + * The Class SecurityContextFactoryImpl. + */ +public class SecurityContextFactoryImpl implements SecurityContextFactory { + + protected String sslAlgorithm; + protected String keyManagerAlgortihm; + protected String keyStoreType; + protected boolean serverCertificationChainValidationEnabled; + protected String trustStoreFileName; + protected String clientCertPassword; + protected FileInputStream clientCertFileInputStream; + protected String clientCertFileName; + protected byte[] clientCertBytes; + + /** + * Instantiates a new security context factory impl. + */ + public SecurityContextFactoryImpl() { + this.sslAlgorithm = "TLS"; + this.keyManagerAlgortihm = "SunX509"; + this.keyStoreType = "PKCS12"; + this.serverCertificationChainValidationEnabled = false; + this.clientCertFileInputStream = null; + this.clientCertFileName = null; + } + + @Override + public String getSslAlgorithm() { + return sslAlgorithm; + } + + @Override + public void setSslAlgorithm(String sslAlgorithm) { + this.sslAlgorithm = sslAlgorithm; + } + + @Override + public String getKeyManagerAlgortihm() { + return keyManagerAlgortihm; + } + + @Override + public void setKeyManagerAlgortihm(String keyManagerAlgortihm) { + this.keyManagerAlgortihm = keyManagerAlgortihm; + } + + @Override + public String getKeyStoreType() { + return keyStoreType; + } + + @Override + public void setKeyStoreType(String keyStoreType) { + this.keyStoreType = keyStoreType; + } + + @Override + public boolean isServerCertificationChainValidationEnabled() { + return serverCertificationChainValidationEnabled; + } + + @Override + public void setServerCertificationChainValidationEnabled( + boolean serverCertificationChainValidationEnabled) { + this.serverCertificationChainValidationEnabled = serverCertificationChainValidationEnabled; + } + + @Override + public void setClientCertFileName(String filename) throws IOException { + this.clientCertFileName = filename; + + if (filename == null) { + this.clientCertBytes = null; + } else { + this.clientCertBytes = Files.readAllBytes(new File(filename).toPath()); + } + } + + @Override + public void setClientCertFileInputStream(FileInputStream fis) { + this.clientCertFileInputStream = fis; + } + + @Override + public FileInputStream getClientCertFileInputStream() { + return this.clientCertFileInputStream; + } + + @Override + public SSLContext getSecureContext() throws KeyManagementException, NoSuchAlgorithmException, + KeyStoreException, CertificateException, IOException, UnrecoverableKeyException { + + TrustManager[] trustAllCerts = null; + + if (serverCertificationChainValidationEnabled) { + + System.setProperty("javax.net.ssl.trustStore", trustStoreFileName); + + } else { + + // Create a trust manager that does not validate certificate chains + trustAllCerts = new TrustManager[] {new X509TrustManager() { + @Override + public X509Certificate[] 
getAcceptedIssuers() { + return null; + } + + @Override + public void checkClientTrusted(X509Certificate[] certs, String authType) {} + + @Override + public void checkServerTrusted(X509Certificate[] certs, String authType) {} + } }; + } + + KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyManagerAlgortihm); + + KeyStore ks = KeyStore.getInstance(keyStoreType); + + char[] pwd = null; + if (clientCertPassword != null) { + pwd = clientCertPassword.toCharArray(); + } + + if (clientCertBytes != null) { + ks.load(new ByteArrayInputStream(clientCertBytes), pwd); + } else { + ks.load(null, pwd); + } + + kmf.init(ks, pwd); + + SSLContext ctx = SSLContext.getInstance(sslAlgorithm); + ctx.init(kmf.getKeyManagers(), trustAllCerts, null); + + return ctx; + + } + + @Override + public String getTrustStoreFileName() { + return this.trustStoreFileName; + } + + @Override + public void setTrustStoreFileName(String filename) { + this.trustStoreFileName = filename; + } + + @Override + public String getClientCertPassword() { + return this.clientCertPassword; + } + + @Override + public void setClientCertPassword(String password) { + this.clientCertPassword = password; + } + +} diff --git a/src/main/java/org/openecomp/sparky/security/filter/CspCookieFilter.java b/src/main/java/org/openecomp/sparky/security/filter/CspCookieFilter.java new file mode 100644 index 0000000..7140e96 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/security/filter/CspCookieFilter.java @@ -0,0 +1,271 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.security.filter; + +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.InetAddress; +import java.net.URLDecoder; +import java.net.URLEncoder; +import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +import org.openecomp.cl.mdc.MdcContext; + +// import esGateKeeper.esGateKeeper; + +/** + * Redirects to the AT&T global login page if the user is not authenticated.<br> + * Filter properties need to be configured in: csp-cookie-filter.properties + */ +public class CspCookieFilter implements Filter { + + /** Redirect URL for the login page. */ + private String globalLoginUrl; + + /** Application identifier. */ + private String applicationId; + + /** Gatekeeper environment setting (development or production). */ + private String gateKeeperEnvironment; + + private static final String FILTER_PARAMETER_CONFIG = "config"; + private static final String PROPERTY_GLOBAL_LOGIN_URL = "global.login.url"; + private static final String PROPERTY_APPLICATION_ID = "application.id"; + private static final String PROPERTY_GATEKEEPER_ENVIRONMENT = "gatekeeper.environment"; + // valid open redirect domains + private List<String> redirectDomains = new ArrayList<>(); + private static final String PROPERTY_REDIRECT_DOMAINS = "redirect-domain"; + + /** Needed by esGateKeeper, does not accept any other value. 
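+   * (The esGateKeeper call itself is currently commented out in {@code doFilter} below.)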
+   */
+  private static final String GATEKEEPER_ACCOUNT_NAME = "CSP";
+
+  private static final Logger LOG = LoggerFactory.getInstance().getLogger(CspCookieFilter.class);
+
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+   */
+  @Override
+  public void init(FilterConfig filterConfig) throws ServletException {
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "CspCookieFilter", "", "Init", "");
+
+    try {
+      setConfigurationProperties(filterConfig);
+    } catch (IOException exc) {
+      LOG.error(AaiUiMsgs.ERROR_CSP_CONFIG_FILE);
+      throw new ServletException(exc);
+    }
+  }
+
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+   */
+  @Override
+  public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
+      throws IOException, ServletException {
+    HttpServletRequest request = (HttpServletRequest) req;
+    HttpServletResponse response = (HttpServletResponse) res;
+
+    Cookie[] cookies = request.getCookies();
+    if ((cookies == null) || (cookies.length == 0)) {
+      doLogin(request, response);
+      return;
+    }
+
+    /*
+     * String attEsSec = getSecurityCookie(cookies);
+     *
+     * if (attESSec == null || attESSec.length() == 0) { doLogin(request, response); return; }
+     *
+     * String attESSecUnEncrypted = esGateKeeper.esGateKeeper(attESSec, GATEKEEPER_ACCOUNT_NAME,
+     * gateKeeperEnvironment); if (attESSecUnEncrypted == null) { doLogin(request, response); } else
+     * {
+     */
+    // LOG.info("User has valid cookie");
+    chain.doFilter(request, response);
+    // }
+  }
+
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#destroy()
+   */
+  @Override
+  public void destroy() {}
+
+  /**
+   * Sets all required properties needed by this filter.
+   *
+   * @param filterConfig the filter configuration defined in the application web.xml
+   * @throws IOException if the properties failed to load.
+   */
+  private void setConfigurationProperties(FilterConfig filterConfig) throws IOException {
+    // try-with-resources so the properties stream is always closed
+    try (InputStream inputStream =
+        new FileInputStream(TierSupportUiConstants.STATIC_CONFIG_APP_LOCATION
+            + filterConfig.getInitParameter(FILTER_PARAMETER_CONFIG))) {
+      Properties cspProperties = new Properties();
+      cspProperties.load(inputStream);
+      globalLoginUrl = cspProperties.getProperty(PROPERTY_GLOBAL_LOGIN_URL);
+      applicationId = cspProperties.getProperty(PROPERTY_APPLICATION_ID);
+      gateKeeperEnvironment = cspProperties.getProperty(PROPERTY_GATEKEEPER_ENVIRONMENT);
+      redirectDomains =
+          Arrays.asList(cspProperties.getProperty(PROPERTY_REDIRECT_DOMAINS).split(","));
+    }
+  }
+
+  /**
+   * Returns the attESSec cookie if found in the client.
+   *
+   * @param cookies the cookies available in the client
+   * @return the attESSec authentication cookie generated by the login page.
+   */
+  private String getSecurityCookie(Cookie[] cookies) {
+    String attEsSec = null;
+    for (int i = 0; i < cookies.length; i++) {
+      Cookie thisCookie = cookies[i];
+      String cookieName = thisCookie.getName();
+
+      if ("attESSec".equals(cookieName)) {
+        attEsSec = thisCookie.getValue();
+        break;
+      }
+    }
+    return attEsSec;
+  }
+
+  /**
+   * Redirects to the AT&T global login page. If this is an AJAX request it returns an unauthorized
+   * HTTP error in the response.
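+   * <p>The redirect target built by {@code createRedirectUrl} has the form (illustrative values):
+   * <pre> {globalLoginUrl}?retURL={encoded original request URL}&sysName={applicationId} </pre>
+   * and the {@code retURL} domain is checked against the configured redirect-domain list before
+   * the redirect is sent.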
+   *
+   * @param request the filter request object
+   * @param response the filter response object
+   * @throws IOException if there is an error setting the error response
+   */
+  private void doLogin(HttpServletRequest request, HttpServletResponse response)
+      throws IOException {
+    if (isAjaxRequest(request)) {
+      response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
+          "User is not authorized. Please login to application");
+    } else {
+      // Fix for Safari 7.0.2 onwards to avoid login page cache
+      response.addHeader("Cache-Control", "no-cache, no-store");
+      String redirectURL = createRedirectUrl(request);
+      if (this.isValidRedirectURL(redirectURL)) {
+        response.sendRedirect(redirectURL);
+        LOG.debug(AaiUiMsgs.VALID_REDIRECT_URL, redirectURL);
+      } else {
+        response.sendError(400, "Bad redirect URL: " + redirectURL);
+        LOG.error(AaiUiMsgs.INVALID_REDIRECT_URL, redirectURL);
+      }
+    }
+  }
+
+  /**
+   * Checks whether a redirect URL is valid.
+   * @param url URL to validate
+   * @return true if URL is a valid redirect URL, false otherwise
+   */
+  private boolean isValidRedirectURL(String url) {
+    String redirectTo = url.substring(url.indexOf("?retURL=") + "?retURL=".length());
+    try {
+      redirectTo = URLDecoder.decode(redirectTo, StandardCharsets.UTF_8.toString());
+    } catch (UnsupportedEncodingException e) {
+      LOG.error(AaiUiMsgs.UNSUPPORTED_URL_ENCODING, e.getLocalizedMessage());
+      return false;
+    }
+    for (String domain : this.redirectDomains) {
+      if (redirectTo.endsWith(domain)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+
+  /**
+   * Returns <code>true</code> if the request is an AJAX request.
+   *
+   * @param request the filter request object
+   * @return <code>true</code> if the request is an AJAX request.
+   */
+  private boolean isAjaxRequest(HttpServletRequest request) {
+    String headerValue = request.getHeader("X-Requested-With");
+    return "XMLHttpRequest".equals(headerValue);
+  }
+
+  /**
+   * Returns the redirection URL to the AT&T Global login page.
+   *
+   * @param request the request
+   * @return the login redirect URL
+   * @throws UnsupportedEncodingException the unsupported encoding exception
+   */
+  private String createRedirectUrl(HttpServletRequest request) throws UnsupportedEncodingException {
+    String returnUrl = getReturnUrl(request);
+
+    return globalLoginUrl + "?retURL=" + returnUrl + "&sysName=" + applicationId;
+  }
+
+  /**
+   * Gets the URL encoded return URL.
+   *
+   * @param request the HTTP request
+   * @return an encoded URL to return to following login
+   * @throws UnsupportedEncodingException the unsupported encoding exception
+   */
+  private String getReturnUrl(HttpServletRequest request) throws UnsupportedEncodingException {
+    StringBuffer retUrl = request.getRequestURL();
+    String urlParams = request.getQueryString();
+    if (urlParams != null) {
+      retUrl.append("?" + urlParams);
+    }
+    return URLEncoder.encode(retUrl.toString(), StandardCharsets.UTF_8.toString());
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/security/filter/LoginFilter.java b/src/main/java/org/openecomp/sparky/security/filter/LoginFilter.java
new file mode 100644
index 0000000..3ab8990
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/security/filter/LoginFilter.java
@@ -0,0 +1,230 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.security.filter; + +import java.io.IOException; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; +import javax.ws.rs.core.HttpHeaders; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.portalsdk.core.onboarding.listener.PortalTimeoutHandler; +import org.openecomp.portalsdk.core.onboarding.util.PortalApiConstants; +import org.openecomp.portalsdk.core.onboarding.util.PortalApiProperties; +import org.openecomp.portalsdk.core.onboarding.util.SSOUtil; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.security.EcompSso; +import org.openecomp.sparky.security.portal.config.PortalAuthenticationConfig; + +/** + * This filter checks every request for proper ECOMP Portal single sign on initialization. The + * possible paths and actions: + * <OL> + * <LI>User starts at an app page via a bookmark. No ECOMP portal cookie is set. Redirect there to + * get one; then continue as below. + * <LI>User starts at ECOMP Portal and goes to app. Alternately, the user's session times out and + * the user hits refresh. The ECOMP Portal cookie is set, but there is no valid session. Create one + * and publish info. + * <LI>User has valid ECOMP Portal cookie and session. Reset the max idle in that session. + * </OL> + * <P> + * Notes: + * <UL> + * <LI>Portal Session should be up prior to App Session</LI> + * <LI>If App Session Expires or if EPService cookie is unavailable, we need to redirect to Portal. + * <LI>Method {@link #initiateSessionMgtHandler(HttpServletRequest)} should be called for Session + * management when the initial session is created + * <LI>While redirecting, the cookie "redirectUrl" should also be set so that Portal knows where to + * forward the request to once the Portal Session is created and EPService cookie is set. + * <LI>Method {@link #resetSessionMaxIdleTimeOut(HttpServletRequest)} should be called for every + * request to reset the MaxInactiveInterval to the right value. 
+ * </UL> + * <P> + * This filter incorporates most features of the SDK application's SessionTimeoutInterceptor and + * SingleSignOnController classes + */ +public class LoginFilter implements Filter { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(LoginFilter.class); + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + // Validate that app has provided useful portal properties + if (PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL) == null) { + throw new ServletException("Failed to find URL in portal.properties"); + } + + PortalAuthenticationConfig appProperties; + try { + appProperties = PortalAuthenticationConfig.getInstance(); + } catch (Exception ex) { + throw new ServletException("Failed to get properties", ex); + } + + String restUser = appProperties.getUsername(); + String restPassword = appProperties.getPassword(); + if (restUser == null || restPassword == null) { + throw new ServletException("Failed to find user and/or password from properties"); + } + } + + @Override + public void destroy() { + // No resources to release + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, + * javax.servlet.FilterChain) + */ + @Override + public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) + throws ServletException, IOException { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) res; + + // Choose authentication appropriate for the request. + final String restApiURI = request.getContextPath() + PortalApiConstants.API_PREFIX; + if (request.getRequestURI().startsWith(restApiURI)) { + // REST servlet checks credentials + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: delegating auth to REST servlet for request " + request.getRequestURI()); + chain.doFilter(request, response); + } else { + // All other requests require ECOMP Portal authentication + if (EcompSso.validateEcompSso(request) == null) { + String redirectURL, logMessage; + + // Redirect to Portal UI + redirectURL = PortalApiProperties.getProperty(PortalApiConstants.ECOMP_REDIRECT_URL); + logMessage = "Unauthorized login attempt."; + + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, + logMessage + + " | Remote IP: " + request.getRemoteAddr() + + " | User agent: " + request.getHeader(HttpHeaders.USER_AGENT) + + " | Request URL: " + request.getRequestURL() + + " | Redirecting to: " + redirectURL); + + response.sendRedirect(redirectURL); + } else { + HttpSession session = request.getSession(false); + if (session == null) { + // New session + session = request.getSession(true); + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: created new session " + session.getId()); + initiateSessionMgtHandler(request); + } else { + // Existing session + LOG.debug(AaiUiMsgs.LOGIN_FILTER_DEBUG, "doFilter: resetting idle in existing session " + session.getId()); + resetSessionMaxIdleTimeOut(request); + } + // Pass request back down the filter chain + chain.doFilter(request, response); + } + } + } + + /** + * Publishes information about the session. 
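+   * <p>Specifically, it pairs the ECOMP Portal session ID (taken from the {@code EPService}
+   * cookie) with the local container session ID, stores the session's max inactive interval,
+   * and registers the pair with {@code PortalTimeoutHandler} so the Portal can coordinate
+   * session timeouts.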
+   *
+   * @param request
+   */
+  private void initiateSessionMgtHandler(HttpServletRequest request) {
+    String portalJSessionId = getPortalJSessionId(request);
+    String jSessionId = getJSessionId(request);
+    storeMaxInactiveTime(request);
+    PortalTimeoutHandler.sessionCreated(portalJSessionId, jSessionId, request.getSession(false));
+  }
+
+  /**
+   * Gets the ECOMP Portal service cookie value.
+   *
+   * @param request
+   * @return Cookie value, or null if not found.
+   */
+  private String getPortalJSessionId(HttpServletRequest request) {
+    Cookie ep = EcompSso.getCookie(request, EcompSso.EP_SERVICE);
+    return ep == null ? null : ep.getValue();
+  }
+
+  /**
+   * Gets the container session ID.
+   *
+   * @param request
+   * @return Session ID, or null if no session.
+   */
+  private String getJSessionId(HttpServletRequest request) {
+    HttpSession session = request.getSession();
+    return session == null ? null : session.getId();
+  }
+
+  /**
+   * Sets the global session's max idle time to the session's max inactive interval.
+   *
+   * @param request
+   */
+  private void storeMaxInactiveTime(HttpServletRequest request) {
+    HttpSession session = request.getSession(false);
+    if (session != null
+        && session.getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME) == null) {
+      session.setAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME,
+          session.getMaxInactiveInterval());
+    }
+  }
+
+  /**
+   * Sets the session's max inactive interval.
+   *
+   * @param request
+   */
+  private void resetSessionMaxIdleTimeOut(HttpServletRequest request) {
+    try {
+      HttpSession session = request.getSession(false);
+      if (session != null) {
+        final Object maxIdleAttribute = session
+            .getAttribute(PortalApiConstants.GLOBAL_SESSION_MAX_IDLE_TIME);
+        if (maxIdleAttribute != null) {
+          session.setMaxInactiveInterval(Integer.parseInt(maxIdleAttribute.toString()));
+        }
+      }
+    } catch (Exception e) {
+      LOG.info(AaiUiMsgs.LOGIN_FILTER_INFO, "resetSessionMaxIdleTimeOut: failed to set session max inactive interval - " + e.getLocalizedMessage());
+    }
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/security/portal/PortalRestAPIServiceImpl.java b/src/main/java/org/openecomp/sparky/security/portal/PortalRestAPIServiceImpl.java
new file mode 100644
index 0000000..ce43ea2
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/security/portal/PortalRestAPIServiceImpl.java
@@ -0,0 +1,229 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */ + +package org.openecomp.sparky.security.portal; + +import java.io.File; +import java.io.IOException; +import java.text.MessageFormat; +import java.util.LinkedHashSet; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; + +import org.openecomp.portalsdk.core.onboarding.crossapi.IPortalRestAPIService; +import org.openecomp.portalsdk.core.onboarding.exception.PortalAPIException; +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; +import org.openecomp.sparky.security.EcompSso; +import org.openecomp.sparky.security.portal.config.PortalAuthenticationConfig; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Responds to ECOMP Portal's REST queries for user and role information and management. + */ +public class PortalRestAPIServiceImpl implements IPortalRestAPIService { + + private static final Logger LOG = LoggerFactory.getLogger(PortalRestAPIServiceImpl.class); + private static final String ERROR_MESSAGE = "Failed to {0} user [loginId:{1}]"; + + private UserManager userManager; + + /** + * Initialise user manager. + */ + public PortalRestAPIServiceImpl() { + userManager = new UserManager(new File(TierSupportUiConstants.USERS_FILE_LOCATION)); + } + + ///////////////////////////////////////////////////////////////////////////// + // User interface + ///////////////////////////////////////////////////////////////////////////// + + /* + * (non-Javadoc) + * + * @see + * com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#pushUser(com.att.fusion.core. + * restful.domain.EcompUser) + */ + @Override + public void pushUser(EcompUser user) throws PortalAPIException { + LOG.debug("Push user [loginId:" + user.getLoginId() + "]"); + + if (userManager.getUser(user.getLoginId()).isPresent()) { + String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId()) + + ", user is already stored"; + LOG.error(message); + throw new PortalAPIException(message); + } + + try { + userManager.pushUser(user); + } catch (IOException e) { + String message = getMessage(ERROR_MESSAGE, "push", user.getLoginId()); + LOG.error(message, e); + throw new PortalAPIException(message, e); + } + } + + /* + * (non-Javadoc) + * + * @see com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#editUser(java.lang.String, + * com.att.fusion.core.restful.domain.EcompUser) + */ + @Override + public void editUser(String loginId, EcompUser user) throws PortalAPIException { + LOG.debug("Edit user [loginId:" + loginId + "]"); + + userManager.getUser(loginId).orElseThrow(() -> { + String message = getMessage(ERROR_MESSAGE, "edit", loginId) + ", unknown user"; + LOG.error(message); + return new PortalAPIException(message); + }); + + try { + userManager.editUser(loginId, user); + } catch (IOException e) { + String message = getMessage(ERROR_MESSAGE, "edit", loginId); + LOG.error(message, e); + throw new PortalAPIException(message, e); + } + } + + /* + * (non-Javadoc) + * + * @see com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#getUser(java.lang.String) + */ + @Override + public EcompUser getUser(String loginId) throws PortalAPIException { + LOG.debug("Get user [loginId:" + loginId + "]"); + return userManager.getUser(loginId).orElseThrow(() -> { + String message = getMessage(ERROR_MESSAGE, "get", loginId) + ", unknown user"; + LOG.error(message); + return new PortalAPIException(message); + }); + } + + /* + * (non-Javadoc) + 
* + * @see com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#getUsers() + */ + @Override + public List<EcompUser> getUsers() throws PortalAPIException { + LOG.debug("Get users"); + return userManager.getUsers(); + } + + @Override + public String getUserId(HttpServletRequest request) throws PortalAPIException { + return EcompSso.validateEcompSso(request); + } + + ///////////////////////////////////////////////////////////////////////////// + // Role interface + ///////////////////////////////////////////////////////////////////////////// + + /* + * (non-Javadoc) + * + * @see com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#getAvailableRoles() + */ + @Override + public List<EcompRole> getAvailableRoles() throws PortalAPIException { + LOG.debug("Get available roles"); + return UserManager.getRoles(); + } + + /* + * (non-Javadoc) + * + * @see + * com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#getUserRoles(java.lang.String) + */ + @Override + public List<EcompRole> getUserRoles(String loginId) throws PortalAPIException { + LOG.debug("Get user roles"); + return userManager.getUserRoles(loginId); + } + + /* + * (non-Javadoc) + * + * @see + * com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#pushUserRole(java.lang.String, + * java.util.List) + */ + @Override + public void pushUserRole(String loginId, List<EcompRole> roles) throws PortalAPIException { + LOG.debug("Push user role [loginId:" + loginId + "]"); + try { + EcompUser user = getUser(loginId); + if (roles != null) { + user.setRoles(new LinkedHashSet<EcompRole>(roles)); + } else { + user.setRoles(new LinkedHashSet<EcompRole>()); + } + editUser(loginId, user); + } catch (PortalAPIException e) { + String message = getMessage(ERROR_MESSAGE, "push role", loginId); + LOG.error(message); + throw new PortalAPIException(message, e); + } + } + + ///////////////////////////////////////////////////////////////////////////// + // Security interface + ///////////////////////////////////////////////////////////////////////////// + + /* + * (non-Javadoc) + * + * @see + * com.att.fusion.core.onboarding.crossapi.IPortalRestAPIService#isAppAuthenticated(javax.servlet. + * http.HttpServletRequest) + */ + @Override + public boolean isAppAuthenticated(HttpServletRequest request) throws PortalAPIException { + LOG.debug("Authentication request"); + PortalAuthenticationConfig config = PortalAuthenticationConfig.getInstance(); + String restUsername = request.getHeader(PortalAuthenticationConfig.PROP_USERNAME); + String restPassword = request.getHeader(PortalAuthenticationConfig.PROP_PASSWORD); + return restUsername != null && restPassword != null && restUsername.equals(config.getUsername()) + && restPassword.equals(config.getPassword()); + } + + private String getMessage(String message, Object... args) { + MessageFormat formatter = new MessageFormat(""); + formatter.applyPattern(message); + return formatter.format(args); + } + +}
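A minimal usage sketch of the user-management flow implemented above, assuming the users file at TierSupportUiConstants.USERS_FILE_LOCATION is writable and that EcompRole exposes numeric-id and name setters; the login id, role id, and role name are hypothetical placeholders:

import java.util.Collections;

import org.openecomp.portalsdk.core.onboarding.exception.PortalAPIException;
import org.openecomp.portalsdk.core.restful.domain.EcompRole;
import org.openecomp.portalsdk.core.restful.domain.EcompUser;
import org.openecomp.sparky.security.portal.PortalRestAPIServiceImpl;

public class PortalRestApiSketch {

  public static void main(String[] args) {
    try {
      PortalRestAPIServiceImpl api = new PortalRestAPIServiceImpl();

      // Provision a user, as Portal would via pushUser().
      EcompUser user = new EcompUser();
      user.setLoginId("demo"); // hypothetical login id
      api.pushUser(user);

      // Assign a role; pushUserRole() fetches the user and re-stores it via editUser().
      EcompRole role = new EcompRole();
      role.setId(1L);       // hypothetical role id
      role.setName("View"); // hypothetical role name
      api.pushUserRole("demo", Collections.singletonList(role));

      // Read the stored roles back.
      api.getUserRoles("demo").forEach(r -> System.out.println(r.getName()));
    } catch (PortalAPIException e) {
      e.printStackTrace();
    }
  }
}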
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/security/portal/UserManager.java b/src/main/java/org/openecomp/sparky/security/portal/UserManager.java new file mode 100644 index 0000000..bbc4ee3 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/security/portal/UserManager.java @@ -0,0 +1,171 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.security.portal; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Type; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.stream.Collectors; + +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.portalsdk.core.restful.domain.EcompUser; +import org.openecomp.sparky.security.portal.config.RolesConfig; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; + +/** + * Basic file based user storage. + */ +public class UserManager { + + private File usersFile; + + private static final ReadWriteLock LOCK = new ReentrantReadWriteLock(true); + private static final Lock READ_LOCK = LOCK.readLock(); + private static final Lock WRITE_LOCK = LOCK.writeLock(); + + private static final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); + + /** + * + * @param usersFile a file to store the users + */ + public UserManager(File usersFile) { + this.usersFile = usersFile; + } + + /** + * Returns all users stored. + * + * @return a list of users. + */ + public List<EcompUser> getUsers() { + Type collectionType = new TypeToken<List<EcompUser>>() { + }.getType(); + + Optional<String> users = read(usersFile); + if (users.isPresent()) { + return GSON.fromJson(users.get(), collectionType); + } + + return new ArrayList<>(); + } + + /** + * Returns a stored user. + * + * @param loginId the identifier of the user + * @return an optional user. + */ + public Optional<EcompUser> getUser(String loginId) { + if (!getUsers().isEmpty()) { + return getUsers().stream().filter(u -> loginId.equals(u.getLoginId())).findFirst(); + } + return Optional.empty(); + } + + /** + * Stores a user if not already stored. 
+ * + * @param user the user to be stored + * @throws IOException + */ + public void pushUser(EcompUser user) throws IOException { + WRITE_LOCK.lock(); + try { + if (!getUser(user.getLoginId()).isPresent()) { + addUser(getUsers(), user); + } + } finally { + WRITE_LOCK.unlock(); + } + } + + /** + * Replaces an existing user. + * + * @param loginId the id of the user + * @param user the new user details + * @throws IOException + */ + public void editUser(String loginId, EcompUser user) throws IOException { + WRITE_LOCK.lock(); + try { + if (getUser(loginId).isPresent()) { + List<EcompUser> users = getUsers().stream().filter(u -> !u.getLoginId().equals(loginId)) + .collect(Collectors.toList()); + addUser(users, user); + } + } finally { + WRITE_LOCK.unlock(); + } + } + + /** + * Gets the roles assigned to a user. + * + * @param loginId the id of the user + * @return the assigned roles + */ + public List<EcompRole> getUserRoles(String loginId) { + List<EcompRole> roles = new ArrayList<>(); + roles.addAll(getUser(loginId).orElseGet(EcompUser::new).getRoles()); + return roles; + } + + public static List<EcompRole> getRoles() { + return RolesConfig.getInstance().getRoles(); + } + + private void addUser(List<EcompUser> users, EcompUser user) throws IOException { + users.add(user); + write(users); + } + + private void write(List<EcompUser> users) throws IOException { + Files.write(usersFile.toPath(), GSON.toJson(users).getBytes()); + } + + private Optional<String> read(File file) { + READ_LOCK.lock(); + try { + return Optional.of(new String(Files.readAllBytes(file.toPath()))); + } catch (IOException e) { // NOSONAR + return Optional.empty(); + } finally { + READ_LOCK.unlock(); + } + } +}
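A short sketch of the file-based storage semantics, assuming a scratch directory is acceptable; the file name and login id are hypothetical. Note that the backing file is created lazily on the first write, and pushUser() is a no-op for a login id that is already stored:

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

import org.openecomp.portalsdk.core.restful.domain.EcompUser;
import org.openecomp.sparky.security.portal.UserManager;

public class UserManagerSketch {

  public static void main(String[] args) throws IOException {
    // Back the manager with a file that does not exist yet; reads of a
    // missing file yield an empty user list rather than an error.
    File usersFile = new File(Files.createTempDirectory("sparky").toFile(), "users.json");
    UserManager manager = new UserManager(usersFile);

    EcompUser user = new EcompUser();
    user.setLoginId("demo"); // hypothetical login id

    manager.pushUser(user); // stores the user as pretty-printed JSON
    manager.pushUser(user); // ignored: the login id is already present

    System.out.println(manager.getUser("demo").isPresent()); // true
    System.out.println(manager.getUsers().size());           // 1
  }
}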
\ No newline at end of file
diff --git a/src/main/java/org/openecomp/sparky/security/portal/config/PortalAuthenticationConfig.java b/src/main/java/org/openecomp/sparky/security/portal/config/PortalAuthenticationConfig.java
new file mode 100644
index 0000000..c217615
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/security/portal/config/PortalAuthenticationConfig.java
@@ -0,0 +1,99 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.security.portal.config;
+
+import java.util.Properties;
+
+import org.openecomp.sparky.util.ConfigHelper;
+import org.openecomp.sparky.util.Encryptor;
+import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants;
+
+/**
+ * Provides Portal authentication configuration.
+ */
+public class PortalAuthenticationConfig {
+
+  private String username;
+  private String password;
+  private boolean isOnapEnabled;
+
+  public static final String PROP_USERNAME = "username";
+  public static final String PROP_PASSWORD = "password"; // NOSONAR
+  public static final String PROP_IS_ONAP_ENABLED = "onap_enabled"; // NOSONAR
+  private static final String AUTHENTICATION_CONFIG_FILE = TierSupportUiConstants.PORTAL_AUTHENTICATION_FILE_LOCATION;
+
+  private PortalAuthenticationConfig() {
+    // Prevent instantiation
+  }
+
+  private static class PortalAuthenticationConfigHelper {
+    private static final PortalAuthenticationConfig INSTANCE = new PortalAuthenticationConfig();
+
+    private PortalAuthenticationConfigHelper() {
+      // Deliberately empty
+    }
+  }
+
+  /**
+   * Get a singleton instance of the configuration, freshly loaded from the properties file.
+   *
+   * @return the shared configuration instance
+   */
+  public static PortalAuthenticationConfig getInstance() {
+    PortalAuthenticationConfigHelper.INSTANCE.load();
+    return PortalAuthenticationConfigHelper.INSTANCE;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getPassword() {
+    Encryptor encryptor = new Encryptor();
+    return encryptor.decryptValue(password);
+  }
+
+  public boolean getIsOnapEnabled() {
+    return isOnapEnabled;
+  }
+
+  /**
+   * Reload the Portal authentication properties from the configured file location.
+   */
+  public void reload() {
+    load();
+  }
+
+  /**
+   * Load the Portal authentication properties from the configured file location.
+   */
+  private void load() {
+    Properties props = ConfigHelper.loadConfigFromExplicitPath(AUTHENTICATION_CONFIG_FILE);
+    username = props.getProperty(PROP_USERNAME);
+    password = props.getProperty(PROP_PASSWORD);
+    isOnapEnabled = Boolean.parseBoolean(props.getProperty(PROP_IS_ONAP_ENABLED, "true"));
+  }
+}
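A usage sketch; the property keys below are the ones load() reads, while the concrete values are hypothetical:

import org.openecomp.sparky.security.portal.config.PortalAuthenticationConfig;

public class PortalAuthConfigSketch {

  public static void main(String[] args) {
    // The backing properties file is expected to provide:
    //   username=<REST API user>
    //   password=<encrypted value; decrypted on read via Encryptor>
    //   onap_enabled=<true|false; defaults to true when absent>
    PortalAuthenticationConfig config = PortalAuthenticationConfig.getInstance();

    System.out.println("user = " + config.getUsername());
    System.out.println("onapEnabled = " + config.getIsOnapEnabled());

    // getInstance() calls load() on every invocation, so an explicit
    // reload() is only needed to refresh an instance already in hand.
  }
}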
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/security/portal/config/RolesConfig.java b/src/main/java/org/openecomp/sparky/security/portal/config/RolesConfig.java new file mode 100644 index 0000000..18753a4 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/security/portal/config/RolesConfig.java @@ -0,0 +1,91 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.security.portal.config; + +import java.io.IOException; +import java.lang.reflect.Type; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; + +import org.openecomp.portalsdk.core.restful.domain.EcompRole; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.google.gson.Gson; +import com.google.gson.JsonSyntaxException; +import com.google.gson.reflect.TypeToken; + +/** + * Provides roles configuration. + */ +public class RolesConfig { + + private List<EcompRole> roles; + + private static final Gson GSON = new Gson(); + private static final String ROLES_CONFIG_FILE = TierSupportUiConstants.ROLES_FILE_LOCATION; + + private RolesConfig() { + // Prevent instantiation + } + + private static class RolesConfigHelper { + private static final RolesConfig INSTANCE = new RolesConfig(); + + private RolesConfigHelper() { + // Deliberately empty + } + } + + /** + * Get a singleton instance of the configuration. + * + * @return + */ + public static RolesConfig getInstance() { + try { + RolesConfigHelper.INSTANCE.load(); + } catch (Exception e) { + throw new ExceptionInInitializerError(e); + } + + return RolesConfigHelper.INSTANCE; + } + + public List<EcompRole> getRoles() { + return roles; + } + + private void load() throws JsonSyntaxException, IOException, URISyntaxException { + Type collectionType = new TypeToken<List<EcompRole>>() { + }.getType(); + + roles = Collections.unmodifiableList(GSON + .fromJson(new String(Files.readAllBytes(Paths.get(ROLES_CONFIG_FILE))), collectionType)); + } +}
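load() expects the roles file to contain a JSON array of role objects. A self-contained sketch of that parse step using the same Gson/TypeToken pattern, assuming EcompRole carries id and name fields; the inline JSON and its values are hypothetical stand-ins for the file at TierSupportUiConstants.ROLES_FILE_LOCATION:

import java.lang.reflect.Type;
import java.util.List;

import org.openecomp.portalsdk.core.restful.domain.EcompRole;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class RolesConfigSketch {

  public static void main(String[] args) {
    // Hypothetical file content: one role with an id and a name.
    String json = "[{\"id\":1,\"name\":\"View\"}]";

    Type collectionType = new TypeToken<List<EcompRole>>() {}.getType();
    List<EcompRole> roles = new Gson().fromJson(json, collectionType);

    System.out.println(roles.get(0).getName()); // View
  }
}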
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/suggestivesearch/SuggestionEntity.java b/src/main/java/org/openecomp/sparky/suggestivesearch/SuggestionEntity.java new file mode 100644 index 0000000..3badc50 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/suggestivesearch/SuggestionEntity.java @@ -0,0 +1,59 @@ +/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sparky.suggestivesearch;
+
+public class SuggestionEntity {
+ private String route;
+ private String hashId;
+ private String text;
+
+ public SuggestionEntity() {
+ }
+
+ public SuggestionEntity(String route, String hashId, String text) {
+ this.route = route;
+ this.hashId = hashId;
+ this.text = text;
+ }
+
+ public String getRoute() {
+ return route;
+ }
+ public void setRoute(String route) {
+ this.route = route;
+ }
+ public String getHashId() {
+ return hashId;
+ }
+ public void setHashId(String hashId) {
+ this.hashId = hashId;
+ }
+ public String getText() {
+ return text;
+ }
+ public void setText(String text) {
+ this.text = text;
+ }
+}
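SuggestionEntity is a plain value holder for a single search suggestion. A brief sketch; all three field values are hypothetical placeholders:

import org.openecomp.sparky.suggestivesearch.SuggestionEntity;

public class SuggestionEntitySketch {

  public static void main(String[] args) {
    // route, hashId, and text are opaque strings to this class.
    SuggestionEntity suggestion =
        new SuggestionEntity("viewInspect", "hash-123", "generic-vnf demo");

    System.out.println(suggestion.getText()); // generic-vnf demo

    suggestion.setText("generic-vnf demo-2"); // fields are mutable via setters
  }
}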
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java new file mode 100644 index 0000000..14ea149 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/AbstractEntitySynchronizer.java @@ -0,0 +1,559 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.EnumSet; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.openecomp.cl.api.Logger; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.aai.ActiveInventoryEntityStatistics; +import org.openecomp.sparky.dal.aai.ActiveInventoryProcessingExceptionStatistics; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchEntityStatistics; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestOperationalStatistics; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.util.NodeUtils; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class AbstractEntitySynchronizer. + * + * @author davea. + */ +public abstract class AbstractEntitySynchronizer { + + protected static final int VERSION_CONFLICT_EXCEPTION_CODE = 409; + protected static final Integer RETRY_COUNT_PER_ENTITY_LIMIT = new Integer(3); + + protected final Logger logger; + protected ObjectMapper mapper; + protected OxmModelLoader oxmModelLoader; + + /** + * The Enum StatFlag. 
+ */ + protected enum StatFlag { + AAI_REST_STATS, AAI_ENTITY_STATS, AAI_PROCESSING_EXCEPTION_STATS, + AAI_TASK_PROCESSING_STATS, ES_REST_STATS, ES_ENTITY_STATS, ES_TASK_PROCESSING_STATS + } + + protected EnumSet<StatFlag> enabledStatFlags; + + protected ActiveInventoryDataProvider aaiDataProvider; + protected ElasticSearchDataProvider esDataProvider; + + protected ExecutorService synchronizerExecutor; + protected ExecutorService aaiExecutor; + protected ExecutorService esExecutor; + + private RestOperationalStatistics esRestStats; + protected ElasticSearchEntityStatistics esEntityStats; + + private RestOperationalStatistics aaiRestStats; + protected ActiveInventoryEntityStatistics aaiEntityStats; + private ActiveInventoryProcessingExceptionStatistics aaiProcessingExceptionStats; + + private TaskProcessingStats aaiTaskProcessingStats; + private TaskProcessingStats esTaskProcessingStats; + + private TransactionRateController aaiTransactionRateController; + private TransactionRateController esTransactionRateController; + + protected AtomicInteger aaiWorkOnHand; + protected AtomicInteger esWorkOnHand; + protected String synchronizerName; + + protected abstract boolean isSyncDone(); + + public String getActiveInventoryStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) { + sb.append("\n\n ").append("REST Operational Stats:"); + sb.append(aaiRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(aaiEntityStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) { + sb.append("\n\n ").append("Processing Exception Stats:"); + sb.append(aaiProcessingExceptionStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + public String getElasticSearchStatisticsReport() { + + StringBuilder sb = new StringBuilder(128); + + if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) { + sb.append("\n\n ").append("REST Operational Stats:"); + sb.append(esRestStats.getStatisticsReport()); + } + + if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) { + sb.append("\n\n ").append("Entity Stats:"); + sb.append(esEntityStats.getStatisticsReport()); + } + + return sb.toString(); + + } + + /** + * Adds the active inventory stat report. + * + * @param sb the sb + */ + private void addActiveInventoryStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n AAI"); + sb.append(getActiveInventoryStatisticsReport()); + + double currentTps = 0; + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(aaiTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = aaiTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(aaiWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (aaiWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + } + + /** + * Adds the elastic stat report. 
+ * + * @param sb the sb + */ + private void addElasticStatReport(StringBuilder sb) { + + if (sb == null) { + return; + } + + sb.append("\n\n ELASTIC"); + sb.append(getElasticSearchStatisticsReport()); + + double currentTps = 0; + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + sb.append("\n\n ").append("Task Processor Stats:"); + sb.append(esTaskProcessingStats.getStatisticsReport(false, " ")); + + currentTps = esTransactionRateController.getCurrentTps(); + + sb.append("\n ").append("Current TPS: ").append(currentTps); + } + + sb.append("\n ").append("Current WOH: ").append(esWorkOnHand.get()); + + if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) { + if (currentTps > 0) { + double numMillisecondsToCompletion = (esWorkOnHand.get() / currentTps) * 1000; + sb.append("\n ").append("SyncDurationRemaining=") + .append(NodeUtils.getDurationBreakdown((long) numMillisecondsToCompletion)); + } + } + + + } + + /** + * Gets the stat report. + * + * @param syncOpTimeInMs the sync op time in ms + * @param showFinalReport the show final report + * @return the stat report + */ + protected String getStatReport(long syncOpTimeInMs, boolean showFinalReport) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n").append(synchronizerName + " Statistics: ( Sync Operation Duration = " + + NodeUtils.getDurationBreakdown(syncOpTimeInMs) + " )"); + + addActiveInventoryStatReport(sb); + addElasticStatReport(sb); + + if (showFinalReport) { + sb.append("\n\n ").append("Sync Completed!\n"); + } else { + sb.append("\n\n ").append("Sync in Progress...\n"); + } + + return sb.toString(); + + } + + protected String indexName; + protected long syncStartedTimeStampInMs; + + /** + * Instantiates a new abstract entity synchronizer. + * + * @param logger the logger + * @param syncName the sync name + * @param numSyncWorkers the num sync workers + * @param numActiveInventoryWorkers the num active inventory workers + * @param numElasticsearchWorkers the num elasticsearch workers + * @param indexName the index name + * @throws Exception the exception + */ + protected AbstractEntitySynchronizer(Logger logger, String syncName, int numSyncWorkers, + int numActiveInventoryWorkers, int numElasticsearchWorkers, String indexName) + throws Exception { + this.logger = logger; + this.synchronizerExecutor = + NodeUtils.createNamedExecutor(syncName + "-INTERNAL", numSyncWorkers, logger); + this.aaiExecutor = + NodeUtils.createNamedExecutor(syncName + "-AAI", numActiveInventoryWorkers, logger); + this.esExecutor = + NodeUtils.createNamedExecutor(syncName + "-ES", numElasticsearchWorkers, logger); + this.mapper = new ObjectMapper(); + this.oxmModelLoader = OxmModelLoader.getInstance(); + this.indexName = indexName; + this.esRestStats = new RestOperationalStatistics(); + this.esEntityStats = new ElasticSearchEntityStatistics(oxmModelLoader); + this.aaiRestStats = new RestOperationalStatistics(); + this.aaiEntityStats = new ActiveInventoryEntityStatistics(oxmModelLoader); + this.aaiProcessingExceptionStats = new ActiveInventoryProcessingExceptionStatistics(); + this.aaiTaskProcessingStats = + new TaskProcessingStats(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); + this.esTaskProcessingStats = + new TaskProcessingStats(ElasticSearchConfig.getConfig().getProcessorConfig()); + + this.aaiTransactionRateController = + new TransactionRateController(ActiveInventoryConfig.getConfig().getTaskProcessorConfig()); + this.esTransactionRateController = + new 
TransactionRateController(ElasticSearchConfig.getConfig().getProcessorConfig());
+
+    this.aaiWorkOnHand = new AtomicInteger(0);
+    this.esWorkOnHand = new AtomicInteger(0);
+
+    enabledStatFlags = EnumSet.allOf(StatFlag.class);
+
+    this.synchronizerName = "Abstract Entity Synchronizer";
+
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "AbstractEntitySynchronizer", "", "Sync", "");
+
+  }
+
+  /**
+   * Inc active inventory work on hand counter.
+   */
+  protected void incActiveInventoryWorkOnHandCounter() {
+    aaiWorkOnHand.incrementAndGet();
+  }
+
+  /**
+   * Dec active inventory work on hand counter.
+   */
+  protected void decActiveInventoryWorkOnHandCounter() {
+    aaiWorkOnHand.decrementAndGet();
+  }
+
+  /**
+   * Inc elastic search work on hand counter.
+   */
+  protected void incElasticSearchWorkOnHandCounter() {
+    esWorkOnHand.incrementAndGet();
+  }
+
+  /**
+   * Dec elastic search work on hand counter.
+   */
+  protected void decElasticSearchWorkOnHandCounter() {
+    esWorkOnHand.decrementAndGet();
+  }
+
+  /**
+   * Shutdown executors.
+   */
+  protected void shutdownExecutors() {
+    try {
+      synchronizerExecutor.shutdown();
+      aaiExecutor.shutdown();
+      esExecutor.shutdown();
+      aaiDataProvider.shutdown();
+      esDataProvider.shutdown();
+    } catch (Exception exc) {
+      logger.error(AaiUiMsgs.ERROR_SHUTDOWN_EXECUTORS, exc);
+    }
+  }
+
+  /**
+   * Clear cache.
+   */
+  public void clearCache() {
+    if (aaiDataProvider != null) {
+      aaiDataProvider.clearCache();
+    }
+  }
+
+  protected ActiveInventoryDataProvider getAaiDataProvider() {
+    return aaiDataProvider;
+  }
+
+  public void setAaiDataProvider(ActiveInventoryDataProvider aaiDataProvider) {
+    this.aaiDataProvider = aaiDataProvider;
+  }
+
+  protected ElasticSearchDataProvider getEsDataProvider() {
+    return esDataProvider;
+  }
+
+  public void setEsDataProvider(ElasticSearchDataProvider provider) {
+    this.esDataProvider = provider;
+  }
+
+  /**
+   * Gets the elastic full url.
+   *
+   * @param resourceUrl the resource url
+   * @param indexName the index name
+   * @param indexType the index type
+   * @return the elastic full url
+   * @throws Exception the exception
+   */
+  protected String getElasticFullUrl(String resourceUrl, String indexName, String indexType)
+      throws Exception {
+    return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName, indexType);
+  }
+
+  /**
+   * Gets the elastic full url.
+   *
+   * @param resourceUrl the resource url
+   * @param indexName the index name
+   * @return the elastic full url
+   * @throws Exception the exception
+   */
+  protected String getElasticFullUrl(String resourceUrl, String indexName) throws Exception {
+    return ElasticSearchConfig.getConfig().getElasticFullUrl(resourceUrl, indexName);
+  }
+
+  public String getIndexName() {
+    return indexName;
+  }
+
+  public void setIndexName(String indexName) {
+    this.indexName = indexName;
+  }
+
+
+  /**
+   * Gets the response length.
+   *
+   * @param txn the txn
+   * @return the response length
+   */
+  private long getResponseLength(NetworkTransaction txn) {
+
+    if (txn == null) {
+      return -1;
+    }
+
+    OperationResult result = txn.getOperationResult();
+
+    if (result == null) {
+      return -1;
+    }
+
+    if (result.getResult() != null) {
+      return result.getResult().length();
+    }
+
+    return -1;
+  }
+
+  /**
+   * Update elastic search counters.
+   *
+   * @param method the method
+   * @param or the or
+   */
+  protected void updateElasticSearchCounters(HttpMethod method, OperationResult or) {
+    updateElasticSearchCounters(new NetworkTransaction(method, null, or));
+  }
+
+  /**
+   * Update elastic search counters.
+   *
+   * @param method the method
+   * @param entityType the entity type
+   * @param or the or
+   */
+  protected void updateElasticSearchCounters(HttpMethod method, String entityType,
+      OperationResult or) {
+    updateElasticSearchCounters(new NetworkTransaction(method, entityType, or));
+  }
+
+  /**
+   * Update elastic search counters.
+   *
+   * @param txn the txn
+   */
+  protected void updateElasticSearchCounters(NetworkTransaction txn) {
+
+    if (enabledStatFlags.contains(StatFlag.ES_REST_STATS)) {
+      esRestStats.updateCounters(txn);
+    }
+
+    if (enabledStatFlags.contains(StatFlag.ES_ENTITY_STATS)) {
+      esEntityStats.updateCounters(txn);
+    }
+
+    if (enabledStatFlags.contains(StatFlag.ES_TASK_PROCESSING_STATS)) {
+
+      esTransactionRateController.trackResponseTime(txn.getOperationResult().getResponseTimeInMs());
+
+      esTaskProcessingStats
+          .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs());
+      esTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs());
+
+      // don't know the cost of the length calc, we'll see if it causes a
+      // problem
+
+      long responsePayloadSizeInBytes = getResponseLength(txn);
+      if (responsePayloadSizeInBytes >= 0) {
+        esTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes);
+      }
+
+      esTaskProcessingStats
+          .updateTransactionsPerSecondHistogram((long) esTransactionRateController.getCurrentTps());
+    }
+  }
+
+  /**
+   * Update active inventory counters.
+   *
+   * @param method the method
+   * @param or the or
+   */
+  protected void updateActiveInventoryCounters(HttpMethod method, OperationResult or) {
+    updateActiveInventoryCounters(new NetworkTransaction(method, null, or));
+  }
+
+  /**
+   * Update active inventory counters.
+   *
+   * @param method the method
+   * @param entityType the entity type
+   * @param or the or
+   */
+  protected void updateActiveInventoryCounters(HttpMethod method, String entityType,
+      OperationResult or) {
+    updateActiveInventoryCounters(new NetworkTransaction(method, entityType, or));
+  }
+
+  /**
+   * Update active inventory counters.
+   *
+   * @param txn the txn
+   */
+  protected void updateActiveInventoryCounters(NetworkTransaction txn) {
+
+    if (enabledStatFlags.contains(StatFlag.AAI_REST_STATS)) {
+      aaiRestStats.updateCounters(txn);
+    }
+
+    if (enabledStatFlags.contains(StatFlag.AAI_ENTITY_STATS)) {
+      aaiEntityStats.updateCounters(txn);
+    }
+
+    if (enabledStatFlags.contains(StatFlag.AAI_PROCESSING_EXCEPTION_STATS)) {
+      aaiProcessingExceptionStats.updateCounters(txn);
+    }
+
+    if (enabledStatFlags.contains(StatFlag.AAI_TASK_PROCESSING_STATS)) {
+      aaiTransactionRateController
+          .trackResponseTime(txn.getOperationResult().getResponseTimeInMs());
+
+      aaiTaskProcessingStats
+          .updateTaskResponseStatsHistogram(txn.getOperationResult().getResponseTimeInMs());
+      aaiTaskProcessingStats.updateTaskAgeStatsHistogram(txn.getTaskAgeInMs());
+
+      // don't know the cost of the length calc, we'll see if it causes a
+      // problem
+
+      long responsePayloadSizeInBytes = getResponseLength(txn);
+      if (responsePayloadSizeInBytes >= 0) {
+        aaiTaskProcessingStats.updateResponseSizeInBytesHistogram(responsePayloadSizeInBytes);
+      }
+
+      aaiTaskProcessingStats.updateTransactionsPerSecondHistogram(
+          (long) aaiTransactionRateController.getCurrentTps());
+    }
+  }
+
+  /**
+   * Reset counters.
+   */
+  protected void resetCounters() {
+    aaiRestStats.reset();
+    aaiEntityStats.reset();
+    aaiProcessingExceptionStats.reset();
+
+    esRestStats.reset();
+    esEntityStats.reset();
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java
new file mode 100644
index 0000000..0337f6a
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSuggestionSynchronizer.java
@@ -0,0 +1,187 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */ + +package org.openecomp.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import org.openecomp.cl.mdc.MdcContext; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Map; +import java.util.concurrent.ExecutorService; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.entity.AggregationSuggestionEntity; +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; +import org.openecomp.sparky.util.NodeUtils; +import org.slf4j.MDC; + +public class AggregationSuggestionSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(AggregationSuggestionSynchronizer.class); + + private boolean isSyncInProgress; + private boolean shouldPerformRetry; + private Map<String, String> contextMap; + protected ExecutorService esPutExecutor; + + public AggregationSuggestionSynchronizer(String indexName) throws Exception { + super(LOG, "ASS-" + indexName.toUpperCase(), 2, 5, 5, indexName); + + this.isSyncInProgress = false; + this.shouldPerformRetry = false; + this.synchronizerName = "Aggregation Suggestion Synchronizer"; + this.contextMap = MDC.getCopyOfContextMap(); + this.esPutExecutor = NodeUtils.createNamedExecutor("ASS-ES-PUT", 2, LOG); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand); + } + + if (totalWorkOnHand > 0 || !isSyncInProgress) { + return false; + } + + return true; + } + + @Override + public OperationState doSync() { + isSyncInProgress = true; + + syncEntity(); + + while (!isSyncDone()) { + try { + if (shouldPerformRetry) { + syncEntity(); + } + Thread.sleep(1000); + } catch (Exception exc) { + // We don't care about this exception + } + } + + return OperationState.OK; + } + + private void syncEntity() { + String txnId = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnId, "AggregationSuggestionSynchronizer", "", "Sync", ""); + + AggregationSuggestionEntity syncEntity = new AggregationSuggestionEntity(); + syncEntity.deriveFields(); + + String link = null; + try { + link = getElasticFullUrl("/" + syncEntity.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage()); + } + + try { + String jsonPayload = null; + jsonPayload = syncEntity.getIndexDocumentJson(); + if (link != null && jsonPayload != null) { + + NetworkTransaction elasticPutTxn = new NetworkTransaction(); + elasticPutTxn.setLink(link); + elasticPutTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + final Map<String, String> contextMap = MDC.getCopyOfContextMap(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, elasticPutTxn, + esDataProvider, contextMap), esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation suggestion entity sync UPDATE PUT error - " + + 
error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + wasEsOperationSuccessful(result); + } + }); + } + } catch (Exception exc) { + String message = + "Exception caught during aggregation suggestion entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_AGGREGATION_SUGGESTION_ENTITY_SYNC_ERROR, message); + } + } + + private void wasEsOperationSuccessful(NetworkTransaction result) { + if (result != null) { + OperationResult opResult = result.getOperationResult(); + + if (!opResult.wasSuccessful()) { + shouldPerformRetry = true; + } else { + isSyncInProgress = false; + shouldPerformRetry = false; + } + } + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + @Override + public String getStatReport(boolean shouldDisplayFinalReport) { + return getStatReport(System.currentTimeMillis() - this.syncStartedTimeStampInMs, + shouldDisplayFinalReport); + } + + @Override + public void shutdown() { + this.shutdownExecutors(); + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java new file mode 100644 index 0000000..ba1fb24 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/AggregationSynchronizer.java @@ -0,0 +1,772 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */
+
+package org.openecomp.sparky.synchronizer;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import javax.json.Json;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.config.oxm.OxmEntityDescriptor;
+import org.openecomp.sparky.dal.NetworkTransaction;
+import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sparky.dal.rest.HttpMethod;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration;
+import org.openecomp.sparky.synchronizer.entity.AggregationEntity;
+import org.openecomp.sparky.synchronizer.entity.MergableEntity;
+import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor;
+import org.openecomp.sparky.synchronizer.enumeration.OperationState;
+import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState;
+import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate;
+import org.openecomp.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+import org.openecomp.cl.mdc.MdcContext;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class AggregationSynchronizer.
+ */
+public class AggregationSynchronizer extends AbstractEntitySynchronizer
+    implements IndexSynchronizer {
+
+  /**
+   * The Class RetryAggregationEntitySyncContainer.
+   */
+  private class RetryAggregationEntitySyncContainer {
+    NetworkTransaction txn;
+    AggregationEntity ae;
+
+    /**
+     * Instantiates a new retry aggregation entity sync container.
+     *
+     * @param txn the txn
+     * @param ae the ae
+     */
+    public RetryAggregationEntitySyncContainer(NetworkTransaction txn, AggregationEntity ae) {
+      this.txn = txn;
+      this.ae = ae;
+    }
+
+    public NetworkTransaction getNetworkTransaction() {
+      return txn;
+    }
+
+    public AggregationEntity getAggregationEntity() {
+      return ae;
+    }
+  }
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(AggregationSynchronizer.class);
+  private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
+
+  private boolean allWorkEnumerated;
+  private Deque<SelfLinkDescriptor> selflinks;
+  private Deque<RetryAggregationEntitySyncContainer> retryQueue;
+  private Map<String, Integer> retryLimitTracker;
+  protected ExecutorService esPutExecutor;
+  private ConcurrentHashMap<String, AtomicInteger> entityCounters;
+  private boolean syncInProgress;
+  private Map<String, String> contextMap;
+  private String entityType;
+
+  /**
+   * Instantiates a new entity aggregation synchronizer.
+   *
+   * @param entityType the entity type
+   * @param indexName the index name
+   * @throws Exception the exception
+   */
+  public AggregationSynchronizer(String entityType, String indexName) throws Exception {
+    super(LOG, "AGGES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Aggregation
+                                                                        // Entity Synchronizers
+                                                                        // will run for
+                                                                        // different indices
+
+    this.entityType = entityType;
+    this.allWorkEnumerated = false;
+    this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
+    this.synchronizerName = "Entity Aggregation Synchronizer";
+    this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
+    this.syncInProgress = false;
+    this.allWorkEnumerated = false;
+    this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+    this.retryQueue = new ConcurrentLinkedDeque<RetryAggregationEntitySyncContainer>();
+    this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+
+    this.esPutExecutor = NodeUtils.createNamedExecutor("AGGES-ES-PUT", 1, LOG);
+    Map<String, OxmEntityDescriptor> descriptor = new HashMap<String, OxmEntityDescriptor>();
+    descriptor.put(entityType, oxmModelLoader.getEntityDescriptors().get(entityType));
+    this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors(descriptor);
+    this.esEntityStats.initializeCountersFromOxmEntityDescriptors(descriptor);
+    this.contextMap = MDC.getCopyOfContextMap();
+  }
+
+  /**
+   * Collect all the work.
+   *
+   * @return the operation state
+   */
+  private OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    final String entity = this.getEntityType();
+    try {
+
+      aaiWorkOnHand.set(1);
+
+      supplyAsync(new Supplier<Void>() {
+
+        @Override
+        public Void get() {
+          MDC.setContextMap(contextMap);
+          OperationResult typeLinksResult = null;
+          try {
+            typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(entity);
+            aaiWorkOnHand.decrementAndGet();
+            processEntityTypeSelfLinks(typeLinksResult);
+          } catch (Exception exc) {
+            // TODO -> LOG, what should be logged here?
+          }
+
+          return null;
+        }
+
+      }, aaiExecutor).whenComplete((result, error) -> {
+
+        if (error != null) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC,
+              "An error occurred getting data from AAI. Error = " + error.getMessage());
Error = " + error.getMessage()); + } + }); + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + } + + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryAggregationEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + AggregationEntity ae = rsc.getAggregationEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = getElasticFullUrl("/" + ae.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, ae); + } + }); + } + + } + } + } + + /** + * Perform document upsert. + * + * @param esGetTxn the es get txn + * @param ae the ae + */ + protected void performDocumentUpsert(NetworkTransaction esGetTxn, AggregationEntity ae) { + /** + * <p> + * <ul> + * As part of the response processing we need to do the following: + * <li>1. Extract the version (if present), it will be the ETAG when we use the + * Search-Abstraction-Service + * <li>2. 
+     * tag
+     * <li>a) if version is null or RC=404, then standard put, no _update with version tag
+     * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic
+     * </ul>
+     * </p>
+     */
+    String link = null;
+    try {
+      link = getElasticFullUrl("/" + ae.getId(), getIndexName());
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+      return;
+    }
+
+    String versionNumber = null;
+    boolean wasEntryDiscovered = false;
+    if (esGetTxn.getOperationResult().getResultCode() == 404) {
+      LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, ae.getEntityPrimaryKeyValue());
+    } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+      wasEntryDiscovered = true;
+      try {
+        versionNumber = NodeUtils.extractFieldValueFromObject(
+            NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+            "_version");
+      } catch (IOException exc) {
+        String message =
+            "Error extracting version number from response, aborting aggregation entity sync of "
+                + ae.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+        return;
+      }
+    } else {
+      /*
+       * Not being a 200 does not mean a failure. e.g. 201 is returned for created. TODO -> Should we
+       * return.
+       */
+      LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+          String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+      return;
+    }
+
+    try {
+      String jsonPayload = null;
+      if (wasEntryDiscovered) {
+        try {
+          ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>();
+          NodeUtils.extractObjectsByKey(
+              NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+              "_source", sourceObject);
+
+          if (!sourceObject.isEmpty()) {
+            String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false);
+            MergableEntity me = mapper.readValue(responseSource, MergableEntity.class);
+            ObjectReader updater = mapper.readerForUpdating(me);
+            MergableEntity merged = updater.readValue(ae.getIndexDocumentJson());
+            jsonPayload = mapper.writeValueAsString(merged);
+          }
+        } catch (IOException exc) {
+          String message =
+              "Error extracting source value from response, aborting aggregation entity sync of "
+                  + ae.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = ae.getIndexDocumentJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), + ElasticSearchConfig.getConfig().getType(), ae.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, esDataProvider, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Aggregation entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Aggregation entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, ae); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during aggregation entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Aggregation entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Process store document result. 
+   *
+   * @param esPutResult the es put result
+   * @param esGetResult the es get result
+   * @param ae the ae
+   */
+  private void processStoreDocumentResult(NetworkTransaction esPutResult,
+      NetworkTransaction esGetResult, AggregationEntity ae) {
+
+    OperationResult or = esPutResult.getOperationResult();
+
+    if (!or.wasSuccessful()) {
+      if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+        if (shouldAllowRetry(ae.getId())) {
+          esWorkOnHand.incrementAndGet();
+
+          RetryAggregationEntitySyncContainer rsc =
+              new RetryAggregationEntitySyncContainer(esGetResult, ae);
+          retryQueue.push(rsc);
+
+          String message = "Store document failed during aggregation entity synchronization"
+              + " due to version conflict. Entity will be re-synced.";
+          LOG.warn(AaiUiMsgs.ERROR_GENERIC, message);
+        }
+      } else {
+        String message =
+            "Store document failed during aggregation entity synchronization with result code "
+                + or.getResultCode() + " and result message " + or.getResult();
+        LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      }
+    }
+  }
+
+  /**
+   * Sync entity types.
+   */
+  private void syncEntityTypes() {
+
+    while (selflinks.peek() != null) {
+
+      SelfLinkDescriptor linkDescriptor = selflinks.poll();
+      aaiWorkOnHand.decrementAndGet();
+
+      OxmEntityDescriptor descriptor = null;
+
+      if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+        descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType());
+
+        if (descriptor == null) {
+          LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+          // go to next element in iterator
+          continue;
+        }
+
+        NetworkTransaction txn = new NetworkTransaction();
+        txn.setDescriptor(descriptor);
+        txn.setLink(linkDescriptor.getSelfLink());
+        txn.setOperationType(HttpMethod.GET);
+        txn.setEntityType(linkDescriptor.getEntityType());
+
+        aaiWorkOnHand.incrementAndGet();
+
+        supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor)
+            .whenComplete((result, error) -> {
+
+              aaiWorkOnHand.decrementAndGet();
+
+              if (error != null) {
+                LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+              } else {
+                if (result == null) {
+                  LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+                      linkDescriptor.getSelfLink());
+                } else {
+                  updateActiveInventoryCounters(result);
+                  fetchDocumentForUpsert(result);
+                }
+              }
+            });
+      }
+
+    }
+
+  }
+
+  /**
+   * Fetch document for upsert.
+   *
+   * @param txn the txn
+   */
+  private void fetchDocumentForUpsert(NetworkTransaction txn) {
+    if (!txn.getOperationResult().wasSuccessful()) {
+      String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      return;
+    }
+
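+    /*
+     * Flow overview: each successful self-link fetch is turned into an
+     * AggregationEntity, and the matching document is first fetched (GET) from
+     * ElasticSearch. In performDocumentUpsert(), a 404 means a fresh insert, while a
+     * 200 hands back the stored _version, which is echoed on the write so that a
+     * concurrent writer surfaces as a retryable version conflict instead of a
+     * silent lost update.
+     */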
+    try {
+      final String jsonResult = txn.getOperationResult().getResult();
+      if (jsonResult != null && jsonResult.length() > 0) {
+
+        AggregationEntity ae = new AggregationEntity(oxmModelLoader);
+        ae.setLink(txn.getLink());
+        populateAggregationEntityDocument(ae, jsonResult, txn.getDescriptor());
+        ae.deriveFields();
+
+        String link = null;
+        try {
+          link = getElasticFullUrl("/" + ae.getId(), getIndexName());
+        } catch (Exception exc) {
+          LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+        }
+
+        if (link != null) {
+          NetworkTransaction n2 = new NetworkTransaction();
+          n2.setLink(link);
+          n2.setEntityType(txn.getEntityType());
+          n2.setDescriptor(txn.getDescriptor());
+          n2.setOperationType(HttpMethod.GET);
+
+          esWorkOnHand.incrementAndGet();
+
+          supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor)
+              .whenComplete((result, error) -> {
+
+                esWorkOnHand.decrementAndGet();
+
+                if (error != null) {
+                  LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+                } else {
+                  updateElasticSearchCounters(result);
+                  performDocumentUpsert(result, ae);
+                }
+              });
+        }
+      }
+
+    } catch (JsonProcessingException exc) {
+      // TODO -> LOG, what should be logged here?
+    } catch (IOException exc) {
+      // TODO -> LOG, what should be logged here?
+    }
+  }
+
+
+  /**
+   * Populate aggregation entity document.
+   *
+   * @param doc the doc
+   * @param result the result
+   * @param resultDescriptor the result descriptor
+   * @throws JsonProcessingException the json processing exception
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  protected void populateAggregationEntityDocument(AggregationEntity doc, String result,
+      OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException {
+    doc.setEntityType(resultDescriptor.getEntityName());
+    JsonNode entityNode = mapper.readTree(result);
+    Map<String, Object> map = mapper.convertValue(entityNode, Map.class);
+    doc.copyAttributeKeyValuePair(map);
+  }
+
+  /**
+   * Process entity type self links.
+   *
+   * @param operationResult the operation result
+   */
+  private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+    JsonNode rootNode = null;
+
+    final String jsonResult = operationResult.getResult();
+
+    if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message =
+            "Could not deserialize JSON (representing operation result) as node tree. "
+                + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+        // rootNode is null at this point; bail out rather than NPE below
+        return;
+      }
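+      /*
+       * Illustration (hypothetical values): the payload walked below is the AAI
+       * entity-type query response, expected to look roughly like:
+       *
+       *   { "result-data": [
+       *       { "resource-type": "generic-vnf",
+       *         "resource-link": "/aai/<version>/network/generic-vnfs/generic-vnf/<id>" } ] }
+       *
+       * Each entry fans out into one self-link retrieval task.
+       */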
" + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = oxmModelLoader.getEntityDescriptor(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + selflinks.add(new SelfLinkDescriptor(resourceLink, SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AggregationSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + return getStatReport(System.currentTimeMillis() - this.syncStartedTimeStampInMs, + showFinalReport); + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = " + + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + + if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java new file mode 100644 index 0000000..05a9698 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/AutosuggestionSynchronizer.java @@ -0,0 +1,736 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * 
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.config.oxm.OxmEntityDescriptor;
+import org.openecomp.sparky.dal.NetworkTransaction;
+import org.openecomp.sparky.dal.rest.HttpMethod;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration;
+import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor;
+import org.openecomp.sparky.synchronizer.entity.SuggestionSearchEntity;
+import org.openecomp.sparky.synchronizer.enumeration.OperationState;
+import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState;
+import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval;
+import org.openecomp.sparky.util.NodeUtils;
+import org.openecomp.sparky.util.SuggestionsPermutation;
+import org.slf4j.MDC;
+
+import org.openecomp.cl.mdc.MdcContext;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+/**
+ * The Class AutosuggestionSynchronizer.
+ */
+public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer
+    implements IndexSynchronizer {
+
+  private class RetrySuggestionEntitySyncContainer {
+    NetworkTransaction txn;
+    SuggestionSearchEntity ssec;
+
+    /**
+     * Instantiates a new RetrySuggestionEntitySyncContainer.
+     *
+     * @param txn the txn
+     * @param icer the icer
+     */
+    public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) {
+      this.txn = txn;
+      this.ssec = icer;
+    }
+
+    public NetworkTransaction getNetworkTransaction() {
+      return txn;
+    }
+
+    public SuggestionSearchEntity getSuggestionSearchEntity() {
+      return ssec;
+    }
+  }
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class);
+  private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
+
+  private boolean allWorkEnumerated;
+  private Deque<SelfLinkDescriptor> selflinks;
+  private ConcurrentHashMap<String, AtomicInteger> entityCounters;
+  private boolean syncInProgress;
+  private Map<String, String> contextMap;
+  protected ExecutorService esPutExecutor;
+  private Deque<RetrySuggestionEntitySyncContainer> retryQueue;
+  private Map<String, Integer> retryLimitTracker;
+
+  /**
+   * Instantiates a new autosuggestion synchronizer.
+   *
+   * @param indexName the index name
+   * @throws Exception the exception
+   */
+  public AutosuggestionSynchronizer(String indexName) throws Exception {
+    super(LOG, "ASES-" + indexName.toUpperCase(), 2, 5, 5, indexName); // multiple Autosuggestion
+                                                                      // Entity Synchronizer will
+                                                                      // run for different indices
+
+    this.allWorkEnumerated = false;
+    this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+    this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
+    // initialize retry bookkeeping; these collections are dereferenced by
+    // processStoreDocumentResult(), performRetrySync() and collectAllTheWork()
+    this.retryQueue = new ConcurrentLinkedDeque<RetrySuggestionEntitySyncContainer>();
+    this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+    this.synchronizerName = "Autosuggestion Entity Synchronizer";
+    this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
+    this.syncInProgress = false;
+    this.contextMap = MDC.getCopyOfContextMap();
+    this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG);
+  }
+
+  /**
+   * Collect all the work.
+   *
+   * @return the operation state
+   */
+  private OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    Map<String, OxmEntityDescriptor> descriptorMap =
+        oxmModelLoader.getSuggestionSearchEntityDescriptors();
+
+    if (descriptorMap.isEmpty()) {
+      LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES);
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = descriptorMap.keySet();
+
+    try {
+
+      /*
+       * launch a parallel async thread to process the documents for each entity-type (up to the
+       * maximum size of the configured executor pool)
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key);
+              aaiWorkOnHand.decrementAndGet();
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              // TODO -> LOG, what should be logged here?
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GENERIC,
+                "An error occurred getting data from AAI. Error = " + error.getMessage());
+          }
+        });
+
+      }
+
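+      /*
+       * Synchronization note: aaiWorkOnHand behaves like a hand-rolled countdown
+       * latch. It is pre-set to the number of entity types, decremented as each async
+       * fetch above completes, and the loop below polls it down to zero before the
+       * per-self-link phase begins.
+       */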
Error = " + error.getMessage()); + } + }); + + } + + while (aaiWorkOnHand.get() != 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED); + } + + Thread.sleep(1000); + } + + aaiWorkOnHand.set(selflinks.size()); + allWorkEnumerated = true; + syncEntityTypes(); + + while (!isSyncDone()) { + performRetrySync(); + Thread.sleep(1000); + } + + /* + * Make sure we don't hang on to retries that failed which could cause issues during future + * syncs + */ + retryLimitTracker.clear(); + + } catch (Exception exc) { + // TODO -> LOG, waht should be logged here? + } + + return OperationState.OK; + + } + + /* + * (non-Javadoc) + * + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", ""); + + return collectAllTheWork(); + } + + /** + * Process entity type self links. + * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + String message = "Could not deserialize JSON (representing operation result) as node tree. " + + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + OxmEntityDescriptor descriptor = null; + + if (resourceType != null && resourceLink != null) { + + descriptor = oxmModelLoader.getEntityDescriptor(resourceType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + selflinks.add(new SelfLinkDescriptor(resourceLink, + SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType)); + + + } + } + } + } + } + + /** + * Sync entity types. 
+   */
+  private void syncEntityTypes() {
+
+    while (selflinks.peek() != null) {
+
+      SelfLinkDescriptor linkDescriptor = selflinks.poll();
+      aaiWorkOnHand.decrementAndGet();
+
+      OxmEntityDescriptor descriptor = null;
+
+      if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+        descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType());
+
+        if (descriptor == null) {
+          LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+          // go to next element in iterator
+          continue;
+        }
+
+        NetworkTransaction txn = new NetworkTransaction();
+        txn.setDescriptor(descriptor);
+        txn.setLink(linkDescriptor.getSelfLink());
+        txn.setOperationType(HttpMethod.GET);
+        txn.setEntityType(linkDescriptor.getEntityType());
+
+        aaiWorkOnHand.incrementAndGet();
+
+        supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor)
+            .whenComplete((result, error) -> {
+
+              aaiWorkOnHand.decrementAndGet();
+
+              if (error != null) {
+                LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+              } else {
+                if (result == null) {
+                  LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+                      linkDescriptor.getSelfLink());
+                } else {
+                  updateActiveInventoryCounters(result);
+                  fetchDocumentForUpsert(result);
+                }
+              }
+            });
+      }
+
+    }
+
+  }
+
+  /**
+   * Return the set of valid suggestion attributes for the provided entityName that are present in
+   * the JSON.
+   *
+   * @param node JSON node in which the attributes should be found
+   * @param entityName Name of the entity
+   * @return List of all valid suggestion attributes (keys)
+   */
+  public List<String> getSuggestionFromResponse(JsonNode node, String entityName) {
+    List<String> suggestableAttr = new ArrayList<String>();
+    HashMap<String, String> desc = oxmModelLoader.getOxmModel().get(entityName);
+    String attr = desc.get("suggestibleAttributes");
+    suggestableAttr = Arrays.asList(attr.split(","));
+    List<String> suggestableValue = new ArrayList<>();
+    for (String attribute : suggestableAttr) {
+      if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) {
+        suggestableValue.add(attribute);
+      }
+    }
+    return suggestableValue;
+  }
+
+  /**
+   * Fetch all the documents for upsert. The number of documents produced depends on how many
+   * suggestion attribute permutations are available for the entity.
+   *
+   * @param txn the txn
+   */
+  private void fetchDocumentForUpsert(NetworkTransaction txn) {
+    if (!txn.getOperationResult().wasSuccessful()) {
+      String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      return;
+    }
+    try {
+      final String jsonResult = txn.getOperationResult().getResult();
+
+      if (jsonResult != null && jsonResult.length() > 0) {
+
+        // Step 1: Calculate the number of possible permutations of attributes
+        String entityName = txn.getDescriptor().getEntityName();
+        JsonNode entityNode = mapper.readTree(jsonResult);
+
+        SuggestionsPermutation suggPermutation = new SuggestionsPermutation();
+        ArrayList<ArrayList<String>> uniqueLists = suggPermutation
+            .getSuggestionsPermutation(getSuggestionFromResponse(entityNode, entityName));
+
+        // Now we have a list of all attribute combinations that are defined for this
+        // entity type. Try inserting a document for every combination.
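+        /*
+         * Illustration (attribute names are examples only; the real set comes from the
+         * OXM model): if "prov-status" and "orchestration-status" are the suggestible
+         * attributes present on this entity, the permutation utility is expected to
+         * yield the non-empty combinations
+         *   [prov-status], [orchestration-status], [prov-status, orchestration-status]
+         * and one suggestion document is attempted per combination.
+         */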
+        for (ArrayList<String> uniqueList : uniqueLists) {
+          SuggestionSearchEntity sse = new SuggestionSearchEntity(oxmModelLoader);
+          sse.setSuggestableAttr(uniqueList);
+          sse.setPayloadFromResponse(entityNode);
+          sse.setLink(txn.getLink());
+          populateSuggestionSearchEntityDocument(sse, jsonResult, txn);
+          // The unique id for the document will be created at derive fields
+          sse.deriveFields();
+          // Insert the document only if it has valid statuses
+          if (sse.isSuggestableDoc()) {
+            String link = null;
+            try {
+              link = getElasticFullUrl("/" + sse.getId(), getIndexName());
+            } catch (Exception exc) {
+              LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+            }
+
+            if (link != null) {
+              NetworkTransaction n2 = new NetworkTransaction();
+              n2.setLink(link);
+              n2.setEntityType(txn.getEntityType());
+              n2.setDescriptor(txn.getDescriptor());
+              n2.setOperationType(HttpMethod.GET);
+
+              esWorkOnHand.incrementAndGet();
+
+              supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor)
+                  .whenComplete((result, error) -> {
+
+                    esWorkOnHand.decrementAndGet();
+
+                    if (error != null) {
+                      LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+                    } else {
+                      updateElasticSearchCounters(result);
+                      performDocumentUpsert(result, sse);
+                    }
+                  });
+            }
+          }
+        }
+      }
+    } catch (JsonProcessingException exc) {
+      // TODO -> LOG, what should be logged here?
+    } catch (IOException exc) {
+      // TODO -> LOG, what should be logged here?
+    }
+  }
+
+  protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result,
+      NetworkTransaction txn) throws JsonProcessingException, IOException {
+
+    OxmEntityDescriptor resultDescriptor = txn.getDescriptor();
+
+    sse.setEntityType(resultDescriptor.getEntityName());
+
+    JsonNode entityNode = mapper.readTree(result);
+
+    List<String> primaryKeyValues = new ArrayList<String>();
+    String pkeyValue = null;
+
+    for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) {
+      pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+      if (pkeyValue != null) {
+        primaryKeyValues.add(pkeyValue);
+      } else {
+        String message = "populateSuggestionSearchEntityDocument(),"
+            + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName();
+        LOG.warn(AaiUiMsgs.WARN_GENERIC, message);
+      }
+    }
+
+    final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+    sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+    sse.generateSuggestionInputPermutations();
+  }
+
+  protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) {
+    /**
+     * <p>
+     * <ul>
+     * As part of the response processing we need to do the following:
+     * <li>1. Extract the version (if present), it will be the ETAG when we use the
+     * Search-Abstraction-Service
+     * <li>2. Spawn next task which is to do the PUT operation into elastic with or without the
+     * version tag
+     * <li>a) if version is null or RC=404, then standard put, no _update with version tag
+     * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic
+     * </ul>
+     * </p>
+     */
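+    /*
+     * Dedup note: the suggestion document id is derived from a hash of the payload
+     * (see the comments around deriveFields() above), so re-syncing an unchanged
+     * entity maps to the same document id. That is why the 200 branch below can
+     * simply skip the write: same payload, same id, nothing to update.
+     */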
+    String link = null;
+    try {
+      link = getElasticFullUrl("/" + sse.getId(), getIndexName());
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+      return;
+    }
+
+    boolean wasEntryDiscovered = false;
+    if (esGetTxn.getOperationResult().getResultCode() == 404) {
+      LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue());
+    } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+      wasEntryDiscovered = true;
+    } else {
+      /*
+       * Not being a 200 does not mean a failure, e.g. 201 is returned for created, and 500 for ES
+       * not found. TODO -> should we return here?
+       */
+      LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+          String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+      return;
+    }
+    // Insert a new document only if the payload is different.
+    // This is determined by hashing the payload and using it as an id for the document
+    //
+    if (!wasEntryDiscovered) {
+      try {
+        String jsonPayload = null;
+
+        jsonPayload = sse.getIndexDocumentJson();
+        if (link != null && jsonPayload != null) {
+
+          NetworkTransaction updateElasticTxn = new NetworkTransaction();
+          updateElasticTxn.setLink(link);
+          updateElasticTxn.setEntityType(esGetTxn.getEntityType());
+          updateElasticTxn.setDescriptor(esGetTxn.getDescriptor());
+          updateElasticTxn.setOperationType(HttpMethod.PUT);
+
+          esWorkOnHand.incrementAndGet();
+          supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider),
+              esPutExecutor).whenComplete((result, error) -> {
+
+                esWorkOnHand.decrementAndGet();
+
+                if (error != null) {
+                  String message = "Suggestion search entity sync UPDATE PUT error - "
+                      + error.getLocalizedMessage();
+                  LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+                } else {
+                  updateElasticSearchCounters(result);
+                  processStoreDocumentResult(result, esGetTxn, sse);
+                }
+              });
+        }
+      } catch (Exception exc) {
+        String message =
+            "Exception caught during suggestion search entity sync PUT operation. Message - "
+                + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+      }
+    }
+  }
+
+  private void processStoreDocumentResult(NetworkTransaction esPutResult,
+      NetworkTransaction esGetResult, SuggestionSearchEntity sse) {
+
+    OperationResult or = esPutResult.getOperationResult();
+
+    if (!or.wasSuccessful()) {
+      if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
+
+        if (shouldAllowRetry(sse.getId())) {
+          esWorkOnHand.incrementAndGet();
+
+          RetrySuggestionEntitySyncContainer rssec =
+              new RetrySuggestionEntitySyncContainer(esGetResult, sse);
+          retryQueue.push(rssec);
+
+          String message = "Store document failed during suggestion search entity synchronization"
+              + " due to version conflict. Entity will be re-synced.";
+          LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+        }
+      } else {
+        String message =
+            "Store document failed during suggestion search entity synchronization with result code "
+                + or.getResultCode() + " and result message " + or.getResult();
+        LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
+      }
+    }
+  }
+
+  /**
+   * Perform retry sync.
+   */
+  private void performRetrySync() {
+    while (retryQueue.peek() != null) {
+
+      RetrySuggestionEntitySyncContainer susc = retryQueue.poll();
+      if (susc != null) {
+
+        SuggestionSearchEntity sus = susc.getSuggestionSearchEntity();
+        NetworkTransaction txn = susc.getNetworkTransaction();
+
+        String link = null;
+        try {
+          /*
+           * In this retry flow the sse object has already derived its fields
+           */
+          link = getElasticFullUrl("/" + sus.getId(), getIndexName());
+        } catch (Exception exc) {
+          LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
+        }
+
+        if (link != null) {
+          NetworkTransaction retryTransaction = new NetworkTransaction();
+          retryTransaction.setLink(link);
+          retryTransaction.setEntityType(txn.getEntityType());
+          retryTransaction.setDescriptor(txn.getDescriptor());
+          retryTransaction.setOperationType(HttpMethod.GET);
+
+          /*
+           * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already
+           * called incrementAndGet when queuing the failed PUT!
+           */
+
+          supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider),
+              esExecutor).whenComplete((result, error) -> {
+
+                esWorkOnHand.decrementAndGet();
+
+                if (error != null) {
+                  LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
+                } else {
+                  updateElasticSearchCounters(result);
+                  performDocumentUpsert(result, sus);
+                }
+              });
+        }
+
+      }
+    }
+  }
+
+  /**
+   * Should allow retry.
+   *
+   * @param id the id
+   * @return true, if successful
+   */
+  private boolean shouldAllowRetry(String id) {
+    boolean isRetryAllowed = true;
+    if (retryLimitTracker.get(id) != null) {
+      Integer currentCount = retryLimitTracker.get(id);
+      if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
+        isRetryAllowed = false;
+        String message = "Suggestion search entity re-sync limit reached for " + id
+            + ", re-sync will no longer be attempted for this entity";
+        LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
+      } else {
+        Integer newCount = new Integer(currentCount.intValue() + 1);
+        retryLimitTracker.put(id, newCount);
+      }
+    } else {
+      Integer firstRetryCount = new Integer(1);
+      retryLimitTracker.put(id, firstRetryCount);
+    }
+
+    return isRetryAllowed;
+  }
+
+  @Override
+  public SynchronizerState getState() {
+
+    if (!isSyncDone()) {
+      return SynchronizerState.PERFORMING_SYNCHRONIZATION;
+    }
+
+    return SynchronizerState.IDLE;
+
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
+   */
+  @Override
+  public String getStatReport(boolean showFinalReport) {
+    return getStatReport(System.currentTimeMillis() - this.syncStartedTimeStampInMs,
+        showFinalReport);
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
+   */
+  @Override
+  public void shutdown() {
+    this.shutdownExecutors();
+  }
+
+  @Override
+  protected boolean isSyncDone() {
+
+    int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = "
+          + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated);
+    }
+
+    if (totalWorkOnHand > 0 || !allWorkEnumerated) {
+      return false;
+    }
+
+    this.syncInProgress = false;
+
+    return true;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
+   */
+  @Override
+  public void clearCache() {
+
+    if (syncInProgress) {
+      LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
"Autosuggestion Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java new file mode 100644 index 0000000..2ba2500 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/CrossEntityReferenceSynchronizer.java @@ -0,0 +1,879 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.CrossEntityReference; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; +import org.openecomp.sparky.synchronizer.entity.IndexableCrossEntityReference; +import org.openecomp.sparky.synchronizer.entity.MergableEntity; +import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; +import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut; +import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval; +import 
org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate; +import org.openecomp.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectReader; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class CrossEntityReferenceSynchronizer. + */ +public class CrossEntityReferenceSynchronizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + /** + * The Class RetryCrossEntitySyncContainer. + */ + private class RetryCrossEntitySyncContainer { + NetworkTransaction txn; + IndexableCrossEntityReference icer; + + /** + * Instantiates a new retry cross entity sync container. + * + * @param txn the txn + * @param icer the icer + */ + public RetryCrossEntitySyncContainer(NetworkTransaction txn, + IndexableCrossEntityReference icer) { + this.txn = txn; + this.icer = icer; + } + + public NetworkTransaction getNetworkTransaction() { + return txn; + } + + public IndexableCrossEntityReference getIndexableCrossEntityReference() { + return icer; + } + } + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(CrossEntityReferenceSynchronizer.class); + + private Deque<SelfLinkDescriptor> selflinks; + private Deque<RetryCrossEntitySyncContainer> retryQueue; + private Map<String, Integer> retryLimitTracker; + private boolean isAllWorkEnumerated; + protected ExecutorService esPutExecutor; + protected ActiveInventoryConfig aaiConfig; + + /** + * Instantiates a new cross entity reference synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public CrossEntityReferenceSynchronizer(String indexName, ActiveInventoryConfig aaiConfig) throws Exception { + super(LOG, "CERS", 2, 5, 5, indexName); + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.retryQueue = new ConcurrentLinkedDeque<RetryCrossEntitySyncContainer>(); + this.retryLimitTracker = new ConcurrentHashMap<String, Integer>(); + this.synchronizerName = "Cross Reference Entity Synchronizer"; + this.isAllWorkEnumerated = false; + this.esPutExecutor = NodeUtils.createNamedExecutor("CERS-ES-PUT", 5, LOG); + this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors( + oxmModelLoader.getCrossReferenceEntityDescriptors()); + this.esEntityStats.initializeCountersFromOxmEntityDescriptors( + oxmModelLoader.getCrossReferenceEntityDescriptors()); + this.aaiConfig = aaiConfig; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "CrossEntitySynchronizer", "", "Sync", ""); + + resetCounters(); + syncStartedTimeStampInMs = System.currentTimeMillis(); + launchSyncFlow(); + return OperationState.OK; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + return this.getStatReport(System.currentTimeMillis() - syncStartedTimeStampInMs, + showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + 
this.shutdownExecutors();
+  }
+
+  @Override
+  protected boolean isSyncDone() {
+    int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
+
+    if (totalWorkOnHand > 0 || !isAllWorkEnumerated) {
+      return false;
+    }
+
+    return true;
+  }
+
+  /**
+   * Launch sync flow.
+   *
+   * @return the operation state
+   */
+  private OperationState launchSyncFlow() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    Map<String, OxmEntityDescriptor> descriptorMap =
+        oxmModelLoader.getCrossReferenceEntityDescriptors();
+
+    if (descriptorMap.isEmpty()) {
+      LOG.error(AaiUiMsgs.ERROR_LOADING_OXM);
+
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = descriptorMap.keySet();
+
+    try {
+
+      /*
+       * launch a parallel async thread to process the documents for each entity-type (up to the
+       * maximum size of the configured executor pool)
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key);
+              aaiWorkOnHand.decrementAndGet();
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              // TODO -> LOG, what should be logged here?
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+          }
+        });
+      }
+
+      while (aaiWorkOnHand.get() != 0) {
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+        }
+
+        Thread.sleep(1000);
+      }
+
+      aaiWorkOnHand.set(selflinks.size());
+      isAllWorkEnumerated = true;
+      performSync();
+
+      while (!isSyncDone()) {
+        performRetrySync();
+        Thread.sleep(1000);
+      }
+
+      /*
+       * Make sure we don't hang on to retries that failed which could cause issues during future
+       * syncs
+       */
+      retryLimitTracker.clear();
+
+    } catch (Exception exc) {
+      // TODO -> LOG, what should be logged here?
+    }
+
+    return OperationState.OK;
+  }
+
+  /**
+   * Perform sync.
+   */
+  private void performSync() {
+    while (selflinks.peek() != null) {
+
+      SelfLinkDescriptor linkDescriptor = selflinks.poll();
+      aaiWorkOnHand.decrementAndGet();
+
+      OxmEntityDescriptor descriptor = null;
+
+      if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+        descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType());
+
+        if (descriptor == null) {
+          LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+          // go to next element in iterator
+          continue;
+        }
+
+        if (descriptor.hasCrossEntityReferences()) {
+
+          NetworkTransaction txn = new NetworkTransaction();
+          txn.setDescriptor(descriptor);
+          txn.setLink(linkDescriptor.getSelfLink() + linkDescriptor.getDepthModifier());
+          txn.setOperationType(HttpMethod.GET);
+          txn.setEntityType(linkDescriptor.getEntityType());
+
+          aaiWorkOnHand.incrementAndGet();
+
+          supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor)
+              .whenComplete((result, error) -> {
+
+                aaiWorkOnHand.decrementAndGet();
+
+                if (error != null) {
+                  LOG.error(AaiUiMsgs.SELF_LINK_GET, error.getLocalizedMessage());
+                } else {
+                  if (result == null) {
+                    LOG.error(AaiUiMsgs.SELF_LINK_CROSS_REF_SYNC);
+                  } else {
+                    updateActiveInventoryCounters(result);
+                    fetchDocumentForUpsert(result);
+                  }
+                }
+              });
+        }
+      }
+    }
+  }
+
+  /**
+   * Process entity type self links.
+   *
+   * @param operationResult the operation result
+   */
+  private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+    JsonNode rootNode = null;
+
+    final String jsonResult = operationResult.getResult();
+
+    if (jsonResult != null && jsonResult.length() > 0) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage());
+        // rootNode is null at this point; bail out rather than NPE below
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+        resultDataArrayNode = (ArrayNode) resultData;
+
+        Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+        JsonNode element = null;
+
+        while (elementIterator.hasNext()) {
+          element = elementIterator.next();
+
+          final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+          final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+          OxmEntityDescriptor descriptor = null;
+
+          if (resourceType != null && resourceLink != null) {
+            descriptor = oxmModelLoader.getEntityDescriptor(resourceType);
+
+            if (descriptor == null) {
+              LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+              // go to next element in iterator
+              continue;
+            }
+            if (descriptor.hasCrossEntityReferences()) {
+              selflinks.add(new SelfLinkDescriptor(
+                  resourceLink, SynchronizerConfiguration.DEPTH_ALL_MODIFIER, resourceType));
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * By providing the entity type and a json node for the entity, determine the
+   * primary key name(s) + primary key value(s) sufficient to build an entity query string
+   * of the following format:
+   *
+   * <entityType>.<primaryKeyNames>:<primaryKeyValues>
+   *
+   * @return - a composite string in the above format or null
+   */
+  private String determineEntityQueryString(String entityType, JsonNode entityJsonNode) {
+
+    OxmEntityDescriptor entityDescriptor =
+        oxmModelLoader.getEntityDescriptor(entityType);
+
+    String queryString = null;
+
+    if ( entityDescriptor != null ) {
+
+      final List<String> primaryKeyNames = entityDescriptor.getPrimaryKeyAttributeName();
+      final List<String> keyValues = new ArrayList<String>();
+      NodeUtils.extractFieldValuesFromObject(entityJsonNode, primaryKeyNames, keyValues);
+
+      queryString = entityType + "." + NodeUtils.concatArray(primaryKeyNames, "/") + ":" + NodeUtils.concatArray(keyValues);
+
+    }
+
+    return queryString;
+
+  }
+
+  /**
+   * Fetch document for upsert.
+   *
+   * @param txn the txn
+   */
+  private void fetchDocumentForUpsert(NetworkTransaction txn) {
+
+    if (!txn.getOperationResult().wasSuccessful()) {
+      LOG.error(AaiUiMsgs.SELF_LINK_GET, txn.getOperationResult().getResult());
+      return;
+    }
+
+    if (txn.getDescriptor().hasCrossEntityReferences()) {
+
+      final String jsonResult = txn.getOperationResult().getResult();
+
+      if (jsonResult != null && jsonResult.length() > 0) {
+
+        /**
+         * Here's what we are going to do:
+         *
+         * <li>Extract primary key name and value from the parent type.
+         * <li>Extract the primary key and value from the nested child instance.
+         * <li>Build a generic query to discover the self-link for the nested-child-instance using
+         * parent and child.
+         * <li>Set the self-link on the child.
+         * <li>Generate the id that will allow the elastic-search upsert to work.
+         * <li>Rinse and repeat.
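+         *
+         * Illustration (entity names and values below are made-up examples): with a
+         * parent service-subscription and a nested service-instance child, the two
+         * query keys built via determineEntityQueryString() would look roughly like
+         *   service-subscription.service-type:vVoIP
+         *   service-instance.service-instance-id:1234
+         * and both are handed to the AAI generic-query to resolve the child self-link.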
+ */ + + OxmEntityDescriptor parentEntityDescriptor = + oxmModelLoader.getEntityDescriptor(txn.getEntityType()); + + if ( parentEntityDescriptor != null ) { + + CrossEntityReference cerDefinition = parentEntityDescriptor.getCrossEntityReference(); + + if (cerDefinition != null) { + JsonNode convertedNode = null; + try { + convertedNode = NodeUtils.convertJsonStrToJsonNode(txn.getOperationResult().getResult()); + + final String parentEntityQueryString = determineEntityQueryString(txn.getEntityType(), convertedNode); + + List<String> extractedParentEntityAttributeValues = new ArrayList<String>(); + + NodeUtils.extractFieldValuesFromObject(convertedNode, + cerDefinition.getReferenceAttributes(), + extractedParentEntityAttributeValues); + + List<JsonNode> nestedTargetEntityInstances = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey(convertedNode, cerDefinition.getTargetEntityType(), + nestedTargetEntityInstances); + + for (JsonNode targetEntityInstance : nestedTargetEntityInstances) { + + OxmEntityDescriptor cerDescriptor = + oxmModelLoader.getSearchableEntityDescriptor(cerDefinition.getTargetEntityType()); + + if (cerDescriptor != null) { + + String childEntityType = cerDefinition.getTargetEntityType(); + + List<String> childPrimaryKeyNames = cerDescriptor.getPrimaryKeyAttributeName(); + + List<String> childKeyValues = new ArrayList<String>(); + NodeUtils.extractFieldValuesFromObject(targetEntityInstance, childPrimaryKeyNames, childKeyValues); + + String childEntityQueryKeyString = childEntityType + "." + NodeUtils.concatArray(childPrimaryKeyNames,"/") + ":" + NodeUtils.concatArray(childKeyValues); + + /** + * Build generic-query to query child instance self-link from AAI + */ + List<String> orderedQueryKeyParams = new ArrayList<String>(); + orderedQueryKeyParams.add(parentEntityQueryString); + orderedQueryKeyParams.add(childEntityQueryKeyString); + String genericQueryStr = null; + try { + genericQueryStr = aaiDataProvider.getGenericQueryForSelfLink(childEntityType, orderedQueryKeyParams); + + if (genericQueryStr != null) { + + OperationResult aaiQueryResult = aaiDataProvider.queryActiveInventoryWithRetries( + genericQueryStr, "application/json", + aaiConfig.getAaiRestConfig().getNumRequestRetries()); + + if (aaiQueryResult!= null && aaiQueryResult.wasSuccessful()) { + + Collection<JsonNode> entityLinks = new ArrayList<JsonNode>(); + JsonNode genericQueryResult = null; + try { + genericQueryResult = NodeUtils.convertJsonStrToJsonNode(aaiQueryResult.getResult()); + + if ( genericQueryResult != null ) { + + NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", entityLinks); + + String selfLink = null; + + if (entityLinks.size() != 1) { + /** + * an ambiguity exists where we can't reliably determine the self + * link, this should be a permanent error + */ + LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_SELFLINK_AMBIGUITY, String.valueOf(entityLinks.size())); + } else { + selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); + + if (!cerDescriptor.getSearchableAttributes().isEmpty()) { + + IndexableCrossEntityReference icer = + getPopulatedDocument(targetEntityInstance, cerDescriptor); + + for (String parentCrossEntityReferenceAttributeValue : extractedParentEntityAttributeValues) { + icer.addCrossEntityReferenceValue( + parentCrossEntityReferenceAttributeValue); + } + + icer.setLink(selfLink); + + icer.deriveFields(); + + String link = null; + try { + link = getElasticFullUrl("/" + icer.getId(), getIndexName()); + } catch (Exception exc) { + 
LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+                              }
+
+                              if (link != null) {
+                                NetworkTransaction n2 = new NetworkTransaction();
+                                n2.setLink(link);
+                                n2.setEntityType(txn.getEntityType());
+                                n2.setDescriptor(txn.getDescriptor());
+                                n2.setOperationType(HttpMethod.GET);
+
+                                esWorkOnHand.incrementAndGet();
+
+                                supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider),
+                                    esExecutor).whenComplete((result, error) -> {
+
+                                      esWorkOnHand.decrementAndGet();
+
+                                      if (error != null) {
+                                        LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+                                      } else {
+                                        updateElasticSearchCounters(result);
+                                        performDocumentUpsert(result, icer);
+                                      }
+                                    });
+                              }
+                            }
+                          }
+                        } else {
+                          LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DURING_AAI_RESPONSE_CONVERSION);
+                        }
+
+                      } catch (Exception exc) {
+                        LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getLocalizedMessage());
+                      }
+
+                    } else {
+                      String message = "Entity sync failed because AAI query failed with error " + aaiQueryResult.getResult();
+                      LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+                    }
+
+                  } else {
+                    String message = "Entity Sync failed because generic query str could not be determined.";
+                    LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+                  }
+                } catch (Exception exc) {
+                  String message = "Failed to sync entity because generation of generic query failed with error = " + exc.getMessage();
+                  LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_QUERY_ERROR, message);
+                }
+
+              }
+            }
+
+          } catch (IOException ioe) {
+            LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, ioe.getMessage());
+          }
+        }
+
+      } else {
+        LOG.error(AaiUiMsgs.ENTITY_SYNC_FAILED_DESCRIPTOR_NOT_FOUND, txn.getEntityType());
+      }
+    }
+  }
+
+  /**
+   * Perform document upsert.
+   *
+   * @param esGetResult the es get result
+   * @param icer the icer
+   */
+  protected void performDocumentUpsert(NetworkTransaction esGetResult,
+      IndexableCrossEntityReference icer) {
+    /**
+     * <p>
+     * <ul>
+     * As part of the response processing we need to do the following:
+     * <li>1. Extract the version (if present), it will be the ETAG when we use the
+     * Search-Abstraction-Service
+     * <li>2. Spawn next task which is to do the PUT operation into elastic with or without the
+     * version tag
+     * <li>a) if version is null or RC=404, then standard put, no _update with version tag
+     * <li>b) if version != null, do PUT with _update?version= (versionNumber) in the URI to elastic
+     * </ul>
+     * </p>
+     */
+    String link = null;
+    try {
+      link = getElasticFullUrl("/" + icer.getId(), getIndexName());
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+      return;
+    }
+
+    boolean wasEntryDiscovered = false;
+    String versionNumber = null;
+    if (esGetResult.getOperationResult().getResultCode() == 404) {
+      LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, icer.getEntityPrimaryKeyValue());
+    } else if (esGetResult.getOperationResult().getResultCode() == 200) {
+      wasEntryDiscovered = true;
+      try {
+        versionNumber = NodeUtils.extractFieldValueFromObject(
+            NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()),
+            "_version");
+      } catch (IOException exc) {
+        LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "version Number",
+            icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage());
+        return;
+      }
+    } else {
+      /*
+       * Not being a 200 does not mean a failure, e.g. 201 is returned for created resources.
+       * TODO -> should we return here?
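+       * (Context: ElasticSearch version conflicts surface later, on the PUT, with
+       * result code VERSION_CONFLICT_EXCEPTION_CODE, and are re-queued for retry by
+       * processStoreDocumentResult() rather than treated as hard failures.)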
+ */ + LOG.info(AaiUiMsgs.ES_OPERATION_RETURN_CODE, + String.valueOf(esGetResult.getOperationResult().getResultCode())); + return; + } + + try { + String jsonPayload = null; + if (wasEntryDiscovered) { + try { + ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>(); + NodeUtils.extractObjectsByKey( + NodeUtils.convertJsonStrToJsonNode(esGetResult.getOperationResult().getResult()), + "_source", sourceObject); + + if (!sourceObject.isEmpty()) { + String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false); + MergableEntity me = mapper.readValue(responseSource, MergableEntity.class); + ObjectReader updater = mapper.readerForUpdating(me); + MergableEntity merged = updater.readValue(icer.getIndexDocumentJson()); + jsonPayload = mapper.writeValueAsString(merged); + } + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ES_ABORT_CROSS_ENTITY_REF_SYNC, "source value", + icer.getEntityPrimaryKeyValue(), exc.getLocalizedMessage()); + return; + } + } else { + jsonPayload = icer.getIndexDocumentJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), + ElasticSearchConfig.getConfig().getType(), icer.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetResult.getEntityType()); + transactionTracker.setDescriptor(esGetResult.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, esDataProvider, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetResult.getEntityType()); + updateElasticTxn.setDescriptor(esGetResult.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetResult, icer); + } + }); + } + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_REF_PUT, exc.getLocalizedMessage()); + } + } + + /** + * Process store document result. 
+ * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param icer the icer + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, IndexableCrossEntityReference icer) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(icer.getId())) { + + esWorkOnHand.incrementAndGet(); + + RetryCrossEntitySyncContainer rsc = new RetryCrossEntitySyncContainer(esGetResult, icer); + retryQueue.push(rsc); + + LOG.warn(AaiUiMsgs.ES_CROSS_REF_SYNC_VERSION_CONFLICT); + } + } else { + LOG.error(AaiUiMsgs.ES_CROSS_REF_SYNC_FAILURE, String.valueOf(or.getResultCode()), + or.getResult()); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetryCrossEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + IndexableCrossEntityReference icer = rsc.getIndexableCrossEntityReference(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + // In this retry flow the icer object has already + // derived its fields + link = getElasticFullUrl("/" + icer.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow and we did + * that for this request already when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, icer); + } + }); + } + + } + } + } + + /** + * Should allow retry. + * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + LOG.error(AaiUiMsgs.ES_CROSS_ENTITY_RESYNC_LIMIT, id); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + /** + * Gets the populated document. + * + * @param entityNode the entity node + * @param resultDescriptor the result descriptor + * @return the populated document + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + protected IndexableCrossEntityReference getPopulatedDocument(JsonNode entityNode, + OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException { + + IndexableCrossEntityReference icer = new IndexableCrossEntityReference(oxmModelLoader); + + icer.setEntityType(resultDescriptor.getEntityName()); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + icer.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + + return icer; + + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java b/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java new file mode 100644 index 0000000..37b27fd --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/ElasticSearchIndexCleaner.java @@ -0,0 +1,642 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.openecomp.sparky.synchronizer.entity.ObjectIdCollection; +import org.openecomp.sparky.synchronizer.entity.SearchableEntity; +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; + +/** + * The Class ElasticSearchIndexCleaner. 
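+ *
+ * <p>The cleaner snapshots the set of document ids present in the index before and after a sync
+ * pass, then bulk-deletes whatever was present before but not re-imported after (the orphans).
+ * A rough usage sketch from within this package (host, port and sizing values are placeholders):
+ *
+ * <pre>{@code
+ * IndexCleaner cleaner = new ElasticSearchIndexCleaner(restDataProvider, "entity-search-index",
+ *     "default", "localhost", "9200", 5, 5000);
+ * cleaner.populatePreOperationCollection();  // ids before the sync
+ * // ... run the index synchronizer ...
+ * cleaner.populatePostOperationCollection(); // ids after the sync
+ * cleaner.performCleanup();                  // remove ids seen before but not after
+ * }</pre>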
+ */
+public class ElasticSearchIndexCleaner implements IndexCleaner {
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class);
+
+  private static final String BULK_OP_LINE_TEMPLATE = "%s\n";
+  private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
+
+  private ObjectIdCollection before;
+  private ObjectIdCollection after;
+
+  private String host;
+  private String port;
+
+  private String indexName;
+  private String indexType;
+  private int scrollContextTimeToLiveInMinutes;
+  private int numItemsToGetBulkRequest;
+
+  private RestDataProvider restDataProvider;
+  private ObjectMapper mapper;
+
+  /**
+   * Instantiates a new elastic search index cleaner.
+   *
+   * @param restDataProvider the rest data provider
+   * @param indexName the index name
+   * @param indexType the index type
+   * @param host the host
+   * @param port the port
+   * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes
+   * @param numItemsToGetBulkRequest the number of items to fetch per bulk request
+   */
+  protected ElasticSearchIndexCleaner(RestDataProvider restDataProvider, String indexName,
+      String indexType, String host, String port, int scrollContextTimeToLiveInMinutes,
+      int numItemsToGetBulkRequest) {
+    this.restDataProvider = restDataProvider;
+    this.before = null;
+    this.after = null;
+    this.indexName = indexName;
+    this.indexType = indexType;
+    this.mapper = new ObjectMapper();
+    this.host = host;
+    this.port = port;
+    this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes;
+    this.numItemsToGetBulkRequest = numItemsToGetBulkRequest;
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection()
+   */
+  @Override
+  public OperationState populatePreOperationCollection() {
+
+    try {
+      before = retrieveAllDocumentIdentifiers();
+      return OperationState.OK;
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage());
+      return OperationState.ERROR;
+    }
+
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection()
+   */
+  @Override
+  public OperationState populatePostOperationCollection() {
+    try {
+      after = retrieveAllDocumentIdentifiers();
+      return OperationState.OK;
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage());
+      return OperationState.ERROR;
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup()
+   */
+  @Override
+  public OperationState performCleanup() {
+    LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, indexName);
+
+    int sizeBefore = before.getSize();
+    int sizeAfter = after.getSize();
+
+    LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore),
+        String.valueOf(sizeAfter));
+
+    /*
+     * If the post-sync id collection is empty, then something has failed in the sync operation
+     * and we shouldn't do the selective delete right now.
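+     *
+     * For example, if the pre-sync snapshot holds ids {A, B, C} and the post-sync snapshot holds
+     * {A, C}, then B was not re-imported by this sync pass and is treated as an orphan to be
+     * bulk-deleted below.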
+ */ + + if (sizeAfter > 0) { + + Collection<String> presyncIds = before.getImportedObjectIds(); + presyncIds.removeAll(after.getImportedObjectIds()); + + try { + LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, indexName, indexType, + String.valueOf(presyncIds.size())); + + ObjectIdCollection bulkIds = new ObjectIdCollection(); + + Iterator<String> it = presyncIds.iterator(); + int numItemsInBulkRequest = 0; + int numItemsRemainingToBeDeleted = presyncIds.size(); + + while (it.hasNext()) { + + bulkIds.addObjectId(it.next()); + numItemsInBulkRequest++; + + if (numItemsInBulkRequest >= this.numItemsToGetBulkRequest) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); + OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds()); + // pegCountersForElasticBulkDelete(bulkDeleteResult); + numItemsRemainingToBeDeleted -= numItemsInBulkRequest; + numItemsInBulkRequest = 0; + bulkIds.clear(); + } + } + + if (numItemsRemainingToBeDeleted > 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize())); + OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds()); + // pegCountersForElasticBulkDelete(bulkDeleteResult); + } + + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, indexName, exc.getLocalizedMessage()); + + } + } + + return OperationState.OK; + } + + @Override + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + /** + * Builds the initial scroll request payload. + * + * @param numItemsToGetPerRequest the num items to get per request + * @param fieldList the field list + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest, + List<String> fieldList) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("size", numItemsToGetPerRequest); + + ArrayNode fields = mapper.createArrayNode(); + + for (String f : fieldList) { + fields.add(f); + } + + rootNode.set("fields", fields); + + ObjectNode queryNode = mapper.createObjectNode(); + queryNode.set("match_all", mapper.createObjectNode()); + + rootNode.set("query", queryNode); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Builds the subsequent scroll context request payload. + * + * @param scrollId the scroll id + * @param contextTimeToLiveInMinutes the context time to live in minutes + * @return the string + * @throws JsonProcessingException the json processing exception + */ + protected String buildSubsequentScrollContextRequestPayload(String scrollId, + int contextTimeToLiveInMinutes) throws JsonProcessingException { + + ObjectNode rootNode = mapper.createObjectNode(); + + rootNode.put("scroll", contextTimeToLiveInMinutes + "m"); + rootNode.put("scroll_id", scrollId); + + return mapper.writeValueAsString(rootNode); + + } + + /** + * Parses the elastic search result. + * + * @param jsonResult the json result + * @return the json node + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + protected JsonNode parseElasticSearchResult(String jsonResult) + throws JsonProcessingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + return mapper.readTree(jsonResult); + } + + /** + * Lookup index doc. 
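+   *
+   * <p>This is a simple in-memory filter: for example, ids ["a", "c"] applied against documents
+   * [a, b, c] returns [a, c], with ordering taken from the docs list.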
+   *
+   * @param ids the ids
+   * @param docs the docs
+   * @return the array list
+   */
+  protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids,
+      List<SearchableEntity> docs) {
+    ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>();
+
+    if (ids != null && docs != null) {
+      for (SearchableEntity d : docs) {
+        if (ids.contains(d.getId())) {
+          objs.add(d);
+        }
+      }
+    }
+
+    return objs;
+  }
+
+  /**
+   * Builds the delete data object.
+   *
+   * @param index the index
+   * @param type the type
+   * @param id the id
+   * @return the object node
+   */
+  protected ObjectNode buildDeleteDataObject(String index, String type, String id) {
+
+    ObjectNode indexDocProperties = mapper.createObjectNode();
+
+    indexDocProperties.put("_index", index);
+    indexDocProperties.put("_type", type);
+    indexDocProperties.put("_id", id);
+
+    ObjectNode rootNode = mapper.createObjectNode();
+    rootNode.set("delete", indexDocProperties);
+
+    return rootNode;
+  }
+
+  /**
+   * This method might appear to be a little strange; it is simply an optimization that takes a
+   * multi-segment JsonNode key path and walks it to retrieve the node at the end of the path, if
+   * it exists.
+   *
+   * @param startNode the start node
+   * @param fieldPath the field path
+   * @return the node path
+   */
+  protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) {
+
+    JsonNode jsonNode = null;
+
+    for (String field : fieldPath) {
+      if (jsonNode == null) {
+        jsonNode = startNode.get(field);
+      } else {
+        jsonNode = jsonNode.get(field);
+      }
+
+      /*
+       * This is our safety net in case any intermediate path returns a null
+       */
+
+      if (jsonNode == null) {
+        return null;
+      }
+
+    }
+
+    return jsonNode;
+  }
+
+  /**
+   * Gets the full url.
+   *
+   * @param resourceUrl the resource url
+   * @return the full url
+   */
+  private String getFullUrl(String resourceUrl) {
+    return String.format("http://%s:%s%s", host, port, resourceUrl);
+  }
+
+  /**
+   * Retrieve all document identifiers.
+   *
+   * @return the object id collection
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException {
+
+    ObjectIdCollection currentDocumentIds = new ObjectIdCollection();
+
+    long opStartTimeInMs = System.currentTimeMillis();
+
+    List<String> fields = new ArrayList<String>();
+    fields.add("_id");
+    // fields.add("entityType");
+
+    String scrollRequestPayload =
+        buildInitialScrollRequestPayload(this.numItemsToGetBulkRequest, fields);
+
+    final String fullUrlStr = getFullUrl("/" + indexName + "/" + indexType + "/_search?scroll="
+        + this.scrollContextTimeToLiveInMinutes + "m");
+
+    OperationResult result =
+        restDataProvider.doPost(fullUrlStr, scrollRequestPayload, "application/json");
+
+    if (result.wasSuccessful()) {
+
+      JsonNode rootNode = parseElasticSearchResult(result.getResult());
+
+      /*
+       * Check the result for success / failure, and enumerate all the index ids that resulted in
+       * success, and ignore the ones that failed or log them so we have a record of the failure.
+       */
+      int totalRecordsAvailable = 0;
+      String scrollId = null;
+      int numRecordsFetched = 0;
+
+      if (rootNode != null) {
+
+        scrollId = getFieldValue(rootNode, "_scroll_id");
+        final String tookStr = getFieldValue(rootNode, "took");
+        int tookInMs = (tookStr == null) ? 
0 : Integer.parseInt(tookStr); + boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out")); + + if (timedOut) { + LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers", + String.valueOf(tookInMs)); + } else { + LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers", + String.valueOf(tookInMs)); + } + + JsonNode hitsNode = rootNode.get("hits"); + totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText()); + + LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers", + String.valueOf(totalRecordsAvailable)); + + /* + * Collect all object ids + */ + + ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits"); + + Iterator<JsonNode> nodeIterator = hitsArray.iterator(); + + String key = null; + String value = null; + JsonNode jsonNode = null; + + while (nodeIterator.hasNext()) { + + jsonNode = nodeIterator.next(); + + key = getFieldValue(jsonNode, "_id"); + + if (key != null) { + currentDocumentIds.addObjectId(key); + } + + /* + * if (key != null) { + * + * JsonNode fieldsNode = jNode.get("fields"); + * + * if (fieldsNode != null) { + * + * JsonNode entityTypeNode = fieldsNode.get("entityType"); + * + * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode; + * + * if (aNode.size() > 0) { value = aNode.get(0).asText(); objAndtTypesMap.put(key, value); + * numRecordsFetched++; } } } } + */ + + } + + int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched); + + int numRequiredAdditionalFetches = + (totalRecordsRemainingToFetch / this.numItemsToGetBulkRequest); + + /* + * Do an additional fetch for the remaining items (if needed) + */ + + if (totalRecordsRemainingToFetch % numItemsToGetBulkRequest != 0) { + numRequiredAdditionalFetches += 1; + } + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES, + String.valueOf(numRequiredAdditionalFetches)); + } + + + for (int x = 0; x < numRequiredAdditionalFetches; x++) { + + if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) { + // abort the whole thing because now we can't reliably cleanup the orphans. + throw new IOException( + "Failed to collect pre-sync doc collection from index. Aborting operation"); + } + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES, + String.valueOf(currentDocumentIds.getSize()), + String.valueOf(totalRecordsAvailable)); + } + + } + + } + + } else { + // scroll context get failed, nothing else to do + LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString()); + } + + LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers", + String.valueOf((System.currentTimeMillis() - opStartTimeInMs))); + + return currentDocumentIds; + + } + + /** + * Collect items from scroll context. + * + * @param scrollId the scroll id + * @param objectIds the object ids + * @return the operation state + * @throws IOException Signals that an I/O exception has occurred. 
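+   *
+   * <p>The continuation request posted to {@code /_search/scroll} is built by
+   * {@code buildSubsequentScrollContextRequestPayload} and looks roughly like:
+   *
+   * <pre>{@code
+   * { "scroll": "5m", "scroll_id": "<opaque-scroll-token>" }
+   * }</pre>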
+   */
+  private OperationState collectItemsFromScrollContext(String scrollId,
+      ObjectIdCollection objectIds) throws IOException {
+
+    String requestPayload =
+        buildSubsequentScrollContextRequestPayload(scrollId, scrollContextTimeToLiveInMinutes);
+
+    final String fullUrlStr = getFullUrl("/_search/scroll");
+
+    OperationResult opResult =
+        restDataProvider.doPost(fullUrlStr, requestPayload, "application/json");
+
+    if (opResult.getResultCode() >= 300) {
+      LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult());
+      return OperationState.ERROR;
+    }
+
+    JsonNode rootNode = parseElasticSearchResult(opResult.getResult());
+
+    // guard before dereferencing the parsed result
+    if (rootNode == null) {
+      LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, "unparseable scroll context response");
+      return OperationState.ERROR;
+    }
+
+    boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
+    final String tookStr = getFieldValue(rootNode, "took");
+    int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
+
+    if (timedOut) {
+      LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs));
+    } else {
+      LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs));
+    }
+
+    /*
+     * Enumerate all the index ids that were returned in this scroll window.
+     */
+
+    JsonNode hitsNode = rootNode.get("hits");
+
+    if (hitsNode != null) {
+
+      ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");
+
+      String key = null;
+      JsonNode jsonNode = null;
+
+      Iterator<JsonNode> nodeIterator = hitsArray.iterator();
+
+      while (nodeIterator.hasNext()) {
+
+        jsonNode = nodeIterator.next();
+
+        key = getFieldValue(jsonNode, "_id");
+
+        if (key != null) {
+          objectIds.addObjectId(key);
+        }
+
+      }
+    }
+
+    return OperationState.OK;
+  }
+
+  /**
+   * Gets the field value.
+   *
+   * @param node the node
+   * @param fieldName the field name
+   * @return the field value
+   */
+  protected String getFieldValue(JsonNode node, String fieldName) {
+
+    JsonNode field = node.get(fieldName);
+
+    if (field != null) {
+      return field.asText();
+    }
+
+    return null;
+
+  }
+
+  /**
+   * Bulk delete.
+   *
+   * @param docIds the doc ids
+   * @return the operation result
+   * @throws IOException Signals that an I/O exception has occurred.
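+   *
+   * <p>Each id becomes one action line of the newline-delimited {@code _bulk} request body
+   * produced via {@code buildDeleteDataObject}, e.g. for two ids (index and type names are
+   * illustrative):
+   *
+   * <pre>{@code
+   * {"delete":{"_index":"entity-search-index","_type":"default","_id":"doc-1"}}
+   * {"delete":{"_index":"entity-search-index","_type":"default","_id":"doc-2"}}
+   * }</pre>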
+ */ + public OperationResult bulkDelete(Collection<String> docIds) throws IOException { + + if (docIds == null || docIds.size() == 0) { + LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP); + return new OperationResult(500, + "Skipping bulkDelete(); operation because docs to delete list is empty"); + } + + LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size())); + + StringBuilder sb = new StringBuilder(128); + + for (String id : docIds) { + sb.append( + String.format(BULK_OP_LINE_TEMPLATE, buildDeleteDataObject(indexName, indexType, id))); + } + + sb.append("\n"); + + final String fullUrlStr = getFullUrl("/_bulk"); + + return restDataProvider.doPost(fullUrlStr, sb.toString(), "application/x-www-form-urlencoded"); + + } + + /* + + */ + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java new file mode 100644 index 0000000..e53c5a7 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/GeoSynchronizer.java @@ -0,0 +1,469 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Deque; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.inventory.entity.GeoIndexDocument; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor; +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval; +import org.openecomp.sparky.synchronizer.task.StoreDocumentTask; +import org.openecomp.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + + +/** + * The Class GeoSynchronizer. + */ +public class GeoSynchronizer extends AbstractEntitySynchronizer implements IndexSynchronizer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(GeoSynchronizer.class); + + private boolean allWorkEnumerated; + private Deque<SelfLinkDescriptor> selflinks; + + private ElasticSearchConfig elasticConfig = null; + private Map<String, OxmEntityDescriptor> geoDescriptorMap = null; + + /** + * Instantiates a new geo synchronizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public GeoSynchronizer(String indexName) throws Exception { + + super(LOG, "GEO", 2, 5, 5, indexName); + this.allWorkEnumerated = false; + this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>(); + this.synchronizerName = "Geo Synchronizer"; + this.geoDescriptorMap = oxmModelLoader.getGeoEntityDescriptors(); + this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors(geoDescriptorMap); + this.esEntityStats.initializeCountersFromOxmEntityDescriptors(geoDescriptorMap); + + } + + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + resetCounters(); + allWorkEnumerated = false; + syncStartedTimeStampInMs = System.currentTimeMillis(); + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "GeoSynchronizer", "", "Sync", ""); + + collectAllTheWork(); + return OperationState.OK; + } + + + /** + * Collect all the work. 
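+   *
+   * <p>Enumeration follows the synchronizer's usual fan-out pattern: the work-on-hand counter is
+   * primed with the number of entity types, one async task per type collects self-links, and the
+   * caller spins until the counter drains. A simplified sketch of the pattern (the fetch call is
+   * a stand-in for the AAI provider invocation):
+   *
+   * <pre>{@code
+   * aaiWorkOnHand.set(syncTypes.size());
+   * for (String type : syncTypes) {
+   *   supplyAsync(() -> fetchSelfLinks(type), aaiExecutor)
+   *       .whenComplete((result, error) -> aaiWorkOnHand.decrementAndGet());
+   * }
+   * while (aaiWorkOnHand.get() != 0) {
+   *   Thread.sleep(1000); // wait for all self-links to be collected
+   * }
+   * }</pre>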
+   *
+   * @return the operation state
+   */
+  public OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    if (elasticConfig == null) {
+      try {
+        elasticConfig = ElasticSearchConfig.getConfig();
+      } catch (Exception exc) {
+        LOG.error(AaiUiMsgs.CONFIGURATION_ERROR, "Search");
+      }
+    }
+
+    if (geoDescriptorMap.isEmpty()) {
+      LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "geo entities");
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = geoDescriptorMap.keySet();
+
+    try {
+
+      /*
+       * Launch a parallel async task to process the self-links for each entity type (bounded by
+       * the size of the configured executor).
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key);
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+            } finally {
+              // release the work counter even if the fetch fails, so the
+              // enumeration wait-loop below can terminate
+              aaiWorkOnHand.decrementAndGet();
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+          }
+        });
+
+      }
+
+      while (aaiWorkOnHand.get() != 0) {
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+        }
+
+        Thread.sleep(1000);
+      }
+
+      aaiWorkOnHand.set(selflinks.size());
+      allWorkEnumerated = true;
+      syncEntityTypes();
+
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc);
+    }
+    return OperationState.OK;
+  }
+
+  /**
+   * Sync entity types.
+   */
+  private void syncEntityTypes() {
+
+    while (selflinks.peek() != null) {
+
+      SelfLinkDescriptor linkDescriptor = selflinks.poll();
+      aaiWorkOnHand.decrementAndGet();
+
+      OxmEntityDescriptor descriptor = null;
+
+      if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+        descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType());
+
+        if (descriptor == null) {
+          LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+          // go to the next element in the queue
+          continue;
+        }
+
+        NetworkTransaction txn = new NetworkTransaction();
+        txn.setDescriptor(descriptor);
+        txn.setLink(linkDescriptor.getSelfLink());
+        txn.setOperationType(HttpMethod.GET);
+        txn.setEntityType(linkDescriptor.getEntityType());
+
+        aaiWorkOnHand.incrementAndGet();
+
+        supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor)
+            .whenComplete((result, error) -> {
+
+              aaiWorkOnHand.decrementAndGet();
+
+              if (error != null) {
+                LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, error.getMessage());
+              } else {
+                if (result == null) {
+                  LOG.error(AaiUiMsgs.SELF_LINK_GET_NO_RESPONSE, linkDescriptor.getSelfLink());
+                } else {
+                  processEntityTypeSelfLinkResult(result);
+                }
+              }
+            });
+      }
+    }
+  }
+
+  /**
+   * Process entity type self links.
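+   *
+   * <p>The payload is expected to be the AAI self-link listing shape, roughly (values are
+   * illustrative):
+   *
+   * <pre>{@code
+   * { "result-data": [
+   *     { "resource-type": "complex",
+   *       "resource-link": "/aai/v8/cloud-infrastructure/complexes/complex/example-id" } ] }
+   * }</pre>
+   *
+   * <p>Only resource types present in the geo descriptor map are queued, and "?nodes-only" is
+   * appended to each queued self-link before retrieval.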
+ * + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, exc); + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + + Iterator<JsonNode> elementIterator = resultDataArrayNode.elements(); + JsonNode element = null; + + while (elementIterator.hasNext()) { + element = elementIterator.next(); + + final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type"); + final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link"); + + if (resourceType != null && resourceLink != null) { + + if (geoDescriptorMap.containsKey(resourceType)) { + selflinks.add(new SelfLinkDescriptor(resourceLink + "?nodes-only", resourceType)); + } else { + LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType); + // go to next element in iterator + continue; + } + + } + } + } + } + + } + + /** + * Process entity type self link result. + * + * @param txn the txn + */ + private void processEntityTypeSelfLinkResult(NetworkTransaction txn) { + + updateActiveInventoryCounters(txn); + + if (!txn.getOperationResult().wasSuccessful()) { + return; + } + + try { + if (!(txn.getDescriptor().getGeoLatName().isEmpty() + && txn.getDescriptor().getGeoLongName().isEmpty())) { + + GeoIndexDocument geoDoc = new GeoIndexDocument(oxmModelLoader); + + final String jsonResult = txn.getOperationResult().getResult(); + + if (jsonResult != null && jsonResult.length() > 0) { + + populateGeoDocument(geoDoc, jsonResult, txn.getDescriptor(), txn.getLink()); + + if (!geoDoc.isValidGeoDocument()) { + + LOG.info(AaiUiMsgs.GEO_SYNC_IGNORING_ENTITY, geoDoc.getEntityType(), geoDoc.toString()); + + } else { + + String link = null; + try { + link = getElasticFullUrl("/" + geoDoc.getId(), getIndexName(), "default"); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc); + } + + if (link != null) { + + NetworkTransaction n2 = new NetworkTransaction(); + n2.setLink(link); + n2.setEntityType(txn.getEntityType()); + n2.setDescriptor(txn.getDescriptor()); + n2.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + + supplyAsync(new StoreDocumentTask(geoDoc, n2, esDataProvider), esExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, error.getMessage()); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result); + } + }); + } + } + } + } + } catch (JsonProcessingException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc); + } + + return; + } + + + /** + * Process store document result. 
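+   *
+   * <p>Unlike the cross-entity-reference synchronizer, there is no retry path here: a failed
+   * store is logged and then dropped.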
+ * + * @param txn the txn + */ + private void processStoreDocumentResult(NetworkTransaction txn) { + + OperationResult or = txn.getOperationResult(); + + if (!or.wasSuccessful()) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, or.toString()); + /* + * if(or.getResultCode() != 404 || (or.getResultCode() == 404 && + * !synchronizerConfig.isResourceNotFoundErrorsSupressed())) { logger.error( + * "Skipping failed resource = " + "link" + " RC=[" + or.getResultCode() + "]. Message: " + + * or.getResult()); } + */ + + } + + } + + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + return this.getStatReport(System.currentTimeMillis() - syncStartedTimeStampInMs, + showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + /** + * Populate geo document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @param entityLink the entity link + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + protected void populateGeoDocument(GeoIndexDocument doc, String result, + OxmEntityDescriptor resultDescriptor, String entityLink) + throws JsonProcessingException, IOException { + + doc.setSelfLink(entityLink); + doc.setEntityType(resultDescriptor.getEntityName()); + + JsonNode entityNode = mapper.readTree(result); + + List<String> primaryKeyValues = new ArrayList<String>(); + String pkeyValue = null; + + for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) { + pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName); + if (pkeyValue != null) { + primaryKeyValues.add(pkeyValue); + } else { + LOG.warn(AaiUiMsgs.ES_PKEYVALUE_NULL, resultDescriptor.getEntityName()); + } + } + + final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/"); + doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue); + String geoLatKey = resultDescriptor.getGeoLatName(); + String geoLongKey = resultDescriptor.getGeoLongName(); + + doc.setLatitude(NodeUtils.getNodeFieldAsText(entityNode, geoLatKey)); + doc.setLongitude(NodeUtils.getNodeFieldAsText(entityNode, geoLongKey)); + doc.deriveFields(); + + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java b/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java new file mode 100644 index 0000000..81201d2 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/HistoricalEntitySummarizer.java @@ -0,0 +1,374 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.sql.Timestamp; +import java.text.SimpleDateFormat; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +import javax.json.Json; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.dal.rest.HttpMethod; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.util.NodeUtils; +import org.slf4j.MDC; + +import org.openecomp.cl.mdc.MdcContext; + +import org.openecomp.cl.mdc.MdcContext; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class HistoricalEntitySummarizer. + */ +public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer + implements IndexSynchronizer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class); + private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ"; + + private boolean allWorkEnumerated; + private ConcurrentHashMap<String, AtomicInteger> entityCounters; + private boolean syncInProgress; + private Map<String, String> contextMap; + + /** + * Instantiates a new historical entity summarizer. + * + * @param indexName the index name + * @throws Exception the exception + */ + public HistoricalEntitySummarizer(String indexName) throws Exception { + super(LOG, "HES", 2, 5, 5, indexName); + + this.allWorkEnumerated = false; + this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>(); + this.synchronizerName = "Historical Entity Summarizer"; + this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS); + this.syncInProgress = false; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /** + * Collect all the work. 
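+   *
+   * <p>For each searchable entity type, the self-link listing is fetched and the size of its
+   * result-data array is recorded in {@code entityCounters}; each per-type total is then written
+   * to the index as a small summary document, shaped roughly like (values are illustrative):
+   *
+   * <pre>{@code
+   * { "count": 42, "entityType": "generic-vnf", "timestamp": "20170612T164112-0400" }
+   * }</pre>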
+ * + * @return the operation state + */ + private OperationState collectAllTheWork() { + + Map<String, OxmEntityDescriptor> descriptorMap = + oxmModelLoader.getSearchableEntityDescriptors(); + + if (descriptorMap.isEmpty()) { + LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities"); + + return OperationState.ERROR; + } + + Collection<String> entityTypes = descriptorMap.keySet(); + + AtomicInteger asyncWoH = new AtomicInteger(0); + + asyncWoH.set(entityTypes.size()); + + try { + for (String entityType : entityTypes) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + try { + OperationResult typeLinksResult = + aaiDataProvider.getSelfLinksByEntityType(entityType); + updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult); + processEntityTypeSelfLinks(entityType, typeLinksResult); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage()); + + } + + return null; + } + + }, aaiExecutor).whenComplete((result, error) -> { + + asyncWoH.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage()); + } + + }); + + } + + + while (asyncWoH.get() > 0) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + " summarizer waiting for all the links to be processed."); + } + + Thread.sleep(250); + } + + esWorkOnHand.set(entityCounters.size()); + + // start doing the real work + allWorkEnumerated = true; + + insertEntityTypeCounters(); + + if (LOG.isDebugEnabled()) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\n\nHistorical Entity Counters:"); + + for (Entry<String, AtomicInteger> entry : entityCounters.entrySet()) { + sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get()); + } + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); + + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage()); + + + esWorkOnHand.set(0); + allWorkEnumerated = true; + + return OperationState.ERROR; + } + + return OperationState.OK; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync() + */ + @Override + public OperationState doSync() { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", ""); + + if (syncInProgress) { + LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING); + return OperationState.PENDING; + } + + clearCache(); + + syncInProgress = true; + this.syncStartedTimeStampInMs = System.currentTimeMillis(); + allWorkEnumerated = false; + + return collectAllTheWork(); + } + + /** + * Process entity type self links. 
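+   *
+   * <p>For example, a successful listing whose result-data array holds three entries results in
+   * {@code entityCounters.put(entityType, new AtomicInteger(3))}; failed or malformed results
+   * simply leave that type uncounted.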
+ * + * @param entityType the entity type + * @param operationResult the operation result + */ + private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) { + + JsonNode rootNode = null; + + final String jsonResult = operationResult.getResult(); + + if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) { + + try { + rootNode = mapper.readTree(jsonResult); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage()); + return; + } + + JsonNode resultData = rootNode.get("result-data"); + ArrayNode resultDataArrayNode = null; + + if (resultData != null && resultData.isArray()) { + resultDataArrayNode = (ArrayNode) resultData; + entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size())); + } + } + + } + + /** + * Insert entity type counters. + */ + private void insertEntityTypeCounters() { + + if (esWorkOnHand.get() <= 0) { + return; + } + + SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + + Set<Entry<String, AtomicInteger>> entityCounterEntries = entityCounters.entrySet(); + + for (Entry<String, AtomicInteger> entityCounterEntry : entityCounterEntries) { + + supplyAsync(new Supplier<Void>() { + + @Override + public Void get() { + MDC.setContextMap(contextMap); + String jsonString = Json.createObjectBuilder().add( + "count", entityCounterEntry.getValue().get()) + .add("entityType", entityCounterEntry.getKey()) + .add("timestamp", currentFormattedTimeStamp).build().toString(); + + String link = null; + try { + link = getElasticFullUrl("", indexName); + OperationResult or = esDataProvider.doPost(link, jsonString, "application/json"); + updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage() ); + } + + return null; + } + + }, esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + }); + + } + + while (esWorkOnHand.get() > 0) { + + try { + Thread.sleep(500); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage()); + } + } + + } + + @Override + public SynchronizerState getState() { + + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + return getStatReport(System.currentTimeMillis() - this.syncStartedTimeStampInMs, + showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC,indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand + + " all work enumerated = " + allWorkEnumerated); + } + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + this.syncInProgress = false; + + return true; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache() + */ + @Override + public void clearCache() { + 
+ if (syncInProgress) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Historical Entity Summarizer in progress, request to clear cache ignored"); + return; + } + + super.clearCache(); + this.resetCounters(); + if (entityCounters != null) { + entityCounters.clear(); + } + + allWorkEnumerated = false; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java new file mode 100644 index 0000000..51ce652 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/IndexCleaner.java @@ -0,0 +1,58 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import org.openecomp.sparky.synchronizer.enumeration.OperationState; + +/** + * The Interface IndexCleaner. + */ +public interface IndexCleaner { + + /** + * Populate pre operation collection. + * + * @return the operation state + */ + public OperationState populatePreOperationCollection(); + + /** + * Populate post operation collection. + * + * @return the operation state + */ + public OperationState populatePostOperationCollection(); + + /** + * Perform cleanup. + * + * @return the operation state + */ + public OperationState performCleanup(); + + public String getIndexName(); + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java new file mode 100644 index 0000000..dcd016b --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/IndexIntegrityValidator.java @@ -0,0 +1,165 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.openecomp.sparky.logging.AaiUiMsgs; + +/** + * The Class IndexIntegrityValidator. + * + * @author davea. + */ +public class IndexIntegrityValidator implements IndexValidator { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(IndexIntegrityValidator.class); + + private String host; + private String port; + private String indexName; + private String indexType; + private String tableConfigJson; + + private final RestDataProvider restDataProvider; + + /** + * Instantiates a new index integrity validator. + * + * @param restDataProvider the rest data provider + * @param indexName the index name + * @param indexType the index type + * @param host the host + * @param port the port + * @param tableConfigJson the table config json + */ + public IndexIntegrityValidator(RestDataProvider restDataProvider, String indexName, + String indexType, String host, String port, String tableConfigJson) { + this.restDataProvider = restDataProvider; + this.host = host; + this.port = port; + this.indexName = indexName; + this.indexType = indexType; + this.tableConfigJson = tableConfigJson; + } + + @Override + public String getIndexName() { + return indexName; + } + + public void setIndexName(String indexName) { + this.indexName = indexName; + } + + public String getIndexType() { + return indexType; + } + + public void setIndexType(String indexType) { + this.indexType = indexType; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexValidator#exists() + */ + @Override + public boolean exists() { + final String fullUrlStr = getFullUrl("/" + indexName + "/"); + OperationResult existsResult = restDataProvider.doHead(fullUrlStr, "application/json"); + + int rc = existsResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_EXISTS, indexName); + return true; + } else { + LOG.info(AaiUiMsgs.INDEX_NOT_EXIST, indexName); + return false; + } + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexValidator#integrityValid() + */ + @Override + public boolean integrityValid() { + // TODO Auto-generated method stub + // logger.info("; + // System.out.println("IndexIntegrityValidator.integrityValid() for + // indexName = " + indexName); + return true; + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexValidator#createOrRepair() + */ + @Override + public void createOrRepair() { + // TODO Auto-generated method stub + String message = "IndexIntegrityValidator.createOrRepair() for indexName = " + indexName; + LOG.info(AaiUiMsgs.INFO_GENERIC, message); + + final String fullUrlStr = getFullUrl("/" + indexName + "/"); + OperationResult createResult = + restDataProvider.doPut(fullUrlStr, tableConfigJson, "application/json"); + + int rc = createResult.getResultCode(); + + if (rc >= 200 && rc < 300) { + LOG.info(AaiUiMsgs.INDEX_RECREATED, indexName); + } else if (rc == 400) { + LOG.info(AaiUiMsgs.INDEX_ALREADY_EXISTS, indexName); + } else { + LOG.warn(AaiUiMsgs.INDEX_INTEGRITY_CHECK_FAILED, 
indexName, createResult.getResult()); + } + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexValidator#destroyIndex() + */ + @Override + public void destroyIndex() { + // TODO Auto-generated method stub + // we don't do this for now + + } + + /** + * Gets the full url. + * + * @param resourceUrl the resource url + * @return the full url + */ + private String getFullUrl(String resourceUrl) { + return String.format("http://%s:%s%s", host, port, resourceUrl); + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java new file mode 100644 index 0000000..520606d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/IndexSynchronizer.java @@ -0,0 +1,68 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import org.openecomp.sparky.synchronizer.enumeration.OperationState; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; + +/** + * The Interface IndexSynchronizer. + * + * @author davea. + */ +public interface IndexSynchronizer { + + /** + * Do sync. + * + * @return the operation state + */ + public OperationState doSync(); + + public SynchronizerState getState(); + + /** + * Gets the stat report. + * + * @param finalReport the final report + * @return the stat report + */ + public String getStatReport(boolean finalReport); + + /** + * Shutdown. + */ + public void shutdown(); + + public String getIndexName(); + + /** + * Clear cache. + */ + public void clearCache(); + + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java b/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java new file mode 100644 index 0000000..29d44e3 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/IndexValidator.java @@ -0,0 +1,59 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +/** + * The Interface IndexValidator. + */ +public interface IndexValidator { + + /** + * Exists. + * + * @return true, if successful + */ + public boolean exists(); + + /** + * Integrity valid. + * + * @return true, if successful + */ + public boolean integrityValid(); + + /** + * Creates the or repair. + */ + public void createOrRepair(); + + /** + * Destroy index. + */ + public void destroyIndex(); + + public String getIndexName(); + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java b/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java new file mode 100644 index 0000000..e1a695c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/MyErrorHandler.java @@ -0,0 +1,94 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import java.io.PrintWriter; + +import org.xml.sax.ErrorHandler; +import org.xml.sax.SAXException; +import org.xml.sax.SAXParseException; + +/** + * The Class MyErrorHandler. + */ +public class MyErrorHandler implements ErrorHandler { + + /** Error handler output goes here. */ + private PrintWriter out; + + /** + * Instantiates a new my error handler. + * + * @param out the out + */ + public MyErrorHandler(PrintWriter out) { + this.out = out; + } + + /** + * Returns a string describing parse exception details. + * + * @param spe the spe + * @return the parses the exception info + */ + private String getParseExceptionInfo(SAXParseException spe) { + String systemId = spe.getSystemId(); + if (systemId == null) { + systemId = "null"; + } + String info = "URI=" + systemId + " Line=" + spe.getLineNumber() + ": " + spe.getMessage(); + return info; + } + + // The following methods are standard SAX ErrorHandler methods. + // See SAX documentation for more info. 
+ + /* (non-Javadoc) + * @see org.xml.sax.ErrorHandler#warning(org.xml.sax.SAXParseException) + */ + @Override + public void warning(SAXParseException spe) throws SAXException { + out.println("Warning: " + getParseExceptionInfo(spe)); + } + + /* (non-Javadoc) + * @see org.xml.sax.ErrorHandler#error(org.xml.sax.SAXParseException) + */ + @Override + public void error(SAXParseException spe) throws SAXException { + String message = "Error: " + getParseExceptionInfo(spe); + throw new SAXException(message); + } + + /* (non-Javadoc) + * @see org.xml.sax.ErrorHandler#fatalError(org.xml.sax.SAXParseException) + */ + @Override + public void fatalError(SAXParseException spe) throws SAXException { + String message = "Fatal Error: " + getParseExceptionInfo(spe); + throw new SAXException(message); + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java b/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java new file mode 100644 index 0000000..3ebf203 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/SearchableEntitySynchronizer.java @@ -0,0 +1,760 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */
+
+package org.openecomp.sparky.synchronizer;
+
+import static java.util.concurrent.CompletableFuture.supplyAsync;
+
+import org.openecomp.cl.mdc.MdcContext;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectReader;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Deque;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
+import java.util.concurrent.ExecutorService;
+import java.util.function.Supplier;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.config.oxm.OxmEntityDescriptor;
+import org.openecomp.sparky.dal.NetworkTransaction;
+import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig;
+import org.openecomp.sparky.dal.rest.HttpMethod;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration;
+import org.openecomp.sparky.synchronizer.entity.MergableEntity;
+import org.openecomp.sparky.synchronizer.entity.SearchableEntity;
+import org.openecomp.sparky.synchronizer.entity.SelfLinkDescriptor;
+import org.openecomp.sparky.synchronizer.enumeration.OperationState;
+import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState;
+import org.openecomp.sparky.synchronizer.task.PerformActiveInventoryRetrieval;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchPut;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchRetrieval;
+import org.openecomp.sparky.synchronizer.task.PerformElasticSearchUpdate;
+import org.openecomp.sparky.util.NodeUtils;
+import org.slf4j.MDC;
+
+/**
+ * The Class SearchableEntitySynchronizer.
+ */
+public class SearchableEntitySynchronizer extends AbstractEntitySynchronizer
+    implements IndexSynchronizer {
+
+  /**
+   * The Class RetrySearchableEntitySyncContainer.
+   */
+  private class RetrySearchableEntitySyncContainer {
+    NetworkTransaction txn;
+    SearchableEntity se;
+
+    /**
+     * Instantiates a new retry searchable entity sync container.
+     *
+     * @param txn the txn
+     * @param se the se
+     */
+    public RetrySearchableEntitySyncContainer(NetworkTransaction txn, SearchableEntity se) {
+      this.txn = txn;
+      this.se = se;
+    }
+
+    public NetworkTransaction getNetworkTransaction() {
+      return txn;
+    }
+
+    public SearchableEntity getSearchableEntity() {
+      return se;
+    }
+  }
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(SearchableEntitySynchronizer.class);
+
+  private boolean allWorkEnumerated;
+  private Deque<SelfLinkDescriptor> selflinks;
+  private Deque<RetrySearchableEntitySyncContainer> retryQueue;
+  private Map<String, Integer> retryLimitTracker;
+  protected ExecutorService esPutExecutor;
+
+  /**
+   * Instantiates a new searchable entity synchronizer.
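+   *
+   * <p>Illustrative wiring only (a sketch; the adapter setters mirror how SyncHelper
+   * configures this synchronizer elsewhere in this codebase):
+   *
+   * <pre>
+   * SearchableEntitySynchronizer ses = new SearchableEntitySynchronizer(esConfig.getIndexName());
+   * ses.setAaiDataProvider(aaiAdapter); // an ActiveInventoryAdapter
+   * ses.setEsDataProvider(esAdapter);   // an ElasticSearchAdapter
+   * OperationState result = ses.doSync();
+   * </pre>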
+   *
+   * @param indexName the index name
+   * @throws Exception the exception
+   */
+  public SearchableEntitySynchronizer(String indexName) throws Exception {
+    super(LOG, "SES", 2, 5, 5, indexName);
+    this.allWorkEnumerated = false;
+    this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
+    this.retryQueue = new ConcurrentLinkedDeque<RetrySearchableEntitySyncContainer>();
+    this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
+    this.synchronizerName = "Searchable Entity Synchronizer";
+    this.esPutExecutor = NodeUtils.createNamedExecutor("SES-ES-PUT", 5, LOG);
+    this.aaiEntityStats.initializeCountersFromOxmEntityDescriptors(
+        oxmModelLoader.getSearchableEntityDescriptors());
+    this.esEntityStats.initializeCountersFromOxmEntityDescriptors(
+        oxmModelLoader.getSearchableEntityDescriptors());
+  }
+
+  /**
+   * Collect all the work.
+   *
+   * @return the operation state
+   */
+  private OperationState collectAllTheWork() {
+    final Map<String, String> contextMap = MDC.getCopyOfContextMap();
+    Map<String, OxmEntityDescriptor> descriptorMap =
+        oxmModelLoader.getSearchableEntityDescriptors();
+
+    if (descriptorMap.isEmpty()) {
+      LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES);
+      LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SEARCHABLE_ENTITIES);
+      return OperationState.ERROR;
+    }
+
+    Collection<String> syncTypes = descriptorMap.keySet();
+
+    /* For debugging, the sync can be restricted to a single entity type, e.g.:
+     * Collection<String> syncTypes = new ArrayList<String>();
+     * syncTypes.add("service-instance");
+     */
+
+    try {
+
+      /*
+       * Launch a parallel async task per entity type so the self-link documents are collected
+       * concurrently (making maximal use of the configured executor).
+       */
+
+      aaiWorkOnHand.set(syncTypes.size());
+
+      for (String key : syncTypes) {
+
+        supplyAsync(new Supplier<Void>() {
+
+          @Override
+          public Void get() {
+            MDC.setContextMap(contextMap);
+            OperationResult typeLinksResult = null;
+            try {
+              typeLinksResult = aaiDataProvider.getSelfLinksByEntityType(key);
+              aaiWorkOnHand.decrementAndGet();
+              processEntityTypeSelfLinks(typeLinksResult);
+            } catch (Exception exc) {
+              LOG.error(AaiUiMsgs.ERROR_GENERIC,
+                  "Failed to collect self links for entity type = " + key + ". Error = "
+                      + exc.getMessage());
+            }
+
+            return null;
+          }
+
+        }, aaiExecutor).whenComplete((result, error) -> {
+
+          if (error != null) {
+            LOG.error(AaiUiMsgs.ERROR_GENERIC,
+                "An error occurred getting data from AAI. Error = " + error.getMessage());
+          }
+        });
+
+      }
+
+      while (aaiWorkOnHand.get() != 0) {
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
+        }
+
+        Thread.sleep(1000);
+      }
+
+      aaiWorkOnHand.set(selflinks.size());
+      allWorkEnumerated = true;
+      syncEntityTypes();
+
+      while (!isSyncDone()) {
+        performRetrySync();
+        Thread.sleep(1000);
+      }
+
+      /*
+       * Make sure we don't hang on to retries that failed, which could cause issues during
+       * future syncs.
+       */
+      retryLimitTracker.clear();
+
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_GENERIC,
+          "An error occurred while enumerating and synchronizing searchable entities. Error = "
+              + exc.getMessage());
+    }
+
+    return OperationState.OK;
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
+   */
+  @Override
+  public OperationState doSync() {
+    String txnID = NodeUtils.getRandomTxnId();
+    MdcContext.initialize(txnID, "SearchableEntitySynchronizer", "", "Sync", "");
+
+    resetCounters();
+    this.allWorkEnumerated = false;
+    syncStartedTimeStampInMs = System.currentTimeMillis();
+    collectAllTheWork();
+
+    return OperationState.OK;
+  }
+
+  /**
+   * Process entity type self links.
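+   *
+   * <p>The operation result is expected to carry a JSON body of roughly the following shape
+   * (the values shown are illustrative, not taken from a real system):
+   *
+   * <pre>
+   * {
+   *   "result-data": [
+   *     {
+   *       "resource-type": "generic-vnf",
+   *       "resource-link": "/aai/v9/network/generic-vnfs/generic-vnf/example-vnf-id"
+   *     }
+   *   ]
+   * }
+   * </pre>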
+   *
+   * @param operationResult the operation result
+   */
+  private void processEntityTypeSelfLinks(OperationResult operationResult) {
+
+    JsonNode rootNode = null;
+
+    final String jsonResult = operationResult.getResult();
+
+    if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
+
+      try {
+        rootNode = mapper.readTree(jsonResult);
+      } catch (IOException exc) {
+        String message =
+            "Could not deserialize JSON (representing operation result) as node tree. "
+                + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
+        // no parse tree to walk; bail out instead of dereferencing a null root node
+        return;
+      }
+
+      JsonNode resultData = rootNode.get("result-data");
+      ArrayNode resultDataArrayNode = null;
+
+      if (resultData != null && resultData.isArray()) {
+        resultDataArrayNode = (ArrayNode) resultData;
+
+        Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
+        JsonNode element = null;
+
+        while (elementIterator.hasNext()) {
+          element = elementIterator.next();
+
+          final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
+          final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
+
+          OxmEntityDescriptor descriptor = null;
+
+          if (resourceType != null && resourceLink != null) {
+
+            descriptor = oxmModelLoader.getEntityDescriptor(resourceType);
+
+            if (descriptor == null) {
+              LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
+              // go to next element in iterator
+              continue;
+            }
+
+            if (descriptor.hasSearchableAttributes()) {
+              selflinks.add(new SelfLinkDescriptor(resourceLink,
+                  SynchronizerConfiguration.NODES_ONLY_MODIFIER, resourceType));
+            }
+
+          }
+        }
+      }
+    }
+
+  }
+
+  /**
+   * Sync entity types.
+   */
+  private void syncEntityTypes() {
+
+    while (selflinks.peek() != null) {
+
+      SelfLinkDescriptor linkDescriptor = selflinks.poll();
+      aaiWorkOnHand.decrementAndGet();
+
+      OxmEntityDescriptor descriptor = null;
+
+      if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
+
+        descriptor = oxmModelLoader.getEntityDescriptor(linkDescriptor.getEntityType());
+
+        if (descriptor == null) {
+          LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
+          // go to next element in iterator
+          continue;
+        }
+
+        NetworkTransaction txn = new NetworkTransaction();
+        txn.setDescriptor(descriptor);
+        txn.setLink(linkDescriptor.getSelfLink());
+        txn.setOperationType(HttpMethod.GET);
+        txn.setEntityType(linkDescriptor.getEntityType());
+
+        aaiWorkOnHand.incrementAndGet();
+
+        supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiDataProvider), aaiExecutor)
+            .whenComplete((result, error) -> {
+
+              aaiWorkOnHand.decrementAndGet();
+
+              if (error != null) {
+                LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
+              } else {
+                if (result == null) {
+                  LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
+                      linkDescriptor.getSelfLink());
+                } else {
+                  updateActiveInventoryCounters(result);
+                  fetchDocumentForUpsert(result);
+                }
+              }
+            });
+      }
+
+    }
+
+  }
+
+  /**
+   * Perform document upsert.
+   *
+   * @param esGetTxn the es get txn
+   * @param se the se
+   */
+  protected void performDocumentUpsert(NetworkTransaction esGetTxn, SearchableEntity se) {
+    /*
+     * As part of the response processing we need to do the following:
+     *
+     * 1. Extract the version (if present); it will be the ETAG when we use the
+     *    Search-Abstraction-Service.
+     *
+     * 2. Spawn the next task, which is the PUT operation into Elasticsearch, with or without
+     *    the version tag:
+     *    a) if the version is null or the GET returned 404, do a standard PUT with no version
+     *       tag;
+     *    b) if the version is non-null, do a PUT with _update?version=<versionNumber> in the
+     *       URI to Elasticsearch.
+     */
+    String link = null;
+    try {
+      link = getElasticFullUrl("/" + se.getId(), getIndexName());
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
+      return;
+    }
+
+    String versionNumber = null;
+    boolean wasEntryDiscovered = false;
+    if (esGetTxn.getOperationResult().getResultCode() == 404) {
+      LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, se.getEntityPrimaryKeyValue());
+    } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
+      wasEntryDiscovered = true;
+      try {
+        versionNumber = NodeUtils.extractFieldValueFromObject(
+            NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+            "_version");
+      } catch (IOException exc) {
+        String message =
+            "Error extracting version number from response, aborting searchable entity sync of "
+                + se.getEntityPrimaryKeyValue() + ". Error - " + exc.getLocalizedMessage();
+        LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message);
+        return;
+      }
+    } else {
+      /*
+       * A result code other than 200 does not necessarily mean a failure; e.g. 201 is returned
+       * when a resource is created. TODO -> Should we return here?
+       */
+      LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
+          String.valueOf(esGetTxn.getOperationResult().getResultCode()));
+      return;
+    }
+
+    try {
+      String jsonPayload = null;
+      if (wasEntryDiscovered) {
+        try {
+          ArrayList<JsonNode> sourceObject = new ArrayList<JsonNode>();
+          NodeUtils.extractObjectsByKey(
+              NodeUtils.convertJsonStrToJsonNode(esGetTxn.getOperationResult().getResult()),
+              "_source", sourceObject);
+
+          if (!sourceObject.isEmpty()) {
+            String responseSource = NodeUtils.convertObjectToJson(sourceObject.get(0), false);
+            MergableEntity me = mapper.readValue(responseSource, MergableEntity.class);
+            ObjectReader updater = mapper.readerForUpdating(me);
+            MergableEntity merged = updater.readValue(se.getIndexDocumentJson());
+            jsonPayload = mapper.writeValueAsString(merged);
+          }
+        } catch (IOException exc) {
+          String message =
+              "Error extracting source value from response, aborting searchable entity sync of "
+                  + se.getEntityPrimaryKeyValue() + ". 
Error - " + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ERROR_EXTRACTING_FROM_RESPONSE, message); + return; + } + } else { + jsonPayload = se.getIndexDocumentJson(); + } + + if (wasEntryDiscovered) { + if (versionNumber != null && jsonPayload != null) { + + String requestPayload = esDataProvider.buildBulkImportOperationRequest(getIndexName(), + ElasticSearchConfig.getConfig().getType(), se.getId(), versionNumber, jsonPayload); + + NetworkTransaction transactionTracker = new NetworkTransaction(); + transactionTracker.setEntityType(esGetTxn.getEntityType()); + transactionTracker.setDescriptor(esGetTxn.getDescriptor()); + transactionTracker.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchUpdate(ElasticSearchConfig.getConfig().getBulkUrl(), + requestPayload, esDataProvider, transactionTracker), esPutExecutor) + .whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = "Searchable entity sync UPDATE PUT error - " + + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + + } else { + if (link != null && jsonPayload != null) { + + NetworkTransaction updateElasticTxn = new NetworkTransaction(); + updateElasticTxn.setLink(link); + updateElasticTxn.setEntityType(esGetTxn.getEntityType()); + updateElasticTxn.setDescriptor(esGetTxn.getDescriptor()); + updateElasticTxn.setOperationType(HttpMethod.PUT); + + esWorkOnHand.incrementAndGet(); + supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, esDataProvider), + esPutExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + String message = + "Searchable entity sync UPDATE PUT error - " + error.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + updateElasticSearchCounters(result); + processStoreDocumentResult(result, esGetTxn, se); + } + }); + } + } + } catch (Exception exc) { + String message = "Exception caught during searchable entity sync PUT operation. Message - " + + exc.getLocalizedMessage(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + + /** + * Populate searchable entity document. + * + * @param doc the doc + * @param result the result + * @param resultDescriptor the result descriptor + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. 
+   */
+  protected void populateSearchableEntityDocument(SearchableEntity doc, String result,
+      OxmEntityDescriptor resultDescriptor) throws JsonProcessingException, IOException {
+
+    doc.setEntityType(resultDescriptor.getEntityName());
+
+    JsonNode entityNode = mapper.readTree(result);
+
+    List<String> primaryKeyValues = new ArrayList<String>();
+    String pkeyValue = null;
+
+    for (String keyName : resultDescriptor.getPrimaryKeyAttributeName()) {
+      pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
+      if (pkeyValue != null) {
+        primaryKeyValues.add(pkeyValue);
+      } else {
+        String message = "populateSearchableEntityDocument(), pKeyValue is null for entityType = "
+            + resultDescriptor.getEntityName();
+        LOG.warn(AaiUiMsgs.WARN_GENERIC, message);
+      }
+    }
+
+    final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
+    doc.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
+
+    final List<String> searchTagFields = resultDescriptor.getSearchableAttributes();
+
+    /*
+     * Based on configuration, use the configured field names for this entity type to build a
+     * multi-value collection of search tags for Elasticsearch entity search criteria.
+     */
+    for (String searchTagField : searchTagFields) {
+      String searchTagValue = NodeUtils.getNodeFieldAsText(entityNode, searchTagField);
+      if (searchTagValue != null && !searchTagValue.isEmpty()) {
+        doc.addSearchTagWithKey(searchTagValue, searchTagField);
+      }
+    }
+  }
+
+  /**
+   * Fetch document for upsert.
+   *
+   * @param txn the txn
+   */
+  private void fetchDocumentForUpsert(NetworkTransaction txn) {
+    if (!txn.getOperationResult().wasSuccessful()) {
+      String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      return;
+    }
+
+    try {
+      if (txn.getDescriptor().hasSearchableAttributes()) {
+
+        final String jsonResult = txn.getOperationResult().getResult();
+        if (jsonResult != null && jsonResult.length() > 0) {
+
+          SearchableEntity se = new SearchableEntity(oxmModelLoader);
+          se.setLink(txn.getLink());
+          populateSearchableEntityDocument(se, jsonResult, txn.getDescriptor());
+          se.deriveFields();
+
+          String link = null;
+          try {
+            link = getElasticFullUrl("/" + se.getId(), getIndexName());
+          } catch (Exception exc) {
+            LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
+          }
+
+          if (link != null) {
+            NetworkTransaction n2 = new NetworkTransaction();
+            n2.setLink(link);
+            n2.setEntityType(txn.getEntityType());
+            n2.setDescriptor(txn.getDescriptor());
+            n2.setOperationType(HttpMethod.GET);
+
+            esWorkOnHand.incrementAndGet();
+
+            supplyAsync(new PerformElasticSearchRetrieval(n2, esDataProvider), esExecutor)
+                .whenComplete((result, error) -> {
+
+                  esWorkOnHand.decrementAndGet();
+
+                  if (error != null) {
+                    LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
+                  } else {
+                    updateElasticSearchCounters(result);
+                    performDocumentUpsert(result, se);
+                  }
+                });
+          }
+        }
+
+      }
+    } catch (JsonProcessingException exc) {
+      LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR,
+          "Failed to build a searchable entity document. Error = " + exc.getLocalizedMessage());
+    } catch (IOException exc) {
+      LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR,
+          "IO error while fetching a document for upsert. Error = " + exc.getLocalizedMessage());
+    }
+  }
+
+  /**
+   * Process store document result.
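+   *
+   * <p>A version-conflict result from Elasticsearch (VERSION_CONFLICT_EXCEPTION_CODE, typically
+   * HTTP 409) is treated as retryable rather than fatal: the original GET transaction and the
+   * entity are pushed onto the retry queue so the document can be re-fetched, re-merged, and
+   * re-stored by {@link #performRetrySync()}.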
+ * + * @param esPutResult the es put result + * @param esGetResult the es get result + * @param se the se + */ + private void processStoreDocumentResult(NetworkTransaction esPutResult, + NetworkTransaction esGetResult, SearchableEntity se) { + + OperationResult or = esPutResult.getOperationResult(); + + if (!or.wasSuccessful()) { + if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) { + + if (shouldAllowRetry(se.getId())) { + esWorkOnHand.incrementAndGet(); + + RetrySearchableEntitySyncContainer rsc = + new RetrySearchableEntitySyncContainer(esGetResult, se); + retryQueue.push(rsc); + + String message = "Store document failed during searchable entity synchronization" + + " due to version conflict. Entity will be re-synced."; + LOG.warn(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } else { + String message = + "Store document failed during searchable entity synchronization with result code " + + or.getResultCode() + " and result message " + or.getResult(); + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } + } + } + + /** + * Perform retry sync. + */ + private void performRetrySync() { + while (retryQueue.peek() != null) { + + RetrySearchableEntitySyncContainer rsc = retryQueue.poll(); + if (rsc != null) { + + SearchableEntity se = rsc.getSearchableEntity(); + NetworkTransaction txn = rsc.getNetworkTransaction(); + + String link = null; + try { + /* + * In this retry flow the se object has already derived its fields + */ + link = getElasticFullUrl("/" + se.getId(), getIndexName()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage()); + } + + if (link != null) { + NetworkTransaction retryTransaction = new NetworkTransaction(); + retryTransaction.setLink(link); + retryTransaction.setEntityType(txn.getEntityType()); + retryTransaction.setDescriptor(txn.getDescriptor()); + retryTransaction.setOperationType(HttpMethod.GET); + + /* + * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already + * called incrementAndGet when queuing the failed PUT! + */ + + supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, esDataProvider), + esExecutor).whenComplete((result, error) -> { + + esWorkOnHand.decrementAndGet(); + + if (error != null) { + LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage()); + } else { + updateElasticSearchCounters(result); + performDocumentUpsert(result, se); + } + }); + } + + } + } + } + + /** + * Should allow retry. 
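+   *
+   * <p>Illustrative call sequence for one entity id (the limit value is whatever
+   * RETRY_COUNT_PER_ENTITY_LIMIT is configured to in the parent class):
+   *
+   * <pre>
+   * shouldAllowRetry("id-1"); // true, tracked count becomes 1
+   * shouldAllowRetry("id-1"); // true, tracked count becomes 2
+   * // ...
+   * shouldAllowRetry("id-1"); // false once the tracked count reaches the limit
+   * </pre>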
+ * + * @param id the id + * @return true, if successful + */ + private boolean shouldAllowRetry(String id) { + boolean isRetryAllowed = true; + if (retryLimitTracker.get(id) != null) { + Integer currentCount = retryLimitTracker.get(id); + if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) { + isRetryAllowed = false; + String message = "Searchable entity re-sync limit reached for " + id + + ", re-sync will no longer be attempted for this entity"; + LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message); + } else { + Integer newCount = new Integer(currentCount.intValue() + 1); + retryLimitTracker.put(id, newCount); + } + } else { + Integer firstRetryCount = new Integer(1); + retryLimitTracker.put(id, firstRetryCount); + } + + return isRetryAllowed; + } + + @Override + public SynchronizerState getState() { + if (!isSyncDone()) { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + } + + return SynchronizerState.IDLE; + + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean) + */ + @Override + public String getStatReport(boolean showFinalReport) { + return this.getStatReport(System.currentTimeMillis() - syncStartedTimeStampInMs, + showFinalReport); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown() + */ + @Override + public void shutdown() { + this.shutdownExecutors(); + } + + @Override + protected boolean isSyncDone() { + int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get(); + + if (totalWorkOnHand > 0 || !allWorkEnumerated) { + return false; + } + + return true; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java b/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java new file mode 100644 index 0000000..85cbeb5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/SyncController.java @@ -0,0 +1,480 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.synchronizer; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.util.NodeUtils; + +/** + * The Class SyncController. + * + * @author davea. + */ +public class SyncController { + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SyncController.class); + + /** + * The Enum InternalState. + */ + private enum InternalState { + IDLE, PRE_SYNC, SYNC_OPERATION, SELECTIVE_DELETE, ABORTING_SYNC, REPAIRING_INDEX, POST_SYNC, + TEST_INDEX_INTEGRITY, GENERATE_FINAL_REPORT + } + + /** + * The Enum SyncActions. + */ + public enum SyncActions { + SYNCHRONIZE, REPAIR_INDEX, INDEX_INTEGRITY_VALIDATION_COMPLETE, PRE_SYNC_COMPLETE, + SYNC_COMPLETE, SYNC_ABORTED, SYNC_FAILURE, POST_SYNC_COMPLETE, PURGE_COMPLETE, REPORT_COMPLETE + } + + private Collection<IndexSynchronizer> registeredSynchronizers; + private Collection<IndexValidator> registeredIndexValidators; + private Collection<IndexCleaner> registeredIndexCleaners; + private InternalState currentInternalState; + private ExecutorService syncControllerExecutor; + private ExecutorService statReporterExecutor; + private final String controllerName; + + /** + * Instantiates a new sync controller. + * + * @param name the name + * @throws Exception the exception + */ + public SyncController(String name) throws Exception { + + this.controllerName = name; + /* + * Does LHS result in a non-duplicated object collection?? What happens if you double-add an + * object? + */ + + registeredSynchronizers = new LinkedHashSet<IndexSynchronizer>(); + registeredIndexValidators = new LinkedHashSet<IndexValidator>(); + registeredIndexCleaners = new LinkedHashSet<IndexCleaner>(); + + this.syncControllerExecutor = NodeUtils.createNamedExecutor("SyncController", 5, LOG); + this.statReporterExecutor = NodeUtils.createNamedExecutor("StatReporter", 1, LOG); + + this.currentInternalState = InternalState.IDLE; + } + + /** + * Change internal state. + * + * @param newState the new state + * @param causedByAction the caused by action + */ + private void changeInternalState(InternalState newState, SyncActions causedByAction) { + LOG.info(AaiUiMsgs.SYNC_INTERNAL_STATE_CHANGED, controllerName, + currentInternalState.toString(), newState.toString(), causedByAction.toString()); + + this.currentInternalState = newState; + + performStateAction(); + } + + public String getControllerName() { + return controllerName; + } + + /** + * Perform action. + * + * @param requestedAction the requested action + */ + public void performAction(SyncActions requestedAction) { + + if (currentInternalState == InternalState.IDLE) { + + try { + switch (requestedAction) { + case SYNCHRONIZE: + changeInternalState(InternalState.TEST_INDEX_INTEGRITY, requestedAction); + break; + + default: + break; + } + + } catch (Exception exc) { + String message = "An error occurred while performing action = " + requestedAction + + ". Error = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } else { + LOG.error(AaiUiMsgs.SYNC_NOT_VALID_STATE_DURING_REQUEST, currentInternalState.toString()); + } + } + + /** + * Perform state action. 
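+   *
+   * <p>For a successful end-to-end run the internal states advance as follows (derived from
+   * the switch cases below):
+   *
+   * <pre>
+   * IDLE -> TEST_INDEX_INTEGRITY -> PRE_SYNC -> SYNC_OPERATION -> POST_SYNC
+   *      -> SELECTIVE_DELETE -> GENERATE_FINAL_REPORT -> IDLE
+   * </pre>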
+   */
+  private void performStateAction() {
+
+    try {
+      switch (currentInternalState) {
+
+        case TEST_INDEX_INTEGRITY:
+          performIndexIntegrityValidation();
+          break;
+
+        case PRE_SYNC:
+          performPreSyncCleanupCollection();
+          break;
+
+        case SYNC_OPERATION:
+          performSynchronization();
+          break;
+
+        case POST_SYNC:
+          performIndexSyncPostCollection();
+          changeInternalState(InternalState.SELECTIVE_DELETE, SyncActions.POST_SYNC_COMPLETE);
+          break;
+
+        case SELECTIVE_DELETE:
+          performIndexCleanup();
+          changeInternalState(InternalState.GENERATE_FINAL_REPORT, SyncActions.PURGE_COMPLETE);
+          break;
+
+        case GENERATE_FINAL_REPORT:
+
+          dumpStatReport(true);
+          clearCaches();
+          changeInternalState(InternalState.IDLE, SyncActions.REPORT_COMPLETE);
+          break;
+
+        case ABORTING_SYNC:
+          performSyncAbort();
+          break;
+
+        default:
+          break;
+      }
+    } catch (Exception exc) {
+      String message =
+          "Caught an error while performing a state action. Error = " + exc.getMessage();
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+    }
+  }
+
+  /**
+   * Register entity synchronizer.
+   *
+   * @param entitySynchronizer the entity synchronizer
+   */
+  public void registerEntitySynchronizer(IndexSynchronizer entitySynchronizer) {
+
+    String indexName = entitySynchronizer.getIndexName();
+
+    if (indexName != null) {
+      registeredSynchronizers.add(entitySynchronizer);
+    } else {
+      String message = "Failed to register entity synchronizer because index name is null";
+      LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+    }
+
+  }
+
+  /**
+   * Register index validator.
+   *
+   * @param indexValidator the index validator
+   */
+  public void registerIndexValidator(IndexValidator indexValidator) {
+
+    String indexName = indexValidator.getIndexName();
+
+    if (indexName != null) {
+      registeredIndexValidators.add(indexValidator);
+    } else {
+      String message = "Failed to register index validator because index name is null";
+      LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+    }
+
+  }
+
+  /**
+   * Register index cleaner.
+   *
+   * @param indexCleaner the index cleaner
+   */
+  public void registerIndexCleaner(IndexCleaner indexCleaner) {
+
+    String indexName = indexCleaner.getIndexName();
+
+    if (indexName != null) {
+      registeredIndexCleaners.add(indexCleaner);
+    } else {
+      String message = "Failed to register index cleaner because index name is null";
+      LOG.error(AaiUiMsgs.FAILED_TO_REGISTER_DUE_TO_NULL, message);
+    }
+  }
+
+  /*
+   * The state machine drives our flow: doSync() just dispatches an action, and the state
+   * machine determines what is in play and what happens next.
+   */
+
+  /**
+   * Dump stat report.
+   *
+   * @param showFinalReport the show final report
+   */
+  private void dumpStatReport(boolean showFinalReport) {
+
+    for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+
+      String statReport = synchronizer.getStatReport(showFinalReport);
+
+      if (statReport != null) {
+        LOG.info(AaiUiMsgs.INFO_GENERIC, statReport);
+      }
+    }
+  }
+
+  /**
+   * Clear caches.
+   */
+  private void clearCaches() {
+
+    /*
+     * Any entity caches that were built as part of the sync operation should be cleared to save
+     * memory. The original intent of the caching was to provide a short-lived cache to satisfy
+     * entity requests from multiple synchronizers while minimizing interactions with the AAI.
+     */
+
+    for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+      synchronizer.clearCache();
+    }
+  }
+
+  /**
+   * Perform pre sync cleanup collection.
+   */
+  private void performPreSyncCleanupCollection() {
+
+    /*
+     * Ask the index cleaners to collect their pre-sync object id collections.
+     */
+
+    for (IndexCleaner cleaner : registeredIndexCleaners) {
+      cleaner.populatePreOperationCollection();
+    }
+
+    changeInternalState(InternalState.SYNC_OPERATION, SyncActions.PRE_SYNC_COMPLETE);
+
+  }
+
+  /**
+   * Perform index sync post collection.
+   */
+  private void performIndexSyncPostCollection() {
+
+    /*
+     * Ask the index cleaners to collect their post-sync object id collections.
+     */
+
+    for (IndexCleaner cleaner : registeredIndexCleaners) {
+      cleaner.populatePostOperationCollection();
+    }
+
+  }
+
+  /**
+   * Perform index cleanup.
+   */
+  private void performIndexCleanup() {
+
+    /*
+     * Ask the index cleaners to purge stale documents, based on the pre- and post-operation
+     * collections they gathered.
+     */
+
+    for (IndexCleaner cleaner : registeredIndexCleaners) {
+      cleaner.performCleanup();
+    }
+
+  }
+
+  /**
+   * Perform sync abort.
+   */
+  private void performSyncAbort() {
+    changeInternalState(InternalState.IDLE, SyncActions.SYNC_ABORTED);
+  }
+
+  /**
+   * Perform index integrity validation.
+   */
+  private void performIndexIntegrityValidation() {
+
+    /*
+     * Loop through the registered index validators; test and fix, if needed.
+     */
+
+    for (IndexValidator validator : registeredIndexValidators) {
+      try {
+        if (!validator.exists()) {
+          validator.createOrRepair();
+        }
+      } catch (Exception exc) {
+        String message = "Index validator caused an error = " + exc.getMessage();
+        LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      }
+    }
+
+    changeInternalState(InternalState.PRE_SYNC, SyncActions.INDEX_INTEGRITY_VALIDATION_COMPLETE);
+
+  }
+
+  /**
+   * Shutdown.
+   */
+  public void shutdown() {
+
+    this.syncControllerExecutor.shutdown();
+    for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+
+      try {
+        synchronizer.shutdown();
+      } catch (Exception exc) {
+        LOG.error(AaiUiMsgs.ERROR_GENERIC,
+            "Synchronizer shutdown caused an error = " + exc.getMessage());
+      }
+
+    }
+    this.statReporterExecutor.shutdown();
+  }
+
+  /*
+   * Do we need some kind of running task that responds to a transient boolean to kill it, or do
+   * we just stop the executor it lives in?
+   */
+
+  /**
+   * Perform synchronization.
+   */
+  private void performSynchronization() {
+
+    /*
+     * Get all the synchronizers running in parallel.
+     */
+
+    for (IndexSynchronizer synchronizer : registeredSynchronizers) {
+      supplyAsync(new Supplier<Void>() {
+
+        @Override
+        public Void get() {
+
+          synchronizer.doSync();
+          return null;
+        }
+
+      }, this.syncControllerExecutor).whenComplete((result, error) -> {
+
+        /*
+         * We don't bother checking the result, because it will always be null as the doSync()
+         * call is non-blocking.
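+         * The Supplier is typed Void, so the only meaningful signal on this completion is
+         * the error channel of whenComplete(...).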
+ */ + + if (error != null) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "doSync operation failed with an error = " + error.getMessage()); + } + }); + } + + boolean allDone = false; + long nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + + while (!allDone) { + + // allDone = false; + + int totalFinished = 0; + + for (IndexSynchronizer synchronizer : registeredSynchronizers) { + if (System.currentTimeMillis() > nextReportTimeStampInMs) { + + nextReportTimeStampInMs = System.currentTimeMillis() + 30000L; + + String statReport = synchronizer.getStatReport(false); + + if (statReport != null) { + LOG.info(AaiUiMsgs.INFO_GENERIC, statReport); + } + } + + if (synchronizer.getState() == SynchronizerState.IDLE) { + totalFinished++; + } + } + + allDone = (totalFinished == registeredSynchronizers.size()); + + try { + Thread.sleep(250); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.ERROR_GENERIC, + "An error occurred while waiting for sync to complete. Error = " + exc.getMessage()); + } + + } + + changeInternalState(InternalState.POST_SYNC, SyncActions.SYNC_COMPLETE); + + } + + public SynchronizerState getState() { + + switch (currentInternalState) { + + case IDLE: { + return SynchronizerState.IDLE; + } + + default: { + return SynchronizerState.PERFORMING_SYNCHRONIZATION; + + } + } + + } + +}
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java b/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java new file mode 100644 index 0000000..7c37859 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/SyncHelper.java @@ -0,0 +1,705 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +import java.lang.Thread.UncaughtExceptionHandler; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.aai.ActiveInventoryAdapter; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryRestConfig; +import org.openecomp.sparky.dal.cache.EntityCache; +import org.openecomp.sparky.dal.cache.InMemoryEntityCache; +import org.openecomp.sparky.dal.cache.PersistentEntityCache; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.RestClientBuilder; +import org.openecomp.sparky.dal.rest.RestfulDataAccessor; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.SyncController.SyncActions; +import org.openecomp.sparky.synchronizer.config.SynchronizerConfiguration; +import org.openecomp.sparky.synchronizer.config.SynchronizerConstants; +import org.openecomp.sparky.synchronizer.enumeration.SynchronizerState; +import org.openecomp.sparky.util.ErrorUtil; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; +import org.slf4j.MDC; + +/** + * The Class SyncHelper. + * + * @author davea. 
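+ *
+ * <p>Typical lifecycle (an illustrative sketch; in this service construction normally happens
+ * during application bootstrap, with an already-populated OxmModelLoader):
+ *
+ * <pre>
+ * SyncHelper syncHelper = new SyncHelper(oxmModelLoader);
+ * // scheduled startup/periodic sync tasks now run on the helper's executors
+ * syncHelper.shutdown();
+ * </pre>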
+ */
+public class SyncHelper {
+
+  private final Logger LOG = LoggerFactory.getInstance().getLogger(SyncHelper.class);
+  private SyncController syncController = null;
+  private SyncController entityCounterHistorySummarizer = null;
+
+  private ScheduledExecutorService oneShotExecutor = Executors.newSingleThreadScheduledExecutor();
+  private ScheduledExecutorService periodicExecutor = null;
+  private ScheduledExecutorService historicalExecutor =
+      Executors.newSingleThreadScheduledExecutor();
+
+  private SynchronizerConfiguration syncConfig;
+  private ElasticSearchConfig esConfig;
+  private OxmModelLoader oxmModelLoader;
+
+  private Boolean initialSyncRunning = false;
+  private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
+  private AtomicLong timeNextSync = new AtomicLong();
+  Map<String, String> contextMap;
+
+  /**
+   * The Class SyncTask.
+   */
+  private class SyncTask implements Runnable {
+
+    private boolean isInitialSync;
+
+    /**
+     * Instantiates a new sync task.
+     *
+     * @param initialSync the initial sync
+     */
+    public SyncTask(boolean initialSync) {
+      this.isInitialSync = initialSync;
+    }
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see java.lang.Runnable#run()
+     */
+    @Override
+    public void run() {
+      long opStartTime = System.currentTimeMillis();
+      MDC.setContextMap(contextMap);
+
+      LOG.info(AaiUiMsgs.SEARCH_ENGINE_SYNC_STARTED, sdf.format(opStartTime)
+          .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD));
+
+      try {
+
+        if (syncController == null) {
+          LOG.error(AaiUiMsgs.SYNC_SKIPPED_SYNCCONTROLLER_NOT_INITIALIZED);
+          return;
+        }
+
+        int taskFrequencyInDays =
+            SynchronizerConfiguration.getConfig().getSyncTaskFrequencyInDay();
+
+        /*
+         * Do nothing if the initial start-up sync hasn't finished yet but the regular sync
+         * scheduler fired up a regular sync.
+         */
+        if (!initialSyncRunning) {
+          if (isInitialSync) {
+            initialSyncRunning = true;
+          } else {
+            // update 'timeNextSync' for periodic sync
+            timeNextSync.getAndAdd(taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY);
+          }
+
+          LOG.info(AaiUiMsgs.INFO_GENERIC, "SyncTask, starting synchronization");
+
+          syncController.performAction(SyncActions.SYNCHRONIZE);
+
+          while (syncController.getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) {
+            Thread.sleep(1000);
+          }
+
+        } else {
+          LOG.info(AaiUiMsgs.SKIP_PERIODIC_SYNC_AS_SYNC_DIDNT_FINISH, sdf.format(opStartTime)
+              .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD));
+
+          return;
+        }
+
+        long opEndTime = System.currentTimeMillis();
+
+        if (isInitialSync) {
+          /*
+           * Handle the corner case where the start-up sync operation overlapped with a
+           * scheduled sync start time. Note that the scheduled sync does nothing if
+           * 'initialSyncRunning' is TRUE.
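+           * For example (illustrative numbers, not from configuration): with a one-day
+           * frequency and a 02:00 scheduled start, a start-up sync that finishes at 02:30 has
+           * already consumed that day's slot.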
So the actual next-sync is one more sync-cycle away + */ + long knownNextSyncTime = timeNextSync.get(); + if (knownNextSyncTime != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS + && opEndTime > knownNextSyncTime) { + timeNextSync.compareAndSet(knownNextSyncTime, + knownNextSyncTime + taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); + initialSyncRunning = false; + } + } + + String durationMessage = + String.format(syncController.getControllerName() + " synchronization took '%d' ms.", + (opEndTime - opStartTime)); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationMessage); + + // Provide log about the time for next synchronization + if (syncConfig.isConfigOkForPeriodicSync() + && timeNextSync.get() != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) { + TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone()); + sdf.setTimeZone(tz); + if (opEndTime - opStartTime > taskFrequencyInDays + * SynchronizerConstants.MILLISEC_IN_A_DAY) { + String durationWasLongerMessage = String.format( + syncController.getControllerName() + + " synchronization took '%d' ms which is larger than" + + " synchronization interval of '%d' ms.", + (opEndTime - opStartTime), + taskFrequencyInDays * SynchronizerConstants.MILLISEC_IN_A_DAY); + + LOG.info(AaiUiMsgs.SYNC_DURATION, durationWasLongerMessage); + } + + LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(), + sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD, + SynchronizerConstants.TIME_CONFIG_STD)); + } + + } catch (Exception exc) { + String message = "Caught an exception while attempt to synchronize elastic search " + + "with an error cause = " + ErrorUtil.extractStackTraceElements(5, exc); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + + } + + } + + /** + * The Class HistoricalEntityCountSummaryTask. + */ + private class HistoricalEntityCountSummaryTask implements Runnable { + + /** + * Instantiates a new historical entity count summary task. 
+     */
+    public HistoricalEntityCountSummaryTask() {}
+
+    /*
+     * (non-Javadoc)
+     *
+     * @see java.lang.Runnable#run()
+     */
+    @Override
+    public void run() {
+
+      long opStartTime = System.currentTimeMillis();
+      MDC.setContextMap(contextMap);
+      LOG.info(AaiUiMsgs.HISTORICAL_ENTITY_COUNT_SUMMARIZER_STARTING, sdf.format(opStartTime)
+          .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD));
+
+      try {
+        if (entityCounterHistorySummarizer == null) {
+          LOG.error(AaiUiMsgs.HISTORICAL_ENTITY_COUNT_SUMMARIZER_NOT_STARTED);
+          return;
+        }
+
+        LOG.info(AaiUiMsgs.INFO_GENERIC,
+            "EntityCounterHistorySummarizer, starting synchronization");
+
+        entityCounterHistorySummarizer.performAction(SyncActions.SYNCHRONIZE);
+
+        while (entityCounterHistorySummarizer
+            .getState() == SynchronizerState.PERFORMING_SYNCHRONIZATION) {
+          Thread.sleep(1000);
+        }
+
+        long opEndTime = System.currentTimeMillis();
+
+        LOG.info(AaiUiMsgs.HISTORICAL_SYNC_DURATION,
+            entityCounterHistorySummarizer.getControllerName(),
+            String.valueOf(opEndTime - opStartTime));
+
+        long taskFrequencyInMs =
+            syncConfig.getHistoricalEntitySummarizedFrequencyInMinutes() * 60 * 1000;
+
+        if (syncConfig.isHistoricalEntitySummarizerEnabled()) {
+          String time = sdf.format(System.currentTimeMillis() + taskFrequencyInMs)
+              .replaceAll(SynchronizerConstants.TIME_STD, SynchronizerConstants.TIME_CONFIG_STD);
+
+          LOG.info(AaiUiMsgs.HISTORICAL_SYNC_TO_BEGIN, time);
+        }
+
+      } catch (Exception exc) {
+        String message = "Caught an exception while attempting to populate the entity count "
+            + "history Elasticsearch table, with an error cause = "
+            + ErrorUtil.extractStackTraceElements(5, exc);
+        LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      }
+
+    }
+
+  }
+
+  /**
+   * Gets the first sync time.
+   *
+   * @param calendar the calendar
+   * @param timeNow the time now
+   * @param taskFreqInDay the task freq in day
+   * @return the first sync time
+   */
+  public long getFirstSyncTime(Calendar calendar, long timeNow, int taskFreqInDay) {
+    if (taskFreqInDay == SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) {
+      return SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS;
+    } else if (timeNow > calendar.getTimeInMillis()) {
+      calendar.add(Calendar.DAY_OF_MONTH, taskFreqInDay);
+    }
+    return calendar.getTimeInMillis();
+  }
+
+  /**
+   * Bootstrap and configure the moving pieces of the Sync Controller.
+   */
+
+  private void initializeSyncController() {
+
+    try {
+
+      /*
+       * TODO: it would be nice to have XML IoC / dependency injection kind of thing for these
+       * pieces, maybe Spring?
+ */ + + /* + * Sync Controller itself + */ + + syncController = new SyncController("entitySyncController"); + + /* + * Create common elements + */ + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); + ActiveInventoryRestConfig aaiRestConfig = + ActiveInventoryConfig.getConfig().getAaiRestConfig(); + + + EntityCache cache = null; + + if (aaiRestConfig.isCacheEnabled()) { + cache = new PersistentEntityCache(aaiRestConfig.getStorageFolderOverride(), + aaiRestConfig.getNumCacheWorkers()); + } else { + cache = new InMemoryEntityCache(); + } + + RestClientBuilder clientBuilder = new RestClientBuilder(); + + aaiAdapter.setCacheEnabled(true); + aaiAdapter.setEntityCache(cache); + + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); + + /* + * Register Index Validators + */ + + IndexIntegrityValidator entitySearchIndexValidator = + new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getIndexName(), + esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), + esConfig.buildElasticSearchTableConfig()); + + syncController.registerIndexValidator(entitySearchIndexValidator); + + // TODO: Insert IndexValidator for TopographicalEntityIndex + // we should have one, but one isn't 100% required as none of the fields are analyzed + + /* + * Register Synchronizers + */ + + SearchableEntitySynchronizer ses = new SearchableEntitySynchronizer(esConfig.getIndexName()); + ses.setAaiDataProvider(aaiAdapter); + ses.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(ses); + + CrossEntityReferenceSynchronizer cers = new CrossEntityReferenceSynchronizer( + esConfig.getIndexName(), ActiveInventoryConfig.getConfig()); + cers.setAaiDataProvider(aaiAdapter); + cers.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(cers); + + GeoSynchronizer geo = new GeoSynchronizer(esConfig.getTopographicalSearchIndex()); + geo.setAaiDataProvider(aaiAdapter); + geo.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(geo); + + if (syncConfig.isAutosuggestSynchronizationEnabled()) { + initAutoSuggestionSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); + initAggregationSynchronizer(esConfig, aaiAdapter, esAdapter, nonCachingRestProvider); + } + + /* + * Register Cleaners + */ + + IndexCleaner searchableIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + esConfig.getIndexName(), esConfig.getType(), esConfig.getIpAddress(), + esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), + syncConfig.getNumScrollContextItemsToRetrievePerRequest()); + + syncController.registerIndexCleaner(searchableIndexCleaner); + + IndexCleaner geoIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + esConfig.getTopographicalSearchIndex(), esConfig.getType(), esConfig.getIpAddress(), + esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(), + syncConfig.getNumScrollContextItemsToRetrievePerRequest()); + + syncController.registerIndexCleaner(geoIndexCleaner); + + + } catch (Exception exc) { + String message = "Error: failed to sync with message = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + + } + + /** + * Inits the entity counter history summarizer. 
+ */ + private void initEntityCounterHistorySummarizer() { + + LOG.info(AaiUiMsgs.INFO_GENERIC, "initEntityCounterHistorySummarizer"); + + try { + entityCounterHistorySummarizer = new SyncController("entityCounterHistorySummarizer"); + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); + aaiAdapter.setCacheEnabled(false); + + RestClientBuilder clientBuilder = new RestClientBuilder(); + clientBuilder.setUseHttps(false); + + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(clientBuilder); + ElasticSearchConfig esConfig = ElasticSearchConfig.getConfig(); + ElasticSearchAdapter esAdapter = new ElasticSearchAdapter(nonCachingRestProvider, esConfig); + + IndexIntegrityValidator entityCounterHistoryValidator = + new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getEntityCountHistoryIndex(), + esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), + esConfig.buildElasticSearchEntityCountHistoryTableConfig()); + + entityCounterHistorySummarizer.registerIndexValidator(entityCounterHistoryValidator); + + HistoricalEntitySummarizer historicalSummarizer = + new HistoricalEntitySummarizer(esConfig.getEntityCountHistoryIndex()); + historicalSummarizer.setAaiDataProvider(aaiAdapter); + historicalSummarizer.setEsDataProvider(esAdapter); + //entityCounterHistorySummarizer.registerEntitySynchronizer(historicalSummarizer); + + } catch (Exception exc) { + String message = "Error: failed to sync with message = " + exc.getMessage(); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + private List<String> getAutosuggestableEntitiesFromOXM() { + Map<String, OxmEntityDescriptor> map = oxmModelLoader.getSuggestionSearchEntityDescriptors(); + List<String> suggestableEntities = new ArrayList<String>(); + + for (String entity: map.keySet()){ + suggestableEntities.add(entity); + } + return suggestableEntities; + } + + /** + * Initialize the AutosuggestionSynchronizer and + * AggregationSuggestionSynchronizer + * + * @param esConfig + * @param aaiAdapter + * @param esAdapter + * @param nonCachingRestProvider + */ + private void initAutoSuggestionSynchronizer(ElasticSearchConfig esConfig, + ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter, + RestfulDataAccessor nonCachingRestProvider) { + LOG.info(AaiUiMsgs.INFO_GENERIC, "initAutoSuggestionSynchronizer"); + + // Initialize for entityautosuggestindex + try { + IndexIntegrityValidator autoSuggestionIndexValidator = + new IndexIntegrityValidator(nonCachingRestProvider, esConfig.getAutosuggestIndexname(), + esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(), + esConfig.buildAutosuggestionTableConfig()); + + syncController.registerIndexValidator(autoSuggestionIndexValidator); + + AutosuggestionSynchronizer suggestionSynchronizer = + new AutosuggestionSynchronizer(esConfig.getAutosuggestIndexname()); + suggestionSynchronizer.setAaiDataProvider(aaiAdapter); + suggestionSynchronizer.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(suggestionSynchronizer); + + AggregationSuggestionSynchronizer aggregationSuggestionSynchronizer = + new AggregationSuggestionSynchronizer(esConfig.getAutosuggestIndexname()); + aggregationSuggestionSynchronizer.setEsDataProvider(esAdapter); + syncController.registerEntitySynchronizer(aggregationSuggestionSynchronizer); + + IndexCleaner autosuggestIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider, + esConfig.getAutosuggestIndexname(), esConfig.getType(), esConfig.getIpAddress(), + 
esConfig.getHttpPort(), syncConfig.getScrollContextTimeToLiveInMinutes(),
+          syncConfig.getNumScrollContextItemsToRetrievePerRequest());
+
+      syncController.registerIndexCleaner(autosuggestIndexCleaner);
+    } catch (Exception exc) {
+      String message = "Error: failed to sync with message = " + exc.getMessage();
+      LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+    }
+  }
+
+  /**
+   * Initialize the AggregationSynchronizer
+   *
+   * @param esConfig
+   * @param aaiAdapter
+   * @param esAdapter
+   * @param nonCachingRestProvider
+   */
+  private void initAggregationSynchronizer(ElasticSearchConfig esConfig,
+      ActiveInventoryAdapter aaiAdapter, ElasticSearchAdapter esAdapter,
+      RestfulDataAccessor nonCachingRestProvider) {
+    LOG.info(AaiUiMsgs.INFO_GENERIC, "initAggregationSynchronizer");
+
+    List<String> aggregationEntities = getAutosuggestableEntitiesFromOXM();
+
+    // For each index: create an IndexValidator, a Synchronizer, and an IndexCleaner
+    for (String entity : aggregationEntities) {
+      try {
+        String indexName = TierSupportUiConstants.getAggregationIndexName(entity);
+
+        IndexIntegrityValidator aggregationIndexValidator = new IndexIntegrityValidator(
+            nonCachingRestProvider, indexName, esConfig.getType(), esConfig.getIpAddress(),
+            esConfig.getHttpPort(), esConfig.buildAggregationTableConfig());
+
+        syncController.registerIndexValidator(aggregationIndexValidator);
+
+        /*
+         * TODO: This per-entity-synchronizer approach will eventually result in AAI / ES
+         * overload because of the existing dedicated thread pools for ES + AAI operations within
+         * the synchronizer. If we had 50 types to sync then the thread pools within each
+         * Synchronizer would cause some heartburn, as there would be hundreds of threads trying
+         * to talk to AAI. Given that we are running out of time, let's make sure we can get it
+         * functional and then we'll re-visit.
+         */
+        AggregationSynchronizer aggSynchronizer = new AggregationSynchronizer(entity, indexName);
+        aggSynchronizer.setAaiDataProvider(aaiAdapter);
+        aggSynchronizer.setEsDataProvider(esAdapter);
+        syncController.registerEntitySynchronizer(aggSynchronizer);
+
+        IndexCleaner entityDataIndexCleaner = new ElasticSearchIndexCleaner(nonCachingRestProvider,
+            indexName, esConfig.getType(), esConfig.getIpAddress(), esConfig.getHttpPort(),
+            syncConfig.getScrollContextTimeToLiveInMinutes(),
+            syncConfig.getNumScrollContextItemsToRetrievePerRequest());
+
+        syncController.registerIndexCleaner(entityDataIndexCleaner);
+
+      } catch (Exception exc) {
+        String message = "Error: failed to sync with message = " + exc.getMessage();
+        LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
+      }
+    }
+  }
+
+  /**
+   * Instantiates a new sync helper.
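+   *
+   * <p>Worked scheduling example (illustrative numbers only): with a configured start time of
+   * 02:00:00, a sync frequency of one day, and construction at 14:00, getFirstSyncTime(...)
+   * rolls the start forward to 02:00 the next day, so the first periodic sync fires after
+   * roughly a 12 hour delay and then repeats every MILLISEC_IN_A_DAY.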
+
+  /**
+   * Instantiates a new sync helper.
+   *
+   * @param loader the OXM model loader used to discover synchronizable entity types
+   */
+  public SyncHelper(OxmModelLoader loader) {
+    try {
+      this.contextMap = MDC.getCopyOfContextMap();
+      this.syncConfig = SynchronizerConfiguration.getConfig();
+      this.esConfig = ElasticSearchConfig.getConfig();
+      this.oxmModelLoader = loader;
+
+      UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() {
+
+        @Override
+        public void uncaughtException(Thread thread, Throwable exc) {
+          LOG.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc);
+        }
+      };
+
+      ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat("SyncHelper-%d")
+          .setUncaughtExceptionHandler(uncaughtExceptionHandler).build();
+
+      periodicExecutor = Executors.newScheduledThreadPool(3, namedThreadFactory);
+
+      /*
+       * We only want to initialize the synchronizer if sync has been configured to start
+       */
+      if (syncConfig.isConfigOkForStartupSync() || syncConfig.isConfigOkForPeriodicSync()) {
+        initializeSyncController();
+      }
+
+      if (syncConfig.isHistoricalEntitySummarizerEnabled()) {
+        initEntityCounterHistorySummarizer();
+      } else {
+        LOG.info(AaiUiMsgs.INFO_GENERIC, "history summarizer disabled");
+      }
+
+      // schedule startup synchronization
+      if (syncConfig.isConfigOkForStartupSync()) {
+
+        long taskInitialDelayInMs = syncConfig.getSyncTaskInitialDelayInMs();
+        if (taskInitialDelayInMs != SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS) {
+          oneShotExecutor.schedule(new SyncTask(true), taskInitialDelayInMs, TimeUnit.MILLISECONDS);
+          LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is enabled.");
+        } else {
+          LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine startup synchronization is disabled.");
+        }
+      }
+
+      // schedule periodic synchronization
+      if (syncConfig.isConfigOkForPeriodicSync()) {
+
+        TimeZone tz = TimeZone.getTimeZone(syncConfig.getSyncTaskStartTimeTimeZone());
+        Calendar calendar = Calendar.getInstance(tz);
+        sdf.setTimeZone(tz);
+
+        calendar.set(Calendar.HOUR_OF_DAY, syncConfig.getSyncTaskStartTimeHr());
+        calendar.set(Calendar.MINUTE, syncConfig.getSyncTaskStartTimeMin());
+        calendar.set(Calendar.SECOND, syncConfig.getSyncTaskStartTimeSec());
+
+        long timeCurrent = calendar.getTimeInMillis();
+        int taskFrequencyInDay = syncConfig.getSyncTaskFrequencyInDay();
+        timeNextSync.getAndSet(getFirstSyncTime(calendar, timeCurrent, taskFrequencyInDay));
+
+        long delayUntilFirstRegSyncInMs = timeNextSync.get() - timeCurrent;
+
+        // Do all calculation in milliseconds
+        long taskFrequencyInMs = taskFrequencyInDay * SynchronizerConstants.MILLISEC_IN_A_DAY;
+
+        if (taskFrequencyInMs != SynchronizerConstants.DELAY_NO_PERIODIC_SYNC_IN_MS) {
+          periodicExecutor.scheduleAtFixedRate(new SyncTask(false), delayUntilFirstRegSyncInMs,
+              taskFrequencyInMs, TimeUnit.MILLISECONDS);
+          LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is enabled.");
+          // When startup sync is misconfigured or disabled, give the user a clue
+          // about when the next periodic sync will run.
+          if (!syncConfig.isConfigOkForStartupSync()
+              || syncConfig.isConfigDisabledForInitialSync()) {
+            LOG.info(AaiUiMsgs.SYNC_TO_BEGIN, syncController.getControllerName(),
+                sdf.format(timeNextSync).replaceAll(SynchronizerConstants.TIME_STD,
+                    SynchronizerConstants.TIME_CONFIG_STD));
+          }
+        } else {
+          LOG.info(AaiUiMsgs.INFO_GENERIC, "Search Engine periodic synchronization is disabled.");
+        }
+      }
+
+      // schedule historical entity count summarization
+      if (syncConfig.isHistoricalEntitySummarizerEnabled()) {
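+        // Runs one summary immediately, then repeats at a fixed delay of
+        // historicalEntitySummarizedFrequencyInMinutes (default 60); see
+        // scheduleHistoricalCounterSyncTask() below.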
scheduleHistoricalCounterSyncTask(); + } + + } catch (Exception exc) { + String message = "Caught an exception while starting up the SyncHelper. Error cause = \n" + + ErrorUtil.extractStackTraceElements(5, exc); + LOG.error(AaiUiMsgs.ERROR_GENERIC, message); + } + } + + /** + * Schedule historical counter sync task. + */ + private void scheduleHistoricalCounterSyncTask() { + long taskFrequencyInMs = + syncConfig.getHistoricalEntitySummarizedFrequencyInMinutes() * 60 * 1000; + historicalExecutor.scheduleWithFixedDelay(new HistoricalEntityCountSummaryTask(), 0, + taskFrequencyInMs, TimeUnit.MILLISECONDS); + LOG.info(AaiUiMsgs.INFO_GENERIC, + "Historical Entity Count Summarizer synchronization is enabled."); + } + + /** + * Shutdown. + */ + public void shutdown() { + + if (oneShotExecutor != null) { + oneShotExecutor.shutdown(); + } + + if (periodicExecutor != null) { + periodicExecutor.shutdown(); + } + + if (historicalExecutor != null) { + historicalExecutor.shutdown(); + } + + if (syncController != null) { + syncController.shutdown(); + } + + if (entityCounterHistorySummarizer != null) { + entityCounterHistorySummarizer.shutdown(); + } + + } + + public OxmModelLoader getOxmModelLoader() { + return oxmModelLoader; + } + + public void setOxmModelLoader(OxmModelLoader oxmModelLoader) { + this.oxmModelLoader = oxmModelLoader; + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java b/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java new file mode 100644 index 0000000..deb83a5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/TaskProcessingStats.java @@ -0,0 +1,136 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer; + +import org.openecomp.sparky.analytics.AbstractStatistics; +import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig; + +/** + * The Class TaskProcessingStats. + */ +public class TaskProcessingStats extends AbstractStatistics { + + private static String TASK_AGE_STATS = "taskAgeStats"; + private static String TASK_RESPONSE_STATS = "taskResponseStats"; + private static String RESPONSE_SIZE_IN_BYTES = "taskResponseSizeInBytes"; + // private static String QUEUE_ITEM_LENGTH = "queueItemLength"; + private static String TPS = "transactionsPerSecond"; + + /** + * Instantiates a new task processing stats. 
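+   * <p>Builds four histograms (task age, response time, response size in bytes,
+   * and transactions per second); each histogram's label, y-axis maximum, bin
+   * count, and decimal precision come from the supplied config.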
+ * + * @param config the config + */ + public TaskProcessingStats(TaskProcessorConfig config) { + + addHistogram(TASK_AGE_STATS, config.getTaskAgeHistogramLabel(), + config.getTaskAgeHistogramMaxYAxis(), config.getTaskAgeHistogramNumBins(), + config.getTaskAgeHistogramNumDecimalPoints()); + + addHistogram(TASK_RESPONSE_STATS, config.getResponseTimeHistogramLabel(), + config.getResponseTimeHistogramMaxYAxis(), config.getResponseTimeHistogramNumBins(), + config.getResponseTimeHistogramNumDecimalPoints()); + + addHistogram(RESPONSE_SIZE_IN_BYTES, config.getBytesHistogramLabel(), + config.getBytesHistogramMaxYAxis(), config.getBytesHistogramNumBins(), + config.getBytesHistogramNumDecimalPoints()); + + /* + * addHistogram(QUEUE_ITEM_LENGTH, config.getQueueLengthHistogramLabel(), + * config.getQueueLengthHistogramMaxYAxis(), config.getQueueLengthHistogramNumBins(), + * config.getQueueLengthHistogramNumDecimalPoints()); + */ + + addHistogram(TPS, config.getTpsHistogramLabel(), config.getTpsHistogramMaxYAxis(), + config.getTpsHistogramNumBins(), config.getTpsHistogramNumDecimalPoints()); + + } + + /* + * public void updateQueueItemLengthHistogram(long value) { updateHistogram(QUEUE_ITEM_LENGTH, + * value); } + */ + + /** + * Update task age stats histogram. + * + * @param value the value + */ + public void updateTaskAgeStatsHistogram(long value) { + updateHistogram(TASK_AGE_STATS, value); + } + + /** + * Update task response stats histogram. + * + * @param value the value + */ + public void updateTaskResponseStatsHistogram(long value) { + updateHistogram(TASK_RESPONSE_STATS, value); + } + + /** + * Update response size in bytes histogram. + * + * @param value the value + */ + public void updateResponseSizeInBytesHistogram(long value) { + updateHistogram(RESPONSE_SIZE_IN_BYTES, value); + } + + /** + * Update transactions per second histogram. + * + * @param value the value + */ + public void updateTransactionsPerSecondHistogram(long value) { + updateHistogram(TPS, value); + } + + /** + * Gets the statistics report. + * + * @param verboseEnabled the verbose enabled + * @param indentPadding the indent padding + * @return the statistics report + */ + public String getStatisticsReport(boolean verboseEnabled, String indentPadding) { + + StringBuilder sb = new StringBuilder(); + + sb.append("\n").append(getHistogramStats(TASK_AGE_STATS, verboseEnabled, indentPadding)); + // sb.append("\n").append(getHistogramStats(QUEUE_ITEM_LENGTH, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TASK_RESPONSE_STATS, verboseEnabled, indentPadding)); + sb.append("\n") + .append(getHistogramStats(RESPONSE_SIZE_IN_BYTES, verboseEnabled, indentPadding)); + sb.append("\n").append(getHistogramStats(TPS, verboseEnabled, indentPadding)); + + return sb.toString(); + + } + + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java b/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java new file mode 100644 index 0000000..8cc3409 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/TransactionRateController.java @@ -0,0 +1,113 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.openecomp.sparky.analytics.AveragingRingBuffer;
+import org.openecomp.sparky.synchronizer.config.TaskProcessorConfig;
+
+/**
+ * Tracks a running average of transaction response times and proposes a
+ * per-thread delay intended to hold aggregate throughput near the configured
+ * target transactions-per-second (TPS).
+ *
+ * @author davea
+ */
+public class TransactionRateController {
+
+  private AveragingRingBuffer responseTimeTracker;
+  private double msPerTransaction;
+  private int numThreads;
+  private TaskProcessorConfig config;
+  private long startTimeInMs;
+  private AtomicInteger numTransactions;
+
+  /**
+   * Instantiates a new transaction rate controller.
+   *
+   * @param config the config
+   */
+  public TransactionRateController(TaskProcessorConfig config) {
+
+    this.config = config;
+    this.responseTimeTracker = new AveragingRingBuffer(
+        config.getNumSamplesPerThreadForRunningAverage() * config.getMaxConcurrentWorkers());
+    this.msPerTransaction = 1000 / config.getTargetTps();
+    this.numThreads = config.getMaxConcurrentWorkers();
+    this.startTimeInMs = System.currentTimeMillis();
+    this.numTransactions = new AtomicInteger(0);
+  }
+
+  /**
+   * Track response time.
+   *
+   * @param responseTimeInMs the response time in ms
+   */
+  public void trackResponseTime(long responseTimeInMs) {
+    this.numTransactions.incrementAndGet();
+    responseTimeTracker.addSample(responseTimeInMs);
+  }
+
+  public long getFixedDelayInMs() {
+
+    /*
+     * The math here is pretty simple:
+     *
+     * 1. Target TPS is 10. Then the msPerTxn = 1000/10 = 100 ms.
+     *
+     * 2. If the calculated avgResponseTime = 40 ms, then the proposed delay is 60 ms per thread.
+     *
+     * 3. If the calculated avgResponseTime = 200 ms, then the proposed delay is -100 ms, which is
+     * not possible; we can't speed it up, so we don't propose any further delay.
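+     *
+     * In code (sketch): proposedDelay = (msPerTransaction - avgResponseTime) * numThreads,
+     * floored at zero because a negative delay cannot be applied.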
+     */
+
+    double proposedDelay = 0;
+
+    if (config.isTransactionRateControllerEnabled()) {
+      proposedDelay = (msPerTransaction - responseTimeTracker.getAvg()) * this.numThreads;
+
+      if (proposedDelay > 0) {
+        return (long) proposedDelay;
+      }
+
+      // The average response time meets or exceeds the per-transaction budget.
+      // We cannot speed callers up, so propose no additional delay rather than
+      // returning a meaningless negative value.
+      return 0;
+    }
+
+    return (long) proposedDelay;
+  }
+
+  public long getAvg() {
+    return responseTimeTracker.getAvg();
+  }
+
+  public double getCurrentTps() {
+    if (numTransactions.get() > 0) {
+      double timeDelta = System.currentTimeMillis() - startTimeInMs;
+      double numTxns = numTransactions.get();
+      return (numTxns / timeDelta) * 1000.0;
+    }
+
+    return 0.0;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java b/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java
new file mode 100644
index 0000000..34286b4
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConfiguration.java
@@ -0,0 +1,444 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer.config;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.util.ConfigHelper;
+import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants;
+
+
+/**
+ * The Class SynchronizerConfiguration.
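+ *
+ * <p>Loads synchronizer tuning from {@code synchronizer.properties}. An
+ * illustrative (not authoritative) sketch of the keys parsed by
+ * {@code initialize()}, shown with their coded defaults; the start timestamp
+ * is written in the zone-qualified form the validation pattern expects:
+ *
+ * <pre>
+ * synchronizer.syncTask.initialDelayInMs=0
+ * synchronizer.syncTask.taskFrequencyInDay=0
+ * synchronizer.syncTask.startTimestamp=05:00:00 UTC+00:00
+ * synchronizer.scrollContextTimeToLiveInMinutes=5
+ * synchronizer.numScrollContextItemsToRetrievePerRequest=5000
+ * synchronizer.historicalEntitySummarizerEnabled=true
+ * synchronizer.autosuggestSynchronizationEnabled=true
+ * </pre>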
+ */ +public class SynchronizerConfiguration { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SynchronizerConfiguration.class); + + public static final String CONFIG_FILE = + TierSupportUiConstants.DYNAMIC_CONFIG_APP_LOCATION + "synchronizer.properties"; + + private static SynchronizerConfiguration instance; + + public static final String DEPTH_MODIFIER = "?depth=0"; + public static final String DEPTH_ALL_MODIFIER = "?depth=all"; + public static final String DEPTH_AND_NODES_ONLY_MODIFIER = "?depth=0&nodes-only"; + public static final String NODES_ONLY_MODIFIER = "?nodes-only"; + + public static SynchronizerConfiguration getConfig() throws Exception { + + if (instance == null) { + instance = new SynchronizerConfiguration(); + instance.initialize(); + } + + return instance; + } + + /** + * Instantiates a new synchronizer configuration. + */ + public SynchronizerConfiguration() { + // test method + } + + /** + * Initialize. + * + * @throws Exception the exception + */ + protected void initialize() throws Exception { + + Properties props = ConfigHelper.loadConfigFromExplicitPath(CONFIG_FILE); + + // parse config for startup sync + try { + syncTaskInitialDelayInMs = + Integer.parseInt(props.getProperty("synchronizer.syncTask.initialDelayInMs", + SynchronizerConstants.DEFAULT_INITIAL_DELAY_IN_MS)); + if (syncTaskInitialDelayInMs < 0) { + throw new Exception(); + } + } catch (Exception exc) { + this.setConfigOkForStartupSync(false); + syncTaskInitialDelayInMs = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE; + String message = "Invalid configuration for synchronizer parameter:" + + " 'synchronizer.syncTask.initialDelayInMs'"; + LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); + } + + // parse config for periodic sync + try { + syncTaskFrequencyInDay = + Integer.parseInt(props.getProperty("synchronizer.syncTask.taskFrequencyInDay", + SynchronizerConstants.DEFAULT_TASK_FREQUENCY_IN_DAY)); + if (syncTaskFrequencyInDay < 0) { + throw new Exception(); + } + } catch (Exception exc) { + this.setConfigOkForPeriodicSync(false); + syncTaskFrequencyInDay = SynchronizerConstants.DEFAULT_CONFIG_ERROR_INT_VALUE; + String message = "Invalid configuration for synchronizer parameter:" + + " 'synchronizer.syncTask.taskFrequencyInDay'"; + LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); + } + + try { + syncTaskStartTime = props.getProperty("synchronizer.syncTask.startTimestamp", + SynchronizerConstants.DEFAULT_START_TIMESTAMP); // Default 05:00:00 UTC + Pattern pattern = Pattern.compile(SynchronizerConstants.TIMESTAMP24HOURS_PATTERN); + Matcher matcher = pattern.matcher(syncTaskStartTime); + if (!matcher.matches()) { + throw new Exception(); + } + + List<String> timestampVal = Arrays.asList(syncTaskStartTime.split(" ")); + + if (timestampVal.size() == SynchronizerConstants.COMPONENTS_IN_TIMESTAMP) { + // Need both time and timezone offset + syncTaskStartTimeTimeZone = timestampVal + .get(SynchronizerConstants.IDX_TIMEZONE_IN_TIMESTAMP).replaceAll("UTC", "GMT"); + + String time = timestampVal.get(SynchronizerConstants.IDX_TIME_IN_TIMESTAMP); + DateFormat format = new SimpleDateFormat("HH:mm:ss"); + Date date = format.parse(time); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(date); + + syncTaskStartTimeHr = calendar.get(Calendar.HOUR_OF_DAY); + syncTaskStartTimeMin = calendar.get(Calendar.MINUTE); + syncTaskStartTimeSec = calendar.get(Calendar.SECOND); + } else { + LOG.info(AaiUiMsgs.SYNC_START_TIME); + } + } catch (Exception exc) { + 
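+      // A malformed startTimestamp only disables periodic sync; the startup
+      // sync flag is managed separately and is not affected here.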
this.setConfigOkForPeriodicSync(false); + String message = "Invalid configuration for synchronizer parameter:" + + " 'synchronizer.syncTask.startTimestamp'"; + LOG.error(AaiUiMsgs.SYNC_INVALID_CONFIG_PARAM, message); + } + + scrollContextTimeToLiveInMinutes = + Integer.parseInt(props.getProperty("synchronizer.scrollContextTimeToLiveInMinutes", "5")); + numScrollContextItemsToRetrievePerRequest = Integer.parseInt( + props.getProperty("synchronizer.numScrollContextItemsToRetrievePerRequest", "5000")); + + resolverProgressLogFrequencyInMs = Long + .parseLong(props.getProperty("synchronizer.resolver.progressLogFrequencyInMs", "60000")); + resolverQueueMonitorFrequencyInMs = Long + .parseLong(props.getProperty("synchronizer.resolver.queueMonitorFrequencyInMs", "1000")); + + indexIntegrityValidatorEnabled = Boolean + .parseBoolean(props.getProperty("synchronizer.indexIntegrityValidator.enabled", "false")); + indexIntegrityValidatorFrequencyInMs = Long.parseLong( + props.getProperty("synchronizer.indexIntegrityValidatorFrequencyInMs", "300000")); + + displayVerboseQueueManagerStats = Boolean + .parseBoolean(props.getProperty("synchronizer.resolver.displayVerboseQueueManagerStats")); + + resourceNotFoundErrorsSupressed = + Boolean.parseBoolean(props.getProperty("synchronizer.suppressResourceNotFoundErrors")); + + nodesOnlyModifierEnabled = + Boolean.parseBoolean(props.getProperty("synchronizer.applyNodesOnlyModifier")); + + historicalEntitySummarizerEnabled = Boolean + .parseBoolean(props.getProperty("synchronizer.historicalEntitySummarizerEnabled", "true")); + historicalEntitySummarizedFrequencyInMinutes = Long.parseLong( + props.getProperty("synchronizer.historicalEntitySummarizedFrequencyInMinutes", "60")); + + autosuggestSynchronizationEnabled = Boolean + .parseBoolean(props.getProperty("synchronizer.autosuggestSynchronizationEnabled", "true")); + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, this.toString()); + } + } + + public boolean isNodesOnlyModifierEnabled() { + return nodesOnlyModifierEnabled; + } + + public void setNodesOnlyModifierEnabled(boolean nodesOnlyModifierEnabled) { + this.nodesOnlyModifierEnabled = nodesOnlyModifierEnabled; + } + + public int getSyncTaskInitialDelayInMs() { + return syncTaskInitialDelayInMs; + } + + public void setSyncTaskInitialDelayInMs(int syncTaskInitialDelayInMs) { + this.syncTaskInitialDelayInMs = syncTaskInitialDelayInMs; + } + + public boolean isDisplayVerboseQueueManagerStats() { + return displayVerboseQueueManagerStats; + } + + public void setDisplayVerboseQueueManagerStats(boolean displayVerboseQueueManagerStats) { + this.displayVerboseQueueManagerStats = displayVerboseQueueManagerStats; + } + + public boolean isHistoricalEntitySummarizerEnabled() { + return historicalEntitySummarizerEnabled; + } + + public void setHistoricalEntitySummarizerEnabled(boolean historicalEntitySummarizerEnabled) { + this.historicalEntitySummarizerEnabled = historicalEntitySummarizerEnabled; + } + + public long getHistoricalEntitySummarizedFrequencyInMinutes() { + return historicalEntitySummarizedFrequencyInMinutes; + } + + public void setHistoricalEntitySummarizedFrequencyInMinutes( + long historicalEntitySummarizedFrequencyInMinutes) { + this.historicalEntitySummarizedFrequencyInMinutes = + historicalEntitySummarizedFrequencyInMinutes; + } + + private int syncTaskInitialDelayInMs; + + private int syncTaskFrequencyInMs; + + private int scrollContextTimeToLiveInMinutes; + + private int numScrollContextItemsToRetrievePerRequest; + + private long 
resolverProgressLogFrequencyInMs; + + private long resolverQueueMonitorFrequencyInMs; + + private boolean indexIntegrityValidatorEnabled; + + private long indexIntegrityValidatorFrequencyInMs; + + private int syncTaskFrequencyInDay; + + private String syncTaskStartTime; + + private int syncTaskStartTimeHr = 5; // for default sync start time + + private int syncTaskStartTimeMin; + + private int syncTaskStartTimeSec; + + private String syncTaskStartTimeTimeZone; + + private boolean displayVerboseQueueManagerStats; + + private boolean resourceNotFoundErrorsSupressed; + + private boolean nodesOnlyModifierEnabled; + + private boolean historicalEntitySummarizerEnabled; + + private boolean autosuggestSynchronizationEnabled; + + private long historicalEntitySummarizedFrequencyInMinutes; + + + private boolean configOkForStartupSync = true; + + private boolean configOkForPeriodicSync = true; + + public boolean isResourceNotFoundErrorsSupressed() { + return resourceNotFoundErrorsSupressed; + } + + public void setResourceNotFoundErrorsSupressed(boolean resourceNotFoundErrorsSupressed) { + this.resourceNotFoundErrorsSupressed = resourceNotFoundErrorsSupressed; + } + + public int getScrollContextTimeToLiveInMinutes() { + return scrollContextTimeToLiveInMinutes; + } + + public void setScrollContextTimeToLiveInMinutes(int scrollContextTimeToLiveInMinutes) { + this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes; + } + + public int getNumScrollContextItemsToRetrievePerRequest() { + return numScrollContextItemsToRetrievePerRequest; + } + + public void setNumScrollContextItemsToRetrievePerRequest( + int numScrollContextItemsToRetrievePerRequest) { + this.numScrollContextItemsToRetrievePerRequest = numScrollContextItemsToRetrievePerRequest; + } + + public int getSyncTaskFrequencyInDay() { + return syncTaskFrequencyInDay; + } + + public void setSyncTaskFrequencyInDay(int syncTaskFrequencyInDay) { + this.syncTaskFrequencyInDay = syncTaskFrequencyInDay; + } + + public String getSyncTaskStartTime() { + return syncTaskStartTime; + } + + public void setSyncTaskStartTime(String syncTaskStartTime) { + this.syncTaskStartTime = syncTaskStartTime; + } + + public int getSyncTaskStartTimeHr() { + return syncTaskStartTimeHr; + } + + public void setSyncTaskStartTimeHr(int syncTaskStartTimeHr) { + this.syncTaskStartTimeHr = syncTaskStartTimeHr; + } + + public int getSyncTaskStartTimeMin() { + return syncTaskStartTimeMin; + } + + public void setSyncTaskStartTimeMin(int syncTaskStartTimeMin) { + this.syncTaskStartTimeMin = syncTaskStartTimeMin; + } + + public int getSyncTaskStartTimeSec() { + return syncTaskStartTimeSec; + } + + public void setSyncTaskStartTimeSec(int syncTaskStartTimeSec) { + this.syncTaskStartTimeSec = syncTaskStartTimeSec; + } + + public String getSyncTaskStartTimeTimeZone() { + return syncTaskStartTimeTimeZone; + } + + public void setSyncTaskStartTimeTimeZone(String syncTaskStartTimeTimeZone) { + this.syncTaskStartTimeTimeZone = syncTaskStartTimeTimeZone; + } + + public int getSyncTaskFrequencyInMs() { + return syncTaskFrequencyInMs; + } + + public void setSyncTaskFrequencyInMs(int syncTaskFrequencyInMs) { + this.syncTaskFrequencyInMs = syncTaskFrequencyInMs; + } + + public long getResolverProgressLogFrequencyInMs() { + return resolverProgressLogFrequencyInMs; + } + + public void setResolverProgressLogFrequencyInMs(long resolverProgressLogFrequencyInMs) { + this.resolverProgressLogFrequencyInMs = resolverProgressLogFrequencyInMs; + } + + public long 
getResolverQueueMonitorFrequencyInMs() {
+    return resolverQueueMonitorFrequencyInMs;
+  }
+
+  public void setResolverQueueMonitorFrequencyInMs(long resolverQueueMonitorFrequencyInMs) {
+    this.resolverQueueMonitorFrequencyInMs = resolverQueueMonitorFrequencyInMs;
+  }
+
+  public boolean isIndexIntegrityValidatorEnabled() {
+    return indexIntegrityValidatorEnabled;
+  }
+
+  public void setIndexIntegrityValidatorEnabled(boolean indexIntegrityValidatorEnabled) {
+    this.indexIntegrityValidatorEnabled = indexIntegrityValidatorEnabled;
+  }
+
+  public long getIndexIntegrityValidatorFrequencyInMs() {
+    return indexIntegrityValidatorFrequencyInMs;
+  }
+
+  public void setIndexIntegrityValidatorFrequencyInMs(long indexIntegrityValidatorFrequencyInMs) {
+    this.indexIntegrityValidatorFrequencyInMs = indexIntegrityValidatorFrequencyInMs;
+  }
+
+  public boolean isConfigOkForStartupSync() {
+    return configOkForStartupSync;
+  }
+
+  public void setConfigOkForStartupSync(boolean configOkForStartupSync) {
+    this.configOkForStartupSync = configOkForStartupSync;
+  }
+
+  public boolean isConfigOkForPeriodicSync() {
+    return configOkForPeriodicSync;
+  }
+
+  public void setConfigOkForPeriodicSync(boolean configOkForPeriodicSync) {
+    this.configOkForPeriodicSync = configOkForPeriodicSync;
+  }
+
+  public boolean isConfigDisabledForInitialSync() {
+    return syncTaskInitialDelayInMs == SynchronizerConstants.DELAY_NO_STARTUP_SYNC_IN_MS;
+  }
+
+  public boolean isAutosuggestSynchronizationEnabled() {
+    return autosuggestSynchronizationEnabled;
+  }
+
+  public void setAutosuggestSynchronizationEnabled(boolean autosuggestSynchronizationEnabled) {
+    this.autosuggestSynchronizationEnabled = autosuggestSynchronizationEnabled;
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "SynchronizerConfiguration [syncTaskInitialDelayInMs=" + syncTaskInitialDelayInMs
+        + ", syncTaskFrequencyInMs=" + syncTaskFrequencyInMs + ", scrollContextTimeToLiveInMinutes="
+        + scrollContextTimeToLiveInMinutes + ", numScrollContextItemsToRetrievePerRequest="
+        + numScrollContextItemsToRetrievePerRequest + ", resolverProgressLogFrequencyInMs="
+        + resolverProgressLogFrequencyInMs + ", resolverQueueMonitorFrequencyInMs="
+        + resolverQueueMonitorFrequencyInMs + ", indexIntegrityValidatorEnabled="
+        + indexIntegrityValidatorEnabled + ", indexIntegrityValidatorFrequencyInMs="
+        + indexIntegrityValidatorFrequencyInMs + ", syncTaskFrequencyInDay="
+        + syncTaskFrequencyInDay + ", syncTaskStartTime=" + syncTaskStartTime
+        + ", syncTaskStartTimeHr=" + syncTaskStartTimeHr + ", syncTaskStartTimeMin="
+        + syncTaskStartTimeMin + ", syncTaskStartTimeSec=" + syncTaskStartTimeSec
+        + ", syncTaskStartTimeTimeZone=" + syncTaskStartTimeTimeZone
+        + ", displayVerboseQueueManagerStats=" + displayVerboseQueueManagerStats
+        + ", resourceNotFoundErrorsSupressed=" + resourceNotFoundErrorsSupressed
+        + ", nodesOnlyModifierEnabled=" + nodesOnlyModifierEnabled + ", configOkForStartupSync="
+        + configOkForStartupSync + ", configOkForPeriodicSync=" + configOkForPeriodicSync
+        + ", autosuggestSynchronizationEnabled=" + autosuggestSynchronizationEnabled + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConstants.java b/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConstants.java
new file mode 100644
index 0000000..8e22157
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/synchronizer/config/SynchronizerConstants.java
@@ -0,0 +1,63 @@
+/**
+ * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.config; + +import java.util.Date; + +/** + * The Class SynchronizerConstants. + */ +public final class SynchronizerConstants { + // Error values for invalid user input + public static final int DEFAULT_CONFIG_ERROR_INT_VALUE = Integer.MAX_VALUE; + public static final Date DEFAULT_CONFIG_ERROR_DATE_VALUE = new Date(Long.MAX_VALUE); + + // constants for scheduling synchronizer + public static final int COMPONENTS_IN_TIMESTAMP = 2; + public static final String DEFAULT_INITIAL_DELAY_IN_MS = "0"; + public static final String DEFAULT_TASK_FREQUENCY_IN_DAY = "0"; + public static final String DEFAULT_START_TIMESTAMP = "05:00:00 UTC"; + public static final long DELAY_NO_STARTUP_SYNC_IN_MS = 0; + public static final long DELAY_NO_PERIODIC_SYNC_IN_MS = 0; + public static final int IDX_TIME_IN_TIMESTAMP = 0; + public static final int IDX_TIMEZONE_IN_TIMESTAMP = 1; + public static final long MILLISEC_IN_A_MIN = 60000; + public static final long MILLISEC_IN_A_DAY = 24 * 60 * 60 * 1000; + public static final String TIME_STD = "GMT"; + public static final String TIME_CONFIG_STD = "UTC"; + public static final String TIMESTAMP24HOURS_PATTERN = + "([01]?[0-9]|2[0-3]):[0-5][0-9]:[0-5][0-9] UTC[+|-][0-5][0-9]:[0-5][0-9]"; + + + + public static final String DEFAULT_SCROLL_CTX_TIME_TO_LIVE_IN_MIN = "5"; + public static final String DEFAULT_NUM_SCROLL_CTX_ITEMS_TO_RETRIEVE_PER_REQ = "5000"; + + /** + * Instantiates a new synchronizer constants. + */ + private SynchronizerConstants() {} +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/config/TaskProcessorConfig.java b/src/main/java/org/openecomp/sparky/synchronizer/config/TaskProcessorConfig.java new file mode 100644 index 0000000..7cbfe31 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/config/TaskProcessorConfig.java @@ -0,0 +1,328 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer.config;
+
+import java.util.Properties;
+
+/**
+ * Tuning configuration for synchronizer task processing: worker concurrency,
+ * the transaction rate controller's target TPS and sampling window, and the
+ * label/axis/bin settings for each histogram published by TaskProcessingStats.
+ *
+ * <p>Note: {@link #initializeFromProperties(Properties)} assumes every key is
+ * present; missing numeric keys surface as runtime parse exceptions.
+ *
+ * @author davea
+ */
+public class TaskProcessorConfig {
+  /**
+   * Initialize from properties.
+   *
+   * @param props the props
+   */
+  public void initializeFromProperties(Properties props) {
+
+    if (props == null) {
+      return;
+    }
+
+    maxConcurrentWorkers = Integer.parseInt(props.getProperty("maxConcurrentWorkers"));
+    transactionRateControllerEnabled =
+        Boolean.parseBoolean(props.getProperty("transactionRateControllerEnabled"));
+    numSamplesPerThreadForRunningAverage =
+        Integer.parseInt(props.getProperty("numSamplesPerThreadForRunningAverage"));
+    targetTps = Double.parseDouble(props.getProperty("targetTPS"));
+
+    bytesHistogramLabel = props.getProperty("bytesHistogramLabel");
+    bytesHistogramMaxYAxis = Long.parseLong(props.getProperty("bytesHistogramMaxYAxis"));
+    bytesHistogramNumBins = Integer.parseInt(props.getProperty("bytesHistogramNumBins"));
+    bytesHistogramNumDecimalPoints =
+        Integer.parseInt(props.getProperty("bytesHistogramNumDecimalPoints"));
+
+    queueLengthHistogramLabel = props.getProperty("queueLengthHistogramLabel");
+    queueLengthHistogramMaxYAxis =
+        Long.parseLong(props.getProperty("queueLengthHistogramMaxYAxis"));
+    queueLengthHistogramNumBins =
+        Integer.parseInt(props.getProperty("queueLengthHistogramNumBins"));
+    queueLengthHistogramNumDecimalPoints =
+        Integer.parseInt(props.getProperty("queueLengthHistogramNumDecimalPoints"));
+
+    taskAgeHistogramLabel = props.getProperty("taskAgeHistogramLabel");
+    taskAgeHistogramMaxYAxis = Long.parseLong(props.getProperty("taskAgeHistogramMaxYAxis"));
+    taskAgeHistogramNumBins = Integer.parseInt(props.getProperty("taskAgeHistogramNumBins"));
+    taskAgeHistogramNumDecimalPoints =
+        Integer.parseInt(props.getProperty("taskAgeHistogramNumDecimalPoints"));
+
+    responseTimeHistogramLabel = props.getProperty("responseTimeHistogramLabel");
+    responseTimeHistogramMaxYAxis =
+        Long.parseLong(props.getProperty("responseTimeHistogramMaxYAxis"));
+    responseTimeHistogramNumBins =
+        Integer.parseInt(props.getProperty("responseTimeHistogramNumBins"));
+    responseTimeHistogramNumDecimalPoints =
+        Integer.parseInt(props.getProperty("responseTimeHistogramNumDecimalPoints"));
+
+    tpsHistogramLabel = props.getProperty("tpsHistogramLabel");
+    tpsHistogramMaxYAxis = Long.parseLong(props.getProperty("tpsHistogramMaxYAxis"));
+    tpsHistogramNumBins = Integer.parseInt(props.getProperty("tpsHistogramNumBins"));
+    tpsHistogramNumDecimalPoints =
+        Integer.parseInt(props.getProperty("tpsHistogramNumDecimalPoints"));
+  }
+
+  private int maxConcurrentWorkers;
+
+  private boolean transactionRateControllerEnabled;
+
+  private int numSamplesPerThreadForRunningAverage;
+
+  private double targetTps;
+
+  private String bytesHistogramLabel;
+
+  private long bytesHistogramMaxYAxis;
+
+  private int bytesHistogramNumBins;
+
+  private int 
bytesHistogramNumDecimalPoints; + + private String queueLengthHistogramLabel; + + private long queueLengthHistogramMaxYAxis; + + private int queueLengthHistogramNumBins; + + private int queueLengthHistogramNumDecimalPoints; + + private String taskAgeHistogramLabel; + + private long taskAgeHistogramMaxYAxis; + + private int taskAgeHistogramNumBins; + + private int taskAgeHistogramNumDecimalPoints; + + private String responseTimeHistogramLabel; + + private long responseTimeHistogramMaxYAxis; + + private int responseTimeHistogramNumBins; + + private int responseTimeHistogramNumDecimalPoints; + + private String tpsHistogramLabel; + + private long tpsHistogramMaxYAxis; + + private int tpsHistogramNumBins; + + private int tpsHistogramNumDecimalPoints; + + public String getBytesHistogramLabel() { + return bytesHistogramLabel; + } + + public void setBytesHistogramLabel(String bytesHistogramLabel) { + this.bytesHistogramLabel = bytesHistogramLabel; + } + + public long getBytesHistogramMaxYAxis() { + return bytesHistogramMaxYAxis; + } + + public void setBytesHistogramMaxYAxis(long bytesHistogramMaxYAxis) { + this.bytesHistogramMaxYAxis = bytesHistogramMaxYAxis; + } + + public int getBytesHistogramNumBins() { + return bytesHistogramNumBins; + } + + public void setBytesHistogramNumBins(int bytesHistogramNumBins) { + this.bytesHistogramNumBins = bytesHistogramNumBins; + } + + public int getBytesHistogramNumDecimalPoints() { + return bytesHistogramNumDecimalPoints; + } + + public void setBytesHistogramNumDecimalPoints(int bytesHistogramNumDecimalPoints) { + this.bytesHistogramNumDecimalPoints = bytesHistogramNumDecimalPoints; + } + + public String getQueueLengthHistogramLabel() { + return queueLengthHistogramLabel; + } + + public void setQueueLengthHistogramLabel(String queueLengthHistogramLabel) { + this.queueLengthHistogramLabel = queueLengthHistogramLabel; + } + + public long getQueueLengthHistogramMaxYAxis() { + return queueLengthHistogramMaxYAxis; + } + + public void setQueueLengthHistogramMaxYAxis(long queueLengthHistogramMaxYAxis) { + this.queueLengthHistogramMaxYAxis = queueLengthHistogramMaxYAxis; + } + + public int getQueueLengthHistogramNumBins() { + return queueLengthHistogramNumBins; + } + + public void setQueueLengthHistogramNumBins(int queueLengthHistogramNumBins) { + this.queueLengthHistogramNumBins = queueLengthHistogramNumBins; + } + + public int getQueueLengthHistogramNumDecimalPoints() { + return queueLengthHistogramNumDecimalPoints; + } + + public void setQueueLengthHistogramNumDecimalPoints(int queueLengthHistogramNumDecimalPoints) { + this.queueLengthHistogramNumDecimalPoints = queueLengthHistogramNumDecimalPoints; + } + + public boolean isTransactionRateControllerEnabled() { + return transactionRateControllerEnabled; + } + + public void setTransactionRateControllerEnabled(boolean transactionRateControllerEnabled) { + this.transactionRateControllerEnabled = transactionRateControllerEnabled; + } + + public int getNumSamplesPerThreadForRunningAverage() { + return numSamplesPerThreadForRunningAverage; + } + + public void setNumSamplesPerThreadForRunningAverage(int numSamplesPerThreadForRunningAverage) { + this.numSamplesPerThreadForRunningAverage = numSamplesPerThreadForRunningAverage; + } + + public double getTargetTps() { + return targetTps; + } + + public void setTargetTps(double targetTps) { + this.targetTps = targetTps; + } + + public int getMaxConcurrentWorkers() { + return maxConcurrentWorkers; + } + + public void setMaxConcurrentWorkers(int maxConcurrentWorkers) { + 
this.maxConcurrentWorkers = maxConcurrentWorkers; + } + + public String getTaskAgeHistogramLabel() { + return taskAgeHistogramLabel; + } + + public void setTaskAgeHistogramLabel(String taskAgeHistogramLabel) { + this.taskAgeHistogramLabel = taskAgeHistogramLabel; + } + + public long getTaskAgeHistogramMaxYAxis() { + return taskAgeHistogramMaxYAxis; + } + + public void setTaskAgeHistogramMaxYAxis(long taskAgeHistogramMaxYAxis) { + this.taskAgeHistogramMaxYAxis = taskAgeHistogramMaxYAxis; + } + + public int getTaskAgeHistogramNumBins() { + return taskAgeHistogramNumBins; + } + + public void setTaskAgeHistogramNumBins(int taskAgeHistogramNumBins) { + this.taskAgeHistogramNumBins = taskAgeHistogramNumBins; + } + + public int getTaskAgeHistogramNumDecimalPoints() { + return taskAgeHistogramNumDecimalPoints; + } + + public void setTaskAgeHistogramNumDecimalPoints(int taskAgeHistogramNumDecimalPoints) { + this.taskAgeHistogramNumDecimalPoints = taskAgeHistogramNumDecimalPoints; + } + + public String getResponseTimeHistogramLabel() { + return responseTimeHistogramLabel; + } + + public void setResponseTimeHistogramLabel(String responseTimeHistogramLabel) { + this.responseTimeHistogramLabel = responseTimeHistogramLabel; + } + + public long getResponseTimeHistogramMaxYAxis() { + return responseTimeHistogramMaxYAxis; + } + + public void setResponseTimeHistogramMaxYAxis(long responseTimeHistogramMaxYAxis) { + this.responseTimeHistogramMaxYAxis = responseTimeHistogramMaxYAxis; + } + + public int getResponseTimeHistogramNumBins() { + return responseTimeHistogramNumBins; + } + + public void setResponseTimeHistogramNumBins(int responseTimeHistogramNumBins) { + this.responseTimeHistogramNumBins = responseTimeHistogramNumBins; + } + + public int getResponseTimeHistogramNumDecimalPoints() { + return responseTimeHistogramNumDecimalPoints; + } + + public void setResponseTimeHistogramNumDecimalPoints(int responseTimeHistogramNumDecimalPoints) { + this.responseTimeHistogramNumDecimalPoints = responseTimeHistogramNumDecimalPoints; + } + + public String getTpsHistogramLabel() { + return tpsHistogramLabel; + } + + public void setTpsHistogramLabel(String tpsHistogramLabel) { + this.tpsHistogramLabel = tpsHistogramLabel; + } + + public long getTpsHistogramMaxYAxis() { + return tpsHistogramMaxYAxis; + } + + public void setTpsHistogramMaxYAxis(long tpsHistogramMaxYAxis) { + this.tpsHistogramMaxYAxis = tpsHistogramMaxYAxis; + } + + public int getTpsHistogramNumBins() { + return tpsHistogramNumBins; + } + + public void setTpsHistogramNumBins(int tpsHistogramNumBins) { + this.tpsHistogramNumBins = tpsHistogramNumBins; + } + + public int getTpsHistogramNumDecimalPoints() { + return tpsHistogramNumDecimalPoints; + } + + public void setTpsHistogramNumDecimalPoints(int tpsHistogramNumDecimalPoints) { + this.tpsHistogramNumDecimalPoints = tpsHistogramNumDecimalPoints; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationEntity.java new file mode 100644 index 0000000..0f817fe --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationEntity.java @@ -0,0 +1,116 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer.entity;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.openecomp.sparky.config.oxm.OxmModelLoader;
+import org.openecomp.sparky.util.NodeUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * The Class AggregationEntity.
+ */
+public class AggregationEntity extends IndexableEntity implements IndexDocument {
+  private Map<String, String> attributes = new HashMap<String, String>();
+  protected ObjectMapper mapper = new ObjectMapper();
+
+  /**
+   * Instantiates a new aggregation entity.
+   */
+  public AggregationEntity() {
+    super();
+  }
+
+  /**
+   * Instantiates a new aggregation entity.
+   *
+   * @param loader the loader
+   */
+  public AggregationEntity(OxmModelLoader loader) {
+    super(loader);
+  }
+
+  /* (non-Javadoc)
+   * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields()
+   */
+  @Override
+  public void deriveFields() {
+
+    /*
+     * We'll try and create a unique identity key that we can use for differencing the previously
+     * imported record sets as we won't have granular control of what is created/removed and when.
+     * The best we can hope for is identification of resources by generated Id until the
+     * Identity-Service UUID is tagged against all resources, then we can use that instead.
+     */
+    this.id = NodeUtils.generateUniqueShaDigest(link);
+  }
+
+  public void copyAttributeKeyValuePair(Map<String, Object> map) {
+    for (Map.Entry<String, Object> entry : map.entrySet()) {
+      String key = entry.getKey();
+      Object value = entry.getValue();
+      // Relationship data is not required for aggregations, and a null value
+      // cannot be rendered, so both are skipped.
+      if (!key.equalsIgnoreCase("relationship-list") && value != null) {
+        this.attributes.put(key, value.toString());
+      }
+    }
+  }
+
+  public void addAttributeKeyValuePair(String key, String value) {
+    this.attributes.put(key, value);
+  }
+
+  @Override
+  public String getIndexDocumentJson() {
+    ObjectNode rootNode = mapper.createObjectNode();
+    rootNode.put("link", this.getLink());
+    rootNode.put("lastmodTimestamp", this.getEntityTimeStamp());
+    for (String key : this.attributes.keySet()) {
+      rootNode.put(key, this.attributes.get(key));
+    }
+    return rootNode.toString();
+  }
+
+  @Override
+  public ObjectNode getBulkImportEntity() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "")
+        + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", "
+            : "")
+        + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? 
"id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationSuggestionEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationSuggestionEntity.java new file mode 100644 index 0000000..155aed1 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/AggregationSuggestionEntity.java @@ -0,0 +1,86 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import java.util.ArrayList; +import java.util.List; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.openecomp.sparky.util.NodeUtils; + +public class AggregationSuggestionEntity extends IndexableEntity implements IndexDocument { + + private List<String> inputs = new ArrayList<String>(); + private final String outputString = "VNFs"; + protected ObjectMapper mapper = new ObjectMapper(); + + public AggregationSuggestionEntity() { + super(); + inputs.add("VNFs"); + inputs.add("generic-vnfs"); + } + + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(this.outputString); + } + + @Override + public String getIndexDocumentJson() { + + JSONArray inputArray = new JSONArray(); + for (String input: inputs) { + input = input.replace(",","" ); + input = input.replace("[","" ); + input = input.replace("]","" ); + inputArray.put(input); + } + + JSONObject entitySuggest = new JSONObject(); + entitySuggest.put("input", inputArray); + entitySuggest.put("output", this.outputString); + entitySuggest.put("weight", 100); + + JSONObject payloadNode = new JSONObject(); + entitySuggest.put("payload", payloadNode); + + JSONObject rootNode = new JSONObject(); + rootNode.put("entity_suggest", entitySuggest); + + return rootNode.toString(); + } + + @Override + public ObjectNode getBulkImportEntity() { + // TODO Auto-generated method stub + return null; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java new file mode 100644 index 0000000..a115a84 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexDocument.java @@ -0,0 +1,45 @@ +/** + * 
============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * The Interface IndexDocument. + */ +public interface IndexDocument { + + /** + * Derive fields. + */ + public void deriveFields(); + + public String getIndexDocumentJson(); + + public String getId(); + + public ObjectNode getBulkImportEntity(); +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java new file mode 100644 index 0000000..d6de9c0 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableCrossEntityReference.java @@ -0,0 +1,119 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import java.util.ArrayList; + +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.util.NodeUtils; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + + +/** + * The Class IndexableCrossEntityReference. 
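+ *
+ * <p>One document per AAI entity: the cross-entity-reference values collected
+ * for the entity are de-duplicated and concatenated with ";" into a single
+ * indexed field when {@code deriveFields()} runs.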
+ */ + +public class IndexableCrossEntityReference extends IndexableEntity implements IndexDocument { + + protected String crossReferenceEntityValues; + protected ArrayList<String> crossEntityReferenceCollection = new ArrayList<String>(); + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new indexable cross entity reference. + */ + public IndexableCrossEntityReference() { + super(); + } + + /** + * Instantiates a new indexable cross entity reference. + * + * @param loader the loader + */ + public IndexableCrossEntityReference(OxmModelLoader loader) { + super(loader); + } + + /** + * Adds the cross entity reference value. + * + * @param crossEntityReferenceValue the cross entity reference value + */ + public void addCrossEntityReferenceValue(String crossEntityReferenceValue) { + if (!crossEntityReferenceCollection.contains(crossEntityReferenceValue)) { + crossEntityReferenceCollection.add(crossEntityReferenceValue); + } + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + this.id = NodeUtils.generateUniqueShaDigest(link); + this.crossReferenceEntityValues = NodeUtils.concatArray(crossEntityReferenceCollection, ";"); + } + + @Override + public String getIndexDocumentJson() { + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("entityType", this.getEntityType()); + rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); + rootNode.put("crossEntityReferenceValues", crossReferenceEntityValues); + rootNode.put("link", link); + rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); + return rootNode.toString(); + } + + @Override + public ObjectNode getBulkImportEntity() { + // TODO Auto-generated method stub + return null; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexableCrossEntityReference [" + + (crossReferenceEntityValues != null + ? "crossReferenceEntityValues=" + crossReferenceEntityValues + ", " : "") + + (crossEntityReferenceCollection != null + ? "crossEntityReferenceCollection=" + crossEntityReferenceCollection + ", " : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + + (link != null ? "link=" + link + ", " : "") + (loader != null ? "loader=" + loader : "") + + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java new file mode 100644 index 0000000..6159bb1 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/IndexableEntity.java @@ -0,0 +1,106 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import java.sql.Timestamp; +import java.text.SimpleDateFormat; + +import org.openecomp.sparky.config.oxm.OxmModelLoader; + +/** + * The Class IndexableEntity. + */ +public abstract class IndexableEntity { + protected String id; // generated, SHA-256 digest + protected String entityType; + protected String entityPrimaryKeyValue; + protected String lastmodTimestamp; + protected String link; + protected OxmModelLoader loader; + + private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; + + /** + * Instantiates a new indexable entity. + */ + public IndexableEntity() { + SimpleDateFormat dateFormat = new SimpleDateFormat(TIMESTAMP_FORMAT); + Timestamp timestamp = new Timestamp(System.currentTimeMillis()); + String currentFormattedTimeStamp = dateFormat.format(timestamp); + this.setEntityTimeStamp(currentFormattedTimeStamp); + } + + /** + * Instantiates a new indexable entity. + * + * @param loader the loader + */ + public IndexableEntity(OxmModelLoader loader) { + this(); + this.loader = loader; + } + + public String getId() { + return id; + } + + public String getEntityType() { + return entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public String getEntityTimeStamp() { + return lastmodTimestamp; + } + + public void setId(String id) { + this.id = id; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public void setEntityPrimaryKeyValue(String fieldValue) { + this.entityPrimaryKeyValue = fieldValue; + } + + public void setEntityTimeStamp(String lastmodTimestamp) { + this.lastmodTimestamp = lastmodTimestamp; + } + + public String getLink() { + return link; + } + + public void setLink(String link) { + this.link = link; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java new file mode 100644 index 0000000..eccb52b --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/MergableEntity.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
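IndexableEntity stamps lastmodTimestamp in its constructor using SimpleDateFormat, which is not thread-safe and therefore should not be shared across the synchronizer's worker threads. A sketch of producing the same timestamp format with the thread-safe java.time API, assuming the pattern string is the only requirement:

import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class TimestampExample {
  // Same pattern as IndexableEntity.TIMESTAMP_FORMAT; DateTimeFormatter is
  // immutable and thread-safe, unlike SimpleDateFormat.
  private static final DateTimeFormatter TS =
      DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

  public static void main(String[] args) {
    System.out.println(ZonedDateTime.now().format(TS)); // e.g. 2017-06-12T16:41:12.000-0400
  }
}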
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; + +import java.util.HashMap; +import java.util.Map; + +/** + * The Class MergableEntity. + */ +public class MergableEntity { + private Map<String, String> other = new HashMap<String, String>(); + + /** + * Any. + * + * @return the map + */ + @JsonAnyGetter + public Map<String, String> any() { + return other; + } + + /** + * Sets the. + * + * @param name the name + * @param value the value + */ + @JsonAnySetter + public void set(String name, String value) { + other.put(name, value); + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java new file mode 100644 index 0000000..0e52d2e --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/ObjectIdCollection.java @@ -0,0 +1,79 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; + +/** + * The Class ObjectIdCollection. + */ +public class ObjectIdCollection { + + protected ConcurrentHashMap<String, String> importedObjectIds = + new ConcurrentHashMap<String, String>(); + + public Collection<String> getImportedObjectIds() { + return importedObjectIds.values(); + } + + /** + * Adds the object id. + * + * @param id the id + */ + public void addObjectId(String id) { + importedObjectIds.putIfAbsent(id, id); + } + + public int getSize() { + return importedObjectIds.values().size(); + } + + /** + * Adds the all. 
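MergableEntity relies on Jackson's @JsonAnyGetter/@JsonAnySetter pair: every JSON property, known or not, lands in the backing map on read and is written back out as a top-level property on write, which is what makes a read-modify-write merge possible. A small round-trip sketch with an invented two-field document:

import com.fasterxml.jackson.databind.ObjectMapper;

import org.openecomp.sparky.synchronizer.entity.MergableEntity;

public class MergableEntityExample {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Unrecognized properties are routed through the @JsonAnySetter into the map.
    MergableEntity entity = mapper.readValue(
        "{\"vnf-name\":\"example\",\"prov-status\":\"ACTIVE\"}", MergableEntity.class);
    entity.set("prov-status", "RETIRED"); // overwrite one property before writing back
    // The @JsonAnyGetter re-emits the map entries as top-level JSON properties.
    System.out.println(mapper.writeValueAsString(entity));
  }
}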
+ * + * @param items the items + */ + public void addAll(List<String> items) { + if (items == null) { + return; + } + + items.stream().forEach((item) -> { + importedObjectIds.putIfAbsent(item, item); + }); + + } + + /** + * Clear. + */ + public void clear() { + importedObjectIds.clear(); + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java new file mode 100644 index 0000000..2bccb0a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/SearchableEntity.java @@ -0,0 +1,152 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import java.util.ArrayList; +import java.util.List; + +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.util.NodeUtils; + +/** + * The Class SearchableEntity. + */ +public class SearchableEntity extends IndexableEntity implements IndexDocument { + protected List<String> searchTagCollection = new ArrayList<String>(); + protected List<String> searchTagIdCollection = new ArrayList<String>(); + protected ObjectMapper mapper = new ObjectMapper(); + + /** + * Instantiates a new searchable entity. + */ + public SearchableEntity() { + super(); + } + + /** + * Instantiates a new searchable entity. + * + * @param loader the loader + */ + public SearchableEntity(OxmModelLoader loader) { + super(loader); + } + + /* + * Generated fields, leave the settings for junit overrides + */ + protected String searchTags; // generated based on searchTagCollection values + protected String searchTagIDs; + + /** + * Generates the sha based id. + */ + public void generateId() { + this.id = NodeUtils.generateUniqueShaDigest(link); + } + + /* (non-Javadoc) + * @see org.openecomp.sparky.synchronizer.entity.IndexDocument#deriveFields() + */ + @Override + public void deriveFields() { + + /* + * We'll try and create a unique identity key that we can use for differencing the previously + * imported record sets as we won't have granular control of what is created/removed and when. + * The best we can hope for is identification of resources by generated Id until the + * Identity-Service UUID is tagged against all resources, then we can use that instead. 
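ObjectIdCollection is a concurrent id set keyed by the id itself, which makes pre/post-sync differencing straightforward. A sketch of one plausible use, computing ids that disappeared between two sync passes; the stale-id semantics here are an assumption, not taken from this commit:

import java.util.HashSet;
import java.util.Set;

import org.openecomp.sparky.synchronizer.entity.ObjectIdCollection;

public class SyncDeleteExample {
  // Ids present before the sync but absent afterwards are deletion candidates.
  public static Set<String> staleIds(ObjectIdCollection before, ObjectIdCollection after) {
    Set<String> stale = new HashSet<>(before.getImportedObjectIds());
    stale.removeAll(after.getImportedObjectIds());
    return stale;
  }
}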
+ */ + generateId(); + this.searchTags = NodeUtils.concatArray(searchTagCollection, ";"); + this.searchTagIDs = NodeUtils.concatArray(this.searchTagIdCollection, ";"); + } + + /** + * Adds the search tag with key. + * + * @param searchTag the search tag + * @param searchTagKey the key associated with the search tag (key:value) + */ + public void addSearchTagWithKey(String searchTag, String searchTagKey) { + searchTagIdCollection.add(searchTagKey); + searchTagCollection.add(searchTag); + } + + public List<String> getSearchTagCollection() { + return searchTagCollection; + } + + public String getSearchTags() { + return searchTags; + } + + public String getSearchTagIDs() { + return searchTagIDs; + } + + public List<String> getSearchTagIdCollection() { + return searchTagIdCollection; + } + + @Override + public String getIndexDocumentJson() { + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("entityType", this.getEntityType()); + rootNode.put("entityPrimaryKeyValue", this.getEntityPrimaryKeyValue()); + rootNode.put("searchTagIDs", this.getSearchTagIDs()); + rootNode.put("searchTags", this.getSearchTags()); + rootNode.put("link", this.getLink()); + rootNode.put("lastmodTimestamp", this.getEntityTimeStamp()); + return rootNode.toString(); + } + + @Override + public ObjectNode getBulkImportEntity() { + // TODO Auto-generated method stub + return null; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "IndexDocument [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? "entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (searchTagCollection != null ? "searchTagCollection=" + searchTagCollection + ", " : "") + + (searchTagIdCollection != null ? "searchTagIDCollection=" + searchTagIdCollection + ", " + : "") + + (mapper != null ? "mapper=" + mapper + ", " : "") + (id != null ? "id=" + id + ", " : "") + + (lastmodTimestamp != null ? "lastmodTimestamp=" + lastmodTimestamp + ", " : "") + + (searchTags != null ? "searchTags=" + searchTags + ", " : "") + + (searchTagIDs != null ? "searchTagIDs=" + searchTagIDs : "") + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java new file mode 100644 index 0000000..9a3d84d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/SelfLinkDescriptor.java @@ -0,0 +1,91 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
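Putting SearchableEntity together: tags and their keys are collected pairwise, and deriveFields() joins each collection with ';' and digests the self link into the document id. A hypothetical end-to-end usage with invented entity values:

import org.openecomp.sparky.synchronizer.entity.SearchableEntity;

public class SearchableEntityExample {
  public static void main(String[] args) {
    SearchableEntity se = new SearchableEntity();
    se.setEntityType("complex");
    se.setEntityPrimaryKeyValue("clli-1");   // hypothetical key value
    se.setLink("https://aai-host:8443/aai/v8/cloud-infrastructure/complexes/complex/clli-1");
    se.addSearchTagWithKey("clli-1", "physical-location-id");
    se.addSearchTagWithKey("Some City", "city");

    se.deriveFields(); // id = SHA digest of link; tags joined with ';'
    // searchTags => "clli-1;Some City", searchTagIDs => "physical-location-id;city"
    System.out.println(se.getIndexDocumentJson());
  }
}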
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +/** + * The Class SelfLinkDescriptor. + */ +public class SelfLinkDescriptor { + private String selfLink; + private String entityType; + private String depthModifier; + + public String getDepthModifier() { + return depthModifier; + } + + public void setDepthModifier(String depthModifier) { + this.depthModifier = depthModifier; + } + + public String getSelfLink() { + return selfLink; + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public SelfLinkDescriptor(String selfLink) { + this(selfLink, null, null); + } + + /** + * Instantiates a new self link descriptor. + * + * @param selfLink the self link + * @param entityType the entity type + */ + public SelfLinkDescriptor(String selfLink, String entityType) { + this(selfLink, null, entityType); + } + + public SelfLinkDescriptor(String selfLink, String depthModifier, String entityType) { + this.selfLink = selfLink; + this.entityType = entityType; + this.depthModifier = depthModifier; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "SelfLinkDescriptor [" + (selfLink != null ? "selfLink=" + selfLink + ", " : "") + + (entityType != null ? "entityType=" + entityType + ", " : "") + + (depthModifier != null ? "depthModifier=" + depthModifier : "") + "]"; + } + +} + diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java new file mode 100644 index 0000000..38558a1 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/SuggestionSearchEntity.java @@ -0,0 +1,279 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sparky.synchronizer.entity; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.json.JSONArray; +import org.json.JSONObject; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.util.NodeUtils; + +public class SuggestionSearchEntity extends IndexableEntity implements IndexDocument { + + private String entityType; + private List<String> suggestionConnectorWords = new ArrayList<String>(); + private List<String> suggestionAttributeTypes = new ArrayList<String>(); + private List<String> suggestionAttributeValues = new ArrayList<String>(); + private List<String> suggestionTypeAliases = new ArrayList<String>(); + private List<String> suggestionInputPermutations = new ArrayList<String>(); + private List<String> suggestableAttr = new ArrayList<String>(); + private Map<String, String> payload = new HashMap<String, String>(); + private JSONObject payloadJsonNode = new JSONObject(); + private StringBuffer outputString = new StringBuffer(); + private String aliasToUse; + + public Map<String, String> getPayload() { + return payload; + } + + public void setPayload(Map<String, String> payload) { + this.payload = payload; + } + + + public JSONObject getPayloadJsonNode() { + return payloadJsonNode; + } + + public void setPayloadJsonNode(JSONObject payloadJsonNode) { + this.payloadJsonNode = payloadJsonNode; + } + + + protected ObjectMapper mapper = new ObjectMapper(); + + public SuggestionSearchEntity() { + super(); + } + + public void setSuggestableAttr(ArrayList<String> attributes) { + for (String attribute : attributes) { + this.suggestableAttr.add(attribute); + } + } + + public void setPayloadFromResponse(JsonNode node) { + Map<String, String> nodePayload = new HashMap<String, String>(); + if (suggestableAttr != null) { + for (String attribute : suggestableAttr) { + if (node.get(attribute) != null) { + nodePayload.put(attribute, node.get(attribute).asText()); + } + } + this.setPayload(nodePayload); + } + } + + + public SuggestionSearchEntity(OxmModelLoader loader) { + super(loader); + } + + @Override + public String getEntityType() { + return entityType; + } + + @Override + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public List<String> getSuggestionConnectorWords() { + return suggestionConnectorWords; + } + + public void setSuggestionConnectorWords(List<String> suggestionConnectorWords) { + this.suggestionConnectorWords = suggestionConnectorWords; + } + + public List<String> getSuggestionPropertyTypes() { + return this.suggestionAttributeTypes; + } + + public void setSuggestionPropertyTypes(List<String> suggestionPropertyTypes) { + this.suggestionAttributeTypes = suggestionPropertyTypes; + } + + public List<String> getSuggestionAttributeValues() { + return this.suggestionAttributeValues; + } + + public void setSuggestionAttributeValues(List<String> suggestionAttributeValues) { + this.suggestionAttributeValues = suggestionAttributeValues; + } + + public List<String> getSuggestionAliases() { + return this.suggestionTypeAliases; + } + + public void setSuggestionAliases(List<String> suggestionAliases) { + this.suggestionTypeAliases = suggestionAliases; + } + + public List<String> getSuggestionInputPermutations() { + return this.suggestionInputPermutations; 
+  }
+
+  public void setSuggestionInputPermutations(List<String> permutations) {
+    this.suggestionInputPermutations = permutations;
+  }
+
+  public void generateSuggestionInputPermutations() {
+
+    List<String> entityNames = new ArrayList<>();
+    entityNames.add(entityType);
+    HashMap<String, String> desc = loader.getOxmModel().get(this.entityType);
+    String attr = desc.get("suggestionAliases");
+    String[] suggestionAliasesArray = attr.split(",");
+    suggestionTypeAliases = Arrays.asList(suggestionAliasesArray);
+    this.setAliasToUse(suggestionAliasesArray[suggestionAliasesArray.length - 1]);
+    for (String alias : suggestionTypeAliases) {
+      entityNames.add(alias);
+    }
+    ArrayList<String> listOfSearchSuggestionPermutations = new ArrayList<>();
+
+    ArrayList<String> listToPermutate = new ArrayList<>(payload.values());
+
+    for (String entityName : entityNames) {
+      listToPermutate.add(entityName);
+      permutateList(listToPermutate, new ArrayList<String>(), listToPermutate.size(),
+          listOfSearchSuggestionPermutations);
+      listToPermutate.remove(entityName);
+    }
+    suggestionInputPermutations = listOfSearchSuggestionPermutations;
+  }
+
+  /**
+   * Generates all permutations of a list of Strings.
+   *
+   * @param list the items still available for placement
+   * @param permutation the partial permutation built so far
+   * @param size the target permutation length
+   * @param listOfSearchSuggestionPermutationList accumulator for completed permutations
+   */
+  private void permutateList(List<String> list, List<String> permutation, int size,
+      List<String> listOfSearchSuggestionPermutationList) {
+    if (permutation.size() == size) {
+      StringBuilder newPermutation = new StringBuilder();
+
+      for (int i = 0; i < permutation.size(); i++) {
+        newPermutation.append(permutation.get(i)).append(" ");
+      }
+
+      listOfSearchSuggestionPermutationList.add(newPermutation.toString().trim());
+
+      return;
+    }
+
+    String[] availableItems = list.toArray(new String[0]);
+
+    for (String i : availableItems) {
+      permutation.add(i);
+      list.remove(i);
+      permutateList(list, permutation, size, listOfSearchSuggestionPermutationList);
+      list.add(i);
+      permutation.remove(i);
+    }
+  }
+
+  public boolean isSuggestableDoc() {
+    return !this.getPayload().isEmpty();
+  }
+
+  @Override
+  public void deriveFields() {
+
+    int payloadEntryCounter = 1;
+    for (Map.Entry<String, String> payloadEntry : getPayload().entrySet()) {
+      // Add the payload (status) only if a valid value is present
+      if (payloadEntry.getValue() != null && payloadEntry.getValue().length() > 0) {
+        this.getPayloadJsonNode().put(payloadEntry.getKey(), payloadEntry.getValue());
+        this.outputString.append(payloadEntry.getValue());
+        if (payloadEntryCounter < getPayload().entrySet().size()) {
+          this.outputString.append(" and ");
+        } else {
+          this.outputString.append(" ");
+        }
+      }
+      payloadEntryCounter++;
+    }
+
+    this.outputString.append(this.getAliasToUse());
+    this.id = NodeUtils.generateUniqueShaDigest(outputString.toString());
+  }
+
+  @Override
+  public String getIndexDocumentJson() {
+    JSONObject rootNode = new JSONObject();
+
+    JSONArray suggestionsArray = new JSONArray();
+    for (String suggestion : suggestionInputPermutations) {
+      suggestionsArray.put(suggestion);
+    }
+
+    JSONObject entitySuggest = new JSONObject();
+
+    entitySuggest.put("input", suggestionsArray);
+    entitySuggest.put("output", this.outputString);
+    entitySuggest.put("payload", this.payloadJsonNode);
+    rootNode.put("entity_suggest", entitySuggest);
+
+    return rootNode.toString();
+  }
+
+  @Override
+  public ObjectNode getBulkImportEntity() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  public String getAliasToUse() {
+    return aliasToUse;
+  }
+
+  public void setAliasToUse(String aliasToUse) {
+ this.aliasToUse = aliasToUse; + } + + @Override + public String toString() { + return "SuggestionSearchEntity [entityType=" + entityType + ", suggestionConnectorWords=" + + suggestionConnectorWords + ", suggestionAttributeTypes=" + suggestionAttributeTypes + + ", suggestionAttributeValues=" + suggestionAttributeValues + ", suggestionTypeAliases=" + + suggestionTypeAliases + ", mapper=" + mapper + "]"; + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java b/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java new file mode 100644 index 0000000..4c15e30 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/entity/TransactionStorageType.java @@ -0,0 +1,57 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.entity; + +/** + * The Enum TransactionStorageType. + */ +public enum TransactionStorageType { + EDGE_TAG_QUERY(0, "aaiOffline/edge-tag-query"), ACTIVE_INVENTORY_QUERY(1, + "aaiOffline/active-inventory-query"); + + private Integer index; + private String outputFolder; + + /** + * Instantiates a new transaction storage type. + * + * @param index the index + * @param outputFolder the output folder + */ + TransactionStorageType(Integer index, String outputFolder) { + this.index = index; + this.outputFolder = outputFolder; + } + + public Integer getIndex() { + return index; + } + + public String getOutputFolder() { + return outputFolder; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/enumeration/OperationState.java b/src/main/java/org/openecomp/sparky/synchronizer/enumeration/OperationState.java new file mode 100644 index 0000000..65b350c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/enumeration/OperationState.java @@ -0,0 +1,33 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
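Two reading notes on SuggestionSearchEntity: its private entityType field shadows the protected one inherited from IndexableEntity (the overriding getter/setter keep behavior consistent, but it is easy to trip over), and generateSuggestionInputPermutations() produces n! phrases for n inputs, so the set of suggestable attributes must stay small. A standalone sketch of the same recursive permutation shape, illustrating the factorial growth:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PermutationSketch {
  // Same recursive shape as SuggestionSearchEntity.permutateList: choose each
  // remaining item as the next word, recurse, then undo the choice.
  static void permute(List<String> remaining, List<String> current, List<String> out) {
    if (remaining.isEmpty()) {
      out.add(String.join(" ", current));
      return;
    }
    for (String item : new ArrayList<>(remaining)) {
      remaining.remove(item);
      current.add(item);
      permute(remaining, current, out);
      current.remove(current.size() - 1);
      remaining.add(item);
    }
  }

  public static void main(String[] args) {
    List<String> out = new ArrayList<>();
    permute(new ArrayList<>(Arrays.asList("ACTIVE", "example-vnf", "vnf")),
        new ArrayList<String>(), out);
    System.out.println(out.size() + " phrases: " + out); // 6 phrases; 4 inputs would give 24
  }
}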
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.enumeration; + +/** + * The Enum OperationState. + */ +public enum OperationState { + INIT, OK, ERROR, ABORT, PENDING +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java b/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java new file mode 100644 index 0000000..67f8eb6 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/enumeration/SynchronizerState.java @@ -0,0 +1,33 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.enumeration; + +/** + * The Enum SynchronizerState. + */ +public enum SynchronizerState { + IDLE, PERFORMING_SYNCHRONIZATION +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java b/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java new file mode 100644 index 0000000..8f82371 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/filter/ElasticSearchSynchronizerFilter.java @@ -0,0 +1,111 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer.filter;
+
+import java.io.IOException;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+import org.openecomp.cl.api.Logger;
+import org.openecomp.cl.eelf.LoggerFactory;
+import org.openecomp.cl.mdc.MdcContext;
+import org.openecomp.sparky.config.oxm.OxmModelLoader;
+import org.openecomp.sparky.logging.AaiUiMsgs;
+import org.openecomp.sparky.synchronizer.SyncHelper;
+import org.openecomp.sparky.util.NodeUtils;
+
+/*
+ * This is a wire-frame for an experiment to get the jetty filter-lifecycle initialization method
+ * to set up a scheduled thread executor with an ElasticSearchSynchronization task, which (I'm
+ * hoping) will allow us to do periodic ES <=> AAI synchronization.
+ *
+ * Alternatively, if the embedded Java approach doesn't work, we could instead do a
+ * Runtime.getRuntime().exec("perl refreshElasticSearchInstance.pl"). We have two options; I'm
+ * hoping the embedded one will work for us.
+ */
+
+/**
+ * The Class ElasticSearchSynchronizerFilter.
+ */
+public class ElasticSearchSynchronizerFilter implements Filter {
+
+  private static final Logger LOG =
+      LoggerFactory.getInstance().getLogger(ElasticSearchSynchronizerFilter.class);
+
+  private SyncHelper syncHelper;
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#destroy()
+   */
+  @Override
+  public void destroy() {
+
+    if (syncHelper != null) {
+      syncHelper.shutdown();
+    }
+  }
+
+  /* (non-Javadoc)
+   * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+   */
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+      throws IOException, ServletException {
+
+    /*
+     * However, we will set up the filter map with a URL that should never match, so we shouldn't
+     * ever be in here.
+ */ + + chain.doFilter(request, response); + } + + /* (non-Javadoc) + * @see javax.servlet.Filter#init(javax.servlet.FilterConfig) + */ + @Override + public void init(FilterConfig filterConfig) throws ServletException { + String txnID = NodeUtils.getRandomTxnId(); + MdcContext.initialize(txnID, "ElasticSearchSynchronizerFilter", "", "Init", ""); + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "init()"); + + try { + new SyncHelper(OxmModelLoader.getInstance()); + } catch (Exception exc) { + throw new ServletException("Caught an exception while initializing filter", exc); + } + + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java new file mode 100644 index 0000000..6550551 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntitySelfLinkTask.java @@ -0,0 +1,77 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class CollectEntitySelfLinkTask. + */ +public class CollectEntitySelfLinkTask implements Supplier<NetworkTransaction> { + + private NetworkTransaction txn; + + private ActiveInventoryDataProvider provider; + + /** + * Instantiates a new collect entity self link task. 
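The wire-frame comment above describes the intended mechanism: init() runs once at filter startup and kicks off the periodic ES <=> AAI synchronization machinery (here via SyncHelper). A minimal sketch of the scheduled-executor idea the comment alludes to, with an invented task body and illustrative intervals:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class PeriodicSyncSketch {
  public static void main(String[] args) {
    ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    Runnable syncTask = () -> {
      // hypothetical body: trigger one ES <=> AAI synchronization pass
      System.out.println("synchronization pass");
    };
    // first run after 1 minute, then every 6 hours (intervals are illustrative)
    scheduler.scheduleAtFixedRate(syncTask, 1, 360, TimeUnit.MINUTES);
  }
}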
+ * + * @param txn the txn + * @param provider the provider + */ + public CollectEntitySelfLinkTask(NetworkTransaction txn, ActiveInventoryDataProvider provider) { + this.txn = txn; + this.provider = provider; + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + OperationResult result = null; + try { + result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); + } catch (Exception exc) { + result = new OperationResult(500, + "Caught an exception while trying to resolve link = " + exc.getMessage()); + } finally { + result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + txn.setOperationResult(result); + } + + return txn; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java new file mode 100644 index 0000000..1ce8fdc --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/CollectEntityTypeSelfLinksTask.java @@ -0,0 +1,78 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class CollectEntityTypeSelfLinksTask. + */ +public class CollectEntityTypeSelfLinksTask implements Supplier<NetworkTransaction> { + + private ActiveInventoryDataProvider aaiProvider; + + private NetworkTransaction txn; + + /** + * Instantiates a new collect entity type self links task. 
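These task classes all implement Supplier&lt;NetworkTransaction&gt;, which suggests they are meant to be handed to an executor, for example via CompletableFuture.supplyAsync. A sketch of that dispatch pattern, assuming the pool and the completion callback; the real wiring lives in the synchronizer classes, which are outside this excerpt:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;

import org.openecomp.sparky.dal.NetworkTransaction;
import org.openecomp.sparky.synchronizer.task.CollectEntitySelfLinkTask;

public class TaskDispatchSketch {
  // Assumed dispatch pattern: supplyAsync runs the task's get() on the pool,
  // and the callback receives the same NetworkTransaction with its result set.
  public static void dispatch(CollectEntitySelfLinkTask task, ExecutorService pool) {
    CompletableFuture.supplyAsync(task, pool)
        .whenComplete((NetworkTransaction txn, Throwable error) -> {
          if (error != null) {
            System.err.println("task failed: " + error.getMessage());
          } else {
            System.out.println("resolved link: " + txn.getLink());
          }
        });
  }
}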
+ * + * @param txn the txn + * @param provider the provider + */ + public CollectEntityTypeSelfLinksTask(NetworkTransaction txn, + ActiveInventoryDataProvider provider) { + this.aaiProvider = provider; + this.txn = txn; + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + OperationResult result = null; + try { + result = aaiProvider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); + } catch (Exception exc) { + result = new OperationResult(500, + "Caught an exception while trying to resolve link = " + exc.getMessage()); + } finally { + result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + txn.setOperationResult(result); + } + + return txn; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java new file mode 100644 index 0000000..c19c501 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/GetCrossEntityReferenceEntityTask.java @@ -0,0 +1,78 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class GetCrossEntityReferenceEntityTask. + */ +public class GetCrossEntityReferenceEntityTask implements Supplier<NetworkTransaction> { + + private NetworkTransaction txn; + + private ActiveInventoryDataProvider provider; + + /** + * Instantiates a new gets the cross entity reference entity task. 
+   *
+   * @param txn the txn
+   * @param provider the provider
+   */
+  public GetCrossEntityReferenceEntityTask(NetworkTransaction txn,
+      ActiveInventoryDataProvider provider) {
+    this.txn = txn;
+    this.provider = provider;
+  }
+
+  /* (non-Javadoc)
+   * @see java.util.function.Supplier#get()
+   */
+  @Override
+  public NetworkTransaction get() {
+
+    txn.setTaskAgeInMs();
+
+    long startTimeInMs = System.currentTimeMillis();
+    OperationResult result = null;
+    try {
+      result = provider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5);
+    } catch (Exception exc) {
+      result = new OperationResult(500,
+          "Caught an exception while trying to resolve link = " + exc.getMessage());
+    } finally {
+      result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs);
+      txn.setOperationResult(result);
+    }
+
+    return txn;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java
new file mode 100644
index 0000000..3bfbabd
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformActiveInventoryRetrieval.java
@@ -0,0 +1,93 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.synchronizer.task;
+
+import java.util.Map;
+import java.util.function.Supplier;
+
+import org.openecomp.sparky.dal.NetworkTransaction;
+import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider;
+import org.openecomp.sparky.dal.rest.OperationResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+
+/*
+ * Consider abstracting these tasks into common elements, because most of them repeat a generic
+ * call flow pattern.
+ */
+
+/**
+ * The Class PerformActiveInventoryRetrieval.
+ */
+public class PerformActiveInventoryRetrieval implements Supplier<NetworkTransaction> {
+
+  private static Logger logger = LoggerFactory.getLogger(PerformActiveInventoryRetrieval.class);
+
+  private NetworkTransaction txn;
+  private ActiveInventoryDataProvider aaiProvider;
+  private Map<String, String> contextMap;
+
+  /**
+   * Instantiates a new perform active inventory retrieval.
+ * + * @param txn the txn + * @param aaiProvider the aai provider + */ + public PerformActiveInventoryRetrieval(NetworkTransaction txn, + ActiveInventoryDataProvider aaiProvider) { + this.txn = txn; + this.aaiProvider = aaiProvider; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult result = null; + try { + // todo: use proper config instead of hard-coding parameters + result = aaiProvider.queryActiveInventoryWithRetries(txn.getLink(), "application/json", 5); + } catch (Exception exc) { + logger.error("Failure to resolve self link from AAI. Error = ", exc); + result = new OperationResult(500, + "Caught an exception while trying to resolve link = " + exc.getMessage()); + } finally { + result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + txn.setOperationResult(result); + } + + return txn; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchPut.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchPut.java new file mode 100644 index 0000000..b6fe489 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchPut.java @@ -0,0 +1,85 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchPut. + */ +public class PerformElasticSearchPut implements Supplier<NetworkTransaction> { + + private RestDataProvider restDataProvider; + private String jsonPayload; + private NetworkTransaction txn; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search put. 
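The comment above ("consider abstracting these tasks into common elements") is apt: CollectEntitySelfLinkTask, CollectEntityTypeSelfLinksTask, GetCrossEntityReferenceEntityTask and PerformActiveInventoryRetrieval differ only in the call they make. A hedged sketch of what such a base class might look like, including the MDC copy/restore needed so log context follows the work onto the pool thread; the class name and shape are assumptions, not code from this commit:

import java.util.Map;
import java.util.function.Supplier;

import org.openecomp.sparky.dal.NetworkTransaction;
import org.openecomp.sparky.dal.rest.OperationResult;
import org.slf4j.MDC;

public abstract class AbstractResolveTask implements Supplier<NetworkTransaction> {

  private final NetworkTransaction txn;
  private final Map<String, String> contextMap = MDC.getCopyOfContextMap();

  protected AbstractResolveTask(NetworkTransaction txn) {
    this.txn = txn;
  }

  // Subclasses supply only the call that differs between the existing tasks.
  protected abstract OperationResult resolve(String link) throws Exception;

  @Override
  public NetworkTransaction get() {
    txn.setTaskAgeInMs();
    long startTimeInMs = System.currentTimeMillis();
    MDC.setContextMap(contextMap); // restore logging context on the worker thread
    OperationResult result;
    try {
      result = resolve(txn.getLink());
    } catch (Exception exc) {
      result = new OperationResult(500,
          "Caught an exception while trying to resolve link = " + exc.getMessage());
    }
    result.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs);
    txn.setOperationResult(result);
    return txn;
  }
}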
+ * + * @param jsonPayload the json payload + * @param txn the txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + RestDataProvider restDataProvider) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.restDataProvider = restDataProvider; + this.contextMap = MDC.getCopyOfContextMap(); + } + + public PerformElasticSearchPut(String jsonPayload, NetworkTransaction txn, + RestDataProvider restDataProvider, Map<String, String> contextMap) { + this.jsonPayload = jsonPayload; + this.txn = txn; + this.restDataProvider = restDataProvider; + this.contextMap = contextMap; + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + txn.setTaskAgeInMs(); + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + + OperationResult or = restDataProvider.doPut(txn.getLink(), jsonPayload, "application/json"); + + or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + txn.setOperationResult(or); + + return txn; + } +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java new file mode 100644 index 0000000..a144f1c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchRetrieval.java @@ -0,0 +1,69 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchRetrieval. + */ +public class PerformElasticSearchRetrieval implements Supplier<NetworkTransaction> { + + private NetworkTransaction txn; + private RestDataProvider restDataProvider; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search retrieval. 
+ * + * @param elasticSearchTxn the elastic search txn + * @param restDataProvider the rest data provider + */ + public PerformElasticSearchRetrieval(NetworkTransaction elasticSearchTxn, + RestDataProvider restDataProvider) { + this.txn = elasticSearchTxn; + this.restDataProvider = restDataProvider; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + MDC.setContextMap(contextMap); + OperationResult or = restDataProvider.doGet(txn.getLink(), "application/json"); + txn.setOperationResult(or); + return txn; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java new file mode 100644 index 0000000..d5cafc1 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/PerformElasticSearchUpdate.java @@ -0,0 +1,83 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.slf4j.MDC; + +/** + * The Class PerformElasticSearchUpdate. + */ +public class PerformElasticSearchUpdate implements Supplier<NetworkTransaction> { + + private ElasticSearchDataProvider esDataProvider; + private NetworkTransaction operationTracker; + private String updatePayload; + private String updateUrl; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform elastic search update. 
+ * + * @param updateUrl the update url + * @param updatePayload the update payload + * @param esDataProvider the es data provider + * @param transactionTracker the transaction tracker + */ + public PerformElasticSearchUpdate(String updateUrl, String updatePayload, + ElasticSearchDataProvider esDataProvider, NetworkTransaction transactionTracker) { + this.updateUrl = updateUrl; + this.updatePayload = updatePayload; + this.esDataProvider = esDataProvider; + this.contextMap = MDC.getCopyOfContextMap(); + this.operationTracker = new NetworkTransaction(); + operationTracker.setEntityType(transactionTracker.getEntityType()); + operationTracker.setDescriptor(transactionTracker.getDescriptor()); + operationTracker.setOperationType(transactionTracker.getOperationType()); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + operationTracker.setTaskAgeInMs(); + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult or = esDataProvider.doBulkOperation(updateUrl, updatePayload); + + or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + operationTracker.setOperationResult(or); + + return operationTracker; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/PersistOperationResultToDisk.java b/src/main/java/org/openecomp/sparky/synchronizer/task/PersistOperationResultToDisk.java new file mode 100644 index 0000000..894faa5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/PersistOperationResultToDisk.java @@ -0,0 +1,88 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.io.File; +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.slf4j.MDC; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class PersistOperationResultToDisk. + */ +public class PersistOperationResultToDisk implements Supplier<Void> { + + private String fullPath; + private OperationResult dataToStore; + private ObjectMapper mapper; + private Logger logger; + private Map<String, String> contextMap; + + /** + * Instantiates a new persist operation result to disk. 
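PerformElasticSearchUpdate hands doBulkOperation() a pre-built payload, which for Elasticsearch's _bulk endpoint is newline-delimited JSON: an action line followed by a document line. A sketch of building one partial-update entry; the index, type and id values are illustrative, and the exact action format depends on the target Elasticsearch version:

public class BulkPayloadSketch {
  // Builds one NDJSON _bulk entry for a partial document update. The caller is
  // expected to POST the concatenated entries to an Elasticsearch /_bulk URL.
  public static String updateLine(String index, String type, String id, String docJson) {
    return "{\"update\":{\"_index\":\"" + index + "\",\"_type\":\"" + type
        + "\",\"_id\":\"" + id + "\"}}\n"
        + "{\"doc\":" + docJson + "}\n";
  }

  public static void main(String[] args) {
    System.out.print(updateLine("entitysearchindex", "default", "abc123",
        "{\"lastmodTimestamp\":\"2017-06-12T16:41:12.000-0400\"}"));
  }
}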
+ * + * @param fullPath the full path + * @param dataToStore the data to store + * @param mapper the mapper + * @param logger the logger + */ + public PersistOperationResultToDisk(String fullPath, OperationResult dataToStore, + ObjectMapper mapper, Logger logger) { + + this.fullPath = fullPath; + this.mapper = mapper; + this.dataToStore = dataToStore; + this.logger = logger; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public Void get() { + MDC.setContextMap(contextMap); + File file = new File(fullPath); + if (!file.exists()) { + try { + mapper.writeValue(new File(fullPath), dataToStore); + } catch (Exception exc) { + logger.error(AaiUiMsgs.DISK_DATA_WRITE_IO_ERROR, exc.toString()); + } + } + + return null; + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java b/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java new file mode 100644 index 0000000..f69ce38 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/RetrieveOperationResultFromDisk.java @@ -0,0 +1,92 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.synchronizer.task; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class RetrieveOperationResultFromDisk. + */ +public class RetrieveOperationResultFromDisk implements Supplier<OperationResult> { + + private String fullPath; + private ObjectMapper mapper; + private Logger logger; + + /** + * Instantiates a new retrieve operation result from disk. 
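+   *
+   * <p>Illustrative read-back counterpart to {@code PersistOperationResultToDisk}
+   * (the path is an assumption):
+   * <pre>{@code
+   * OperationResult cached = new RetrieveOperationResultFromDisk(
+   *     "/tmp/sparky/txnCache/entity-1.json", new ObjectMapper(), logger).get();
+   * // cached is null when the file is missing or cannot be read
+   * }</pre>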
+ * + * @param fullPath the full path + * @param mapper the mapper + * @param logger the logger + */ + public RetrieveOperationResultFromDisk(String fullPath, ObjectMapper mapper, Logger logger) { + + this.fullPath = fullPath; + this.mapper = mapper; + this.logger = logger; + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public OperationResult get() { + + try { + File file = new File(fullPath); + if (file.exists()) { + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.WILL_RETRIEVE_TXN, fullPath); + } + + Path path = Paths.get(fullPath); + byte[] byteBuffer = Files.readAllBytes(path); + + OperationResult opResult = mapper.readValue(byteBuffer, OperationResult.class); + + return opResult; + } else { + logger.debug(AaiUiMsgs.FAILED_TO_RESTORE_TXN_FILE_MISSING, fullPath); + } + } catch (IOException exc) { + logger.error(AaiUiMsgs.DISK_CACHE_READ_IO_ERROR, exc.getLocalizedMessage()); + } + return null; + } + +} diff --git a/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java b/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java new file mode 100644 index 0000000..0134b0d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/synchronizer/task/StoreDocumentTask.java @@ -0,0 +1,81 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.synchronizer.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.NetworkTransaction; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestDataProvider; +import org.openecomp.sparky.synchronizer.entity.IndexDocument; +import org.slf4j.MDC; + +/** + * The Class StoreDocumentTask. + */ +public class StoreDocumentTask implements Supplier<NetworkTransaction> { + + private IndexDocument doc; + + private NetworkTransaction txn; + + private RestDataProvider esDataProvider; + private Map<String, String> contextMap; + + /** + * Instantiates a new store document task. 
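+   *
+   * <p>Illustrative: when supplied, e.g. via {@code CompletableFuture.supplyAsync(storeTask,
+   * executor)} (the executor is an assumption about the calling synchronizer), the task PUTs
+   * {@code doc.getIndexDocumentJson()} to {@code txn.getLink()} and records the response time.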
+ * + * @param doc the doc + * @param txn the txn + * @param esDataProvider the es data provider + */ + public StoreDocumentTask(IndexDocument doc, NetworkTransaction txn, + RestDataProvider esDataProvider) { + this.doc = doc; + this.txn = txn; + this.esDataProvider = esDataProvider; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NetworkTransaction get() { + txn.setTaskAgeInMs(); + + long startTimeInMs = System.currentTimeMillis(); + MDC.setContextMap(contextMap); + OperationResult or = + esDataProvider.doPut(txn.getLink(), doc.getIndexDocumentJson(), "application/json"); + or.setResponseTimeInMs(System.currentTimeMillis() - startTimeInMs); + + txn.setOperationResult(or); + + return txn; + } + +} diff --git a/src/main/java/org/openecomp/sparky/util/ConfigHelper.java b/src/main/java/org/openecomp/sparky/util/ConfigHelper.java new file mode 100644 index 0000000..5d660ff --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/ConfigHelper.java @@ -0,0 +1,194 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.util; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; +import java.util.Set; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.logging.AaiUiMsgs; + +/** + * The Class ConfigHelper. + */ +public class ConfigHelper { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(ConfigHelper.class); + + /** + * Gets the config with prefix. + * + * @param configPrefix the config prefix + * @param properties the properties + * @return the config with prefix + */ + public static Properties getConfigWithPrefix(String configPrefix, Properties properties) { + + /* + * The idea here is collect properties groups prefixed with the same origin + */ + + Set<Object> set = properties.keySet(); + Properties newProps = new Properties(); + + for (Object k : set) { + String ks = (String) k; + if (ks.startsWith(configPrefix)) { + + String temp = ks.replaceFirst(configPrefix + ".", ""); + newProps.setProperty(temp, properties.getProperty(ks)); + } + } + + return newProps; + } + + /** + * Load config. 
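+   *
+   * <p>Illustrative usage (the file name and property keys are assumptions):
+   * <pre>{@code
+   * Properties props = ConfigHelper.loadConfig("appconfig/sparky.properties");
+   * String host = ConfigHelper.propertyFetch(props, "elasticsearch.host", "localhost");
+   * }</pre>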
+   *
+   * @param fileName the file name
+   * @return the properties
+   * @throws Exception the exception
+   */
+  public static Properties loadConfig(String fileName) throws Exception {
+
+    String basePath = System.getProperty("user.dir");
+    Properties props = new Properties();
+
+    // try-with-resources ensures the stream is closed even if load() fails
+    try (InputStream fileInputStream = new FileInputStream(basePath + "/" + fileName)) {
+      props.load(fileInputStream);
+    }
+
+    return props;
+  }
+
+  /**
+   * Load config from explicit path.
+   *
+   * @param fileName the file name
+   * @return the properties
+   */
+  public static Properties loadConfigFromExplicitPath(String fileName) {
+
+    Properties props = new Properties();
+
+    try (InputStream fileInputStream = new FileInputStream(fileName)) {
+      props.load(fileInputStream);
+    } catch (Exception exc) {
+      LOG.warn(AaiUiMsgs.CONFIG_NOT_FOUND_VERBOSE, fileName, exc.getLocalizedMessage());
+    }
+
+    return props;
+  }
+
+  /**
+   * Property fetch.
+   *
+   * @param config the config
+   * @param propName the prop name
+   * @param defaultValue the default value
+   * @return the string
+   */
+  public static String propertyFetch(Properties config, String propName, String defaultValue) {
+    return config.getProperty(propName, defaultValue);
+  }
+
+  public static boolean isEssDevModeEnabled() {
+    return Boolean.parseBoolean(System.getProperty("isEssDevMode", "false"));
+  }
+
+  /**
+   * Gets the filepath.
+   *
+   * @param fileName the file name
+   * @param isRelativePath the is relative path
+   * @return the filepath
+   */
+  public static String getFilepath(String fileName, boolean isRelativePath) {
+
+    String filepath = null;
+
+    if (isRelativePath) {
+      filepath = System.getProperty("user.dir") + "/" + fileName;
+
+    } else {
+      filepath = fileName;
+    }
+
+    return filepath;
+
+  }
+
+  /**
+   * Gets the file contents.
+   *
+   * @param fileName the file name
+   * @return the file contents
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public static String getFileContents(String fileName) throws IOException {
+
+    LOG.debug(AaiUiMsgs.FILE_READ_IN_PROGRESS, fileName);
+
+    File file = new File(fileName);
+
+    if (!file.exists()) {
+      throw new FileNotFoundException("Failed to load file = " + fileName);
+    }
+
+    if (!file.isDirectory()) {
+      // try-with-resources closes the reader on every exit path
+      try (BufferedReader br = new BufferedReader(new FileReader(file))) {
+        StringBuilder sb = new StringBuilder();
+        String line = br.readLine();
+
+        while (line != null) {
+          sb.append(line);
+          sb.append(System.lineSeparator());
+          line = br.readLine();
+        }
+
+        return sb.toString();
+      }
+    }
+
+    LOG.warn(AaiUiMsgs.FILE_NOT_FOUND, fileName);
+    return null;
+
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/util/EncryptConvertor.java b/src/main/java/org/openecomp/sparky/util/EncryptConvertor.java
new file mode 100644
index 0000000..6b03302
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/util/EncryptConvertor.java
@@ -0,0 +1,150 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.util; + +/** + * The Class EncryptConvertor. + */ +public class EncryptConvertor { + + private static final char[] HEX_CHARS = "0123456789abcdef".toCharArray(); + + /** + * toHexString(String) - convert a string into its hex equivalent. + * + * @param buf the buf + * @return the string + */ + public static final String toHexString(String buf) { + if (buf == null) { + return ""; + } + return toHexString(buf.getBytes()); + } + + /** + * toHexString(byte[]) - convert a byte-string into its hex equivalent. + * + * @param buf the buf + * @return the string + */ + public static final String toHexString(byte[] buf) { + + if (buf == null) { + return ""; + } + char[] chars = new char[2 * buf.length]; + for (int i = 0; i < buf.length; ++i) { + chars[2 * i] = HEX_CHARS[(buf[i] & 0xF0) >>> 4]; + chars[2 * i + 1] = HEX_CHARS[buf[i] & 0x0F]; + } + return new String(chars); + } + + /** + * Convert a hex string to its equivalent value. + * + * @param hexString the hex string + * @return the string + * @throws Exception the exception + */ + public static final String stringFromHex(String hexString) throws Exception { + if (hexString == null) { + return ""; + } + return stringFromHex(hexString.toCharArray()); + } + + /** + * String from hex. + * + * @param hexCharArray the hex char array + * @return the string + * @throws Exception the exception + */ + public static final String stringFromHex(char[] hexCharArray) throws Exception { + if (hexCharArray == null) { + return ""; + } + return new String(bytesFromHex(hexCharArray)); + } + + /** + * Bytes from hex. + * + * @param hexString the hex string + * @return the byte[] + * @throws Exception the exception + */ + public static final byte[] bytesFromHex(String hexString) throws Exception { + if (hexString == null) { + return new byte[0]; + } + return bytesFromHex(hexString.toCharArray()); + } + + /** + * Bytes from hex. + * + * @param hexCharArray the hex char array + * @return the byte[] + * @throws Exception the exception + */ + public static final byte[] bytesFromHex(char[] hexCharArray) throws Exception { + if (hexCharArray == null) { + return new byte[0]; + } + int len = hexCharArray.length; + if ((len % 2) != 0) { + throw new Exception("Odd number of characters: '" + String.valueOf(hexCharArray) + "'"); + } + byte[] txtInByte = new byte[len / 2]; + int counter = 0; + for (int i = 0; i < len; i += 2) { + txtInByte[counter++] = + (byte) (((fromHexDigit(hexCharArray[i], i) << 4) | fromHexDigit(hexCharArray[i + 1], i)) + & 0xFF); + } + return txtInByte; + } + + /** + * From hex digit. 
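+   *
+   * <p>For example, {@code fromHexDigit('f', 0)} returns {@code 15}, while a non-hex
+   * character such as {@code 'g'} raises an exception naming the offending index.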
+   *
+   * @param ch the ch
+   * @param index the index
+   * @return the int
+   * @throws Exception the exception
+   */
+  protected static final int fromHexDigit(char ch, int index) throws Exception {
+    int digit = Character.digit(ch, 16);
+    if (digit == -1) {
+      throw new Exception("Illegal hex character '" + ch + "' at index " + index);
+    }
+    return digit;
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/util/Encryptor.java b/src/main/java/org/openecomp/sparky/util/Encryptor.java
new file mode 100644
index 0000000..87abe16
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/util/Encryptor.java
@@ -0,0 +1,137 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.util;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.eclipse.jetty.util.security.Password;
+
+/**
+ * The Class Encryptor.
+ */
+public class Encryptor {
+
+  /**
+   * Instantiates a new encryptor.
+   */
+  public Encryptor() {
+  }
+
+  /**
+   * Decrypt value.
+   *
+   * @param value the value
+   * @return the string
+   */
+  public String decryptValue(String value) {
+    String decryptedValue = "";
+
+    try {
+      decryptedValue = Password.deobfuscate(value);
+    } catch (Exception exc) {
+      System.err.println("Cannot decrypt '" + value + "': " + exc.toString());
+    }
+
+    return decryptedValue;
+  }
+
+  /**
+   * Encrypt value. Implements the {@code -e} option advertised by {@link #usage()},
+   * mirroring {@link #decryptValue(String)} with the same Jetty {@code Password}
+   * obfuscation utility.
+   *
+   * @param value the value
+   * @return the string
+   */
+  public String encryptValue(String value) {
+    String encryptedValue = "";
+
+    try {
+      encryptedValue = Password.obfuscate(value);
+    } catch (Exception exc) {
+      System.err.println("Cannot encrypt '" + value + "': " + exc.toString());
+    }
+
+    return encryptedValue;
+  }
+
+  /**
+   * Usage.
+   */
+  public static void usage() {
+    usage(null);
+  }
+
+  /**
+   * Usage.
+   *
+   * @param msg the msg
+   */
+  public static void usage(String msg) {
+    if (msg != null) {
+      System.err.println(msg);
+    }
+    System.err.println("Usage: java Encryptor -e value");
+    System.err.println("\tEncrypt the given value");
+    System.err.println("Usage: java Encryptor -d value");
+    System.err.println("\tDecrypt the given value");
+    System.exit(1);
+  }
+
+  /**
+   * The main method.
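+   *
+   * <p>Illustrative invocations (the obfuscated values shown are made up):
+   * <pre>{@code
+   * java Encryptor -e mypassword     // prints something like OBF:1v2j1uum...
+   * java Encryptor -d OBF:1v2j1uum...
+   * }</pre>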
+   *
+   * @param args the arguments
+   */
+  public static void main(String[] args) {
+
+    Options options = new Options();
+    options.addOption("e", true, "value to encrypt");
+    options.addOption("d", true, "value to decrypt");
+    options.addOption("h", false, "show help");
+    options.addOption("?", false, "show help");
+
+    String value = null;
+    boolean encrypt = false;
+    boolean decrypt = false;
+
+    CommandLineParser parser = new BasicParser();
+    CommandLine cmd = null;
+
+    try {
+      cmd = parser.parse(options, args);
+
+      if (cmd.hasOption("e")) {
+        value = cmd.getOptionValue("e");
+        encrypt = true;
+      }
+
+      if (cmd.hasOption("d")) {
+        value = cmd.getOptionValue("d");
+        decrypt = true;
+      }
+
+      if (cmd.hasOption("?") || cmd.hasOption("h")) {
+        usage();
+        System.exit(0);
+      }
+
+      if ((encrypt && decrypt) || (!encrypt && !decrypt)) {
+        usage("Must specify one (and only one) of the -e or -d options");
+      }
+
+      Encryptor encryptor = new Encryptor();
+
+      if (encrypt) {
+        System.out.println(encryptor.encryptValue(value));
+      } else if (decrypt) {
+        System.out.println(encryptor.decryptValue(value));
+      }
+    } catch (ParseException exc) {
+      System.out.println("Failed to parse command line properties: " + exc.toString());
+    } catch (Exception exc) {
+      System.out.println("Failure: " + exc.toString());
+    }
+
+    System.exit(0);
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/util/ErrorUtil.java b/src/main/java/org/openecomp/sparky/util/ErrorUtil.java
new file mode 100644
index 0000000..9cea8b3
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/util/ErrorUtil.java
@@ -0,0 +1,63 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+
+package org.openecomp.sparky.util;
+
+/**
+ * The Class ErrorUtil.
+ */
+public class ErrorUtil {
+
+  /**
+   * Extract stack trace elements.
+   *
+   * @param maxNumberOfElementsToCapture the max number of elements to capture
+   * @param exc the exc
+   * @return the string
+   */
+  public static String extractStackTraceElements(int maxNumberOfElementsToCapture, Exception exc) {
+    StringBuilder sb = new StringBuilder(128);
+
+    StackTraceElement[] stackTraceElements = exc.getStackTrace();
+
+    if (stackTraceElements != null) {
+
+      /*
+       * We want to avoid an index out-of-bounds error, so we will make sure to only extract the
+       * number of frames from the stack trace that actually exist.
+       */
+
+      int numFramesToExtract = Math.min(maxNumberOfElementsToCapture, stackTraceElements.length);
+
+      for (int x = 0; x < numFramesToExtract; x++) {
+        sb.append(stackTraceElements[x]).append("\n");
+      }
+
+    }
+
+    return sb.toString();
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/util/JsonXmlConverter.java b/src/main/java/org/openecomp/sparky/util/JsonXmlConverter.java
new file mode 100644
index 0000000..845e0af
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/util/JsonXmlConverter.java
@@ -0,0 +1,80 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.util;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.json.XML;
+
+/**
+ * The Class JsonXmlConverter.
+ */
+public class JsonXmlConverter {
+
+  /**
+   * Checks if the supplied text is valid JSON (either a JSON object or a JSON array).
+   *
+   * @param text the text
+   * @return true, if is valid json
+   */
+  public static boolean isValidJson(String text) {
+    try {
+      new JSONObject(text);
+    } catch (JSONException ex) {
+      try {
+        new JSONArray(text);
+      } catch (JSONException ex1) {
+        return false;
+      }
+    }
+
+    return true;
+  }
+
+  /**
+   * Convert JSON to XML.
+   *
+   * @param jsonText the json text
+   * @return the string
+   */
+  public static String convertJsontoXml(String jsonText) {
+    JSONObject jsonObj = new JSONObject(jsonText);
+    return XML.toString(jsonObj);
+  }
+
+  /**
+   * Convert XML to JSON.
+   *
+   * @param xmlText the xml text
+   * @return the string
+   */
+  public static String convertXmltoJson(String xmlText) {
+    JSONObject jsonObj = XML.toJSONObject(xmlText);
+    return jsonObj.toString();
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/util/KeystoreBuilder.java b/src/main/java/org/openecomp/sparky/util/KeystoreBuilder.java
new file mode 100644
index 0000000..6361e95
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/util/KeystoreBuilder.java
@@ -0,0 +1,525 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.UnknownHostException;
+import java.security.KeyManagementException;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.CertificateException;
+import java.security.cert.CertificateParsingException;
+import java.security.cert.X509Certificate;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLException;
+import javax.net.ssl.SSLSocket;
+import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.TrustManagerFactory;
+import javax.net.ssl.X509TrustManager;
+
+/**
+ * The Class KeystoreBuilder.
+ */
+public class KeystoreBuilder {
+
+  /**
+   * The Class EndPoint.
+   */
+  private class EndPoint {
+    private String hostname;
+    private int port;
+
+    /**
+     * Instantiates a new end point.
+     */
+    @SuppressWarnings("unused")
+    public EndPoint() {}
+
+    /**
+     * Instantiates a new end point.
+     *
+     * @param host the host
+     * @param port the port
+     */
+    public EndPoint(String host, int port) {
+      this.hostname = host;
+      this.port = port;
+    }
+
+    public String getHostname() {
+      return hostname;
+    }
+
+    @SuppressWarnings("unused")
+    public void setHostname(String hostname) {
+      this.hostname = hostname;
+    }
+
+    public int getPort() {
+      return port;
+    }
+
+    public void setPort(int port) {
+      this.port = port;
+    }
+
+    /* (non-Javadoc)
+     * @see java.lang.Object#toString()
+     */
+    @Override
+    public String toString() {
+      return "EndPoint [hostname=" + hostname + ", port=" + port + "]";
+    }
+
+  }
+
+  private List<EndPoint> endpoints = new ArrayList<EndPoint>();
+
+  /**
+   * Initialize end points list.
+   *
+   * @param endpointList the endpoint list
+   */
+  private void initializeEndPointsList(String endpointList) {
+    String[] endpointUris = endpointList.split(";");
+
+    for (String endpointUri : endpointUris) {
+
+      // strip an optional http:// or https:// scheme prefix before splitting
+      // host from port; both replacements must operate on the same string
+      String ipAndPort = endpointUri.replaceAll("http://", "");
+      ipAndPort = ipAndPort.replaceAll("https://", "");
+
+      String[] hostAndPort = ipAndPort.split(":");
+
+      String hostname = hostAndPort[0];
+      int port = Integer.parseInt(hostAndPort[1]);
+
+      EndPoint ep = new EndPoint(hostname, port);
+      endpoints.add(ep);
+    }
+
+  }
+
+  /**
+   * Instantiates a new keystore builder.
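+   *
+   * <p>The endpoint list is a semicolon-delimited set of {@code host:port} pairs, each with
+   * an optional http/https scheme prefix; the hosts below are illustrative:
+   * <pre>{@code
+   * KeystoreBuilder kb = new KeystoreBuilder("localhost:8440;localhost:8442");
+   * }</pre>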
+ * + * @param endpointList the endpoint list + * @throws NoSuchAlgorithmException the no such algorithm exception + */ + public KeystoreBuilder(String endpointList) throws NoSuchAlgorithmException { + initializeEndPointsList(endpointList); + sha1 = MessageDigest.getInstance("SHA1"); + md5 = MessageDigest.getInstance("MD5"); + } + + private static final String SEP = File.separator; + private SavingTrustManager savingTrustManager; + private SSLSocketFactory sslSocketFactory; + private MessageDigest sha1; + private MessageDigest md5; + private KeyStore ks; + private String keystoreFileName; + private String keystorePassword; + private boolean dumpCertDetails = false; + + public void setDumpCertDetails(boolean shouldSet) { + dumpCertDetails = shouldSet; + } + + /** + * Update keystore. + * + * @param keystoreFileName the keystore file name + * @param keystorePassword the keystore password + * @throws KeyStoreException the key store exception + * @throws NoSuchAlgorithmException the no such algorithm exception + * @throws CertificateException the certificate exception + * @throws IOException Signals that an I/O exception has occurred. + * @throws KeyManagementException the key management exception + */ + public void updateKeystore(String keystoreFileName, String keystorePassword) + throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException, + KeyManagementException { + + this.keystoreFileName = keystoreFileName; + this.keystorePassword = keystorePassword; + + File file = new File(keystoreFileName); + String password = keystorePassword; + + if (file.isFile() == false) { + + File dir = new File(System.getProperty("java.home") + SEP + "lib" + SEP + "security"); + file = new File(dir, "jssecacerts"); + if (file.isFile() == false) { + + file = new File(dir, "cacerts"); + System.out.println("keystore file doesn't exist, preloading new file with cacerts"); + + } else { + System.out.println("keystore file doesn't exist, preloading new file with jssecacerts"); + } + password = "changeit"; + + } + + InputStream in = new FileInputStream(file); + ks = KeyStore.getInstance(KeyStore.getDefaultType()); + ks.load(in, password.toCharArray()); + in.close(); + + SSLContext context = SSLContext.getInstance("TLS"); + TrustManagerFactory tmf = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(ks); + X509TrustManager defaultTrustManager = (X509TrustManager) tmf.getTrustManagers()[0]; + savingTrustManager = new SavingTrustManager(defaultTrustManager); + context.init(null, new TrustManager[] {savingTrustManager}, null); + sslSocketFactory = context.getSocketFactory(); + + System.out.println("About to add the following endpoint server certificates to the keystore:"); + for (EndPoint ep : endpoints) { + System.out.println("\t--------------------------"); + System.out.println("\t" + ep.toString()); + + X509Certificate[] certChain = + getCertificateChainForRemoteEndpoint(ep.getHostname(), ep.getPort()); + + if (certChain == null) { + System.out.println("Could not obtain server certificate chain"); + return; + } + + dumpCertChainInfo(certChain); + + updateKeyStoreWithCertChain(certChain); + + } + + } + + /** + * Gets the certificate chain for remote endpoint. + * + * @param hostname the hostname + * @param port the port + * @return the certificate chain for remote endpoint + * @throws UnknownHostException the unknown host exception + * @throws IOException Signals that an I/O exception has occurred. 
+   */
+  private X509Certificate[] getCertificateChainForRemoteEndpoint(String hostname, int port)
+      throws UnknownHostException, IOException {
+
+    // connect to the endpoint that was passed in; hardcoding a host here would
+    // silently ignore the hostname and port parameters
+    System.out.println("Opening connection to " + hostname + ":" + port + "...");
+    SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(hostname, port);
+    socket.setSoTimeout(10000);
+
+    try {
+      System.out.println("Starting SSL handshake...");
+      socket.startHandshake();
+      socket.close();
+      System.out.println("\nNo errors, certificate is already trusted");
+      System.exit(0);
+    } catch (SSLException exc) {
+      System.out.println("\nCaught SSL exception, we are not authorized to access this server yet");
+    }
+
+    return savingTrustManager.chain;
+
+  }
+
+  /**
+   * Dump cert chain info.
+   *
+   * @param chain the chain
+   * @throws NoSuchAlgorithmException the no such algorithm exception
+   * @throws CertificateEncodingException the certificate encoding exception
+   * @throws CertificateParsingException the certificate parsing exception
+   */
+  private void dumpCertChainInfo(X509Certificate[] chain)
+      throws NoSuchAlgorithmException, CertificateEncodingException, CertificateParsingException {
+
+    System.out.println();
+    System.out.println("Server sent " + chain.length + " certificate(s):");
+    System.out.println();
+
+    for (int i = 0; i < chain.length; i++) {
+      X509Certificate cert = chain[i];
+
+      if (dumpCertDetails) {
+        System.out.println("Full cert details @ index = " + i + " \n" + cert.toString());
+      }
+
+      System.out.println("Subject: " + cert.getSubjectDN());
+      System.out.println("Issuer: " + cert.getIssuerDN());
+      System.out.println("SubjectAlternativeNames: ");
+
+      /*
+       * RFC-5280, pg. 38, section 4.2.1.6 ( Subject Alternative Names )
+       *
+       * Finally, the semantics of subject alternative names that include wildcard characters (e.g.,
+       * as a placeholder for a set of names) are not addressed by this specification. Applications
+       * with specific requirements MAY use such names, but they must define the semantics.
+       *
+       * id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 }
+       *
+       * SubjectAltName ::= GeneralNames
+       *
+       * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName
+       *
+       * GeneralName ::= CHOICE { otherName [0] OtherName, rfc822Name [1] IA5String, dNSName [2]
+       * IA5String, <-- the 2 in the output is a type operand x400Address [3] ORAddress,
+       * directoryName [4] Name, ediPartyName [5] EDIPartyName, uniformResourceIdentifier [6]
+       * IA5String, iPAddress [7] OCTET STRING, registeredID [8] OBJECT IDENTIFIER }
+       *
+       * OtherName ::= SEQUENCE { type-id OBJECT IDENTIFIER, value [0] EXPLICIT ANY DEFINED BY
+       * type-id }
+       *
+       * EDIPartyName ::= SEQUENCE { nameAssigner [0] DirectoryString OPTIONAL, partyName [1]
+       * DirectoryString }
+       *
+       */
+
+      // getSubjectAlternativeNames() returns null for certificates that carry
+      // no SAN extension, so guard before iterating
+      Collection<List<?>> sans = cert.getSubjectAlternativeNames();
+
+      if (sans == null) {
+        System.out.println("\t<none>");
+        continue;
+      }
+
+      for (List<?> san : sans) {
+
+        /*
+         * It seems the structure of the array elements contained within the SAN is: [<sanType>,
+         * <sanValue>]*
+         *
+         */
+
+        int type = ((Integer) san.get(0)).intValue();
+        String typeStr = getSanType(type);
+        String value = (String) san.get(1);
+
+        System.out.println(String.format("\tType:'%s', Value: '%s'.", typeStr, value));
+
+      }
+
+    }
+
+  }
+
+  /**
+   * Gets the subject alternative names.
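+   *
+   * <p>For a certificate carrying dNSName SAN entries this returns just the values,
+   * e.g. (illustrative) {@code ["aai.example.org", "*.aai.example.org"]}.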
+ * + * @param cert the cert + * @return the subject alternative names + * @throws CertificateParsingException the certificate parsing exception + */ + private List<String> getSubjectAlternativeNames(X509Certificate cert) + throws CertificateParsingException { + + Collection<List<?>> sans = cert.getSubjectAlternativeNames(); + List<String> subjectAlternativeNames = new ArrayList<String>(); + + for (List<?> san : sans) { + + /* + * It seems the structure of the array elements contained within the SAN is: [<sanType>, + * <sanValue>]* + * + */ + + String value = (String) san.get(1); + subjectAlternativeNames.add(value); + } + + return subjectAlternativeNames; + } + + /** + * Update key store with cert chain. + * + * @param chain the chain + * @throws NoSuchAlgorithmException the no such algorithm exception + * @throws KeyStoreException the key store exception + * @throws CertificateException the certificate exception + * @throws IOException Signals that an I/O exception has occurred. + */ + private void updateKeyStoreWithCertChain(X509Certificate[] chain) + throws NoSuchAlgorithmException, KeyStoreException, CertificateException, IOException { + + for (X509Certificate cert : chain) { + + List<String> sans = getSubjectAlternativeNames(cert); + + for (String san : sans) { + ks.setCertificateEntry(san, cert); + System.out.println( + "Added certificate to keystore '" + keystoreFileName + "' using alias '" + san + "'"); + } + } + + OutputStream out = new FileOutputStream(keystoreFileName); + ks.store(out, keystorePassword.toCharArray()); + out.close(); + + } + + + /** + * The Class SavingTrustManager. + */ + private static class SavingTrustManager implements X509TrustManager { + + private final X509TrustManager tm; + private X509Certificate[] chain; + + /** + * Instantiates a new saving trust manager. + * + * @param tm the tm + */ + SavingTrustManager(X509TrustManager tm) { + this.tm = tm; + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + throw new UnsupportedOperationException(); + } + + /* (non-Javadoc) + * @see javax.net.ssl.X509TrustManager#checkClientTrusted(java.security.cert.X509Certificate[], java.lang.String) + */ + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) + throws CertificateException { + throw new UnsupportedOperationException(); + } + + /* (non-Javadoc) + * @see javax.net.ssl.X509TrustManager#checkServerTrusted(java.security.cert.X509Certificate[], java.lang.String) + */ + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType) + throws CertificateException { + this.chain = chain; + tm.checkServerTrusted(chain, authType); + } + } + + private static final char[] HEXDIGITS = "0123456789abcdef".toCharArray(); + + /** + * Gets the san type. + * + * @param type the type + * @return the san type + */ + // TODO: convert to enum(int,string) + private String getSanType(int type) { + switch (type) { + case 0: + return "otherName"; + case 1: + return "rfc822Name"; + case 2: + return "dNSName"; + case 3: + return "x400Address"; + case 4: + return "directoryName"; + case 5: + return "ediPartyName"; + case 6: + return "uniformResourceIdentifier"; + case 7: + return "iPAddress"; + case 8: + return "registeredID"; + default: + return "unknownSanType"; + } + } + + + /** + * To hex string. 
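+   *
+   * <p>For example (illustrative), {@code toHexString(new byte[] {0x0A, (byte) 0xFF})}
+   * returns {@code "0a ff "} (note the trailing space after every byte).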
+ * + * @param bytes the bytes + * @return the string + */ + private static String toHexString(byte[] bytes) { + StringBuilder sb = new StringBuilder(bytes.length * 3); + for (int b : bytes) { + b &= 0xff; + sb.append(HEXDIGITS[b >> 4]); + sb.append(HEXDIGITS[b & 15]); + sb.append(' '); + } + return sb.toString(); + } + + + + /** + * The main method. + * + * @param args the arguments + * @throws Exception the exception + */ + public static void main(String[] args) throws Exception { + + // String endpointList = "aai-int1.test.att.com:8440;aai-int1.dev.att.com:8442"; + + /* + * Examples: localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 false + * localhost:8440;localhost:8442 d:\1\adhoc_keystore.jks aaiDomain2 true + */ + + if (args.length != 4) { + System.out.println( + "Usage: KeyBuilder <[ip:port];*> <keystoreFileName>" + + " <keystorePassword> <dumpCertDetails> "); + System.exit(1); + } + KeystoreBuilder kb = new KeystoreBuilder(args[0]); + kb.setDumpCertDetails(Boolean.parseBoolean(args[3])); + kb.updateKeystore(args[1], args[2]); + + } +} + + diff --git a/src/main/java/org/openecomp/sparky/util/NodeUtils.java b/src/main/java/org/openecomp/sparky/util/NodeUtils.java new file mode 100644 index 0000000..1789fcf --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/NodeUtils.java @@ -0,0 +1,714 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ + +package org.openecomp.sparky.util; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.lang.Thread.UncaughtExceptionHandler; +import java.nio.ByteBuffer; +import java.security.SecureRandom; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.servlet.http.HttpServletRequest; +import javax.xml.stream.XMLStreamConstants; + +import org.openecomp.cl.api.Logger; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.google.common.util.concurrent.ThreadFactoryBuilder; + + +/** + * The Class NodeUtils. + */ +public class NodeUtils { + private static SecureRandom sRandom = new SecureRandom(); + + public static synchronized String getRandomTxnId(){ + byte bytes[] = new byte[6]; + sRandom.nextBytes(bytes); + return Integer.toUnsignedString(ByteBuffer.wrap(bytes).getInt()); + } + + /** + * Builds the depth padding. + * + * @param depth the depth + * @return the string + */ + public static String buildDepthPadding(int depth) { + StringBuilder sb = new StringBuilder(32); + + for (int x = 0; x < depth; x++) { + sb.append(" "); + } + + return sb.toString(); + } + + /** + * Checks if is numeric. + * + * @param numberStr the number str + * @return true, if is numeric + */ + public static boolean isNumeric(String numberStr) { + + try { + Double.parseDouble(numberStr); + } catch (Exception exc) { + return false; + } + + return true; + + } + + /** + * Creates the named executor. + * + * @param name the name + * @param numWorkers the num workers + * @param logger the logger + * @return the executor service + */ + public static ExecutorService createNamedExecutor(String name, int numWorkers, final Logger logger) { + UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() { + + @Override + public void uncaughtException(Thread thread, Throwable exc) { + + logger.error(AaiUiMsgs.ERROR_GENERIC, thread.getName() + ": " + exc); + + } + }; + + ThreadFactory namedThreadFactory = new ThreadFactoryBuilder().setNameFormat(name + "-%d") + .setUncaughtExceptionHandler(uncaughtExceptionHandler).build(); + + return Executors.newScheduledThreadPool(numWorkers + 1, namedThreadFactory); + } + + /** + * Calculate edit attribute uri. 
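+   *
+   * <p>Strips everything up to and including the version segment matched by
+   * {@code TierSupportUiConstants.URI_VERSION_REGEX_PATTERN}; assuming that pattern matches
+   * segments like {@code /v7/}, an illustrative call would be:
+   * <pre>{@code
+   * calculateEditAttributeUri("https://host:9292/aai/v7/network/generic-vnfs/generic-vnf/id-1");
+   * // returns "network/generic-vnfs/generic-vnf/id-1"
+   * }</pre>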
+ * + * @param link the link + * @return the string + */ + public static String calculateEditAttributeUri(String link) { + String uri = null; + + if (link != null) { + + Pattern pattern = Pattern.compile(TierSupportUiConstants.URI_VERSION_REGEX_PATTERN); + Matcher matcher = pattern.matcher(link); + if (matcher.find()) { + uri = link.substring(matcher.end()); + } + } + return uri; + } + + /** + * Generate unique sha digest. + * + * @param keys the keys + * @return the string + */ + public static String generateUniqueShaDigest(String... keys) { + + if ((keys == null) || keys.length == 0) { + return null; + } + + final String keysStr = Arrays.asList(keys).toString(); + final String hashedId = org.apache.commons.codec.digest.DigestUtils.sha256Hex(keysStr); + + return hashedId; + } + + /** + * Gets the node field as text. + * + * @param node the node + * @param fieldName the field name + * @return the node field as text + */ + public static String getNodeFieldAsText(JsonNode node, String fieldName) { + + String fieldValue = null; + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + fieldValue = valueNode.asText(); + } + + return fieldValue; + } + + private static final String ENTITY_RESOURCE_KEY_FORMAT = "%s.%s"; + + /** + * Convert a millisecond duration to a string format + * + * @param millis A duration to convert to a string form + * @return A string of the form "X Days Y Hours Z Minutes A Seconds". + */ + + private static final String TIME_BREAK_DOWN_FORMAT = + "[ %d days, %d hours, %d minutes, %d seconds ]"; + + /** + * Gets the duration breakdown. + * + * @param millis the millis + * @return the duration breakdown + */ + public static String getDurationBreakdown(long millis) { + + if (millis < 0) { + return String.format(TIME_BREAK_DOWN_FORMAT, 0, 0, 0, 0); + } + + long days = TimeUnit.MILLISECONDS.toDays(millis); + millis -= TimeUnit.DAYS.toMillis(days); + long hours = TimeUnit.MILLISECONDS.toHours(millis); + millis -= TimeUnit.HOURS.toMillis(hours); + long minutes = TimeUnit.MILLISECONDS.toMinutes(millis); + millis -= TimeUnit.MINUTES.toMillis(minutes); + long seconds = TimeUnit.MILLISECONDS.toSeconds(millis); + + return String.format(TIME_BREAK_DOWN_FORMAT, days, hours, minutes, seconds); + + } + + /** + * Checks if is equal. + * + * @param n1 the n 1 + * @param n2 the n 2 + * @return true, if is equal + */ + public static boolean isEqual(JsonNode n1, JsonNode n2) { + + /* + * due to the inherent nature of json being unordered, comparing object representations of the + * same keys and values but different order makes comparison challenging. Let's try an + * experiment where we compare the structure of the json, and then simply compare the sorted + * order of that structure which should be good enough for what we are trying to accomplish. + */ + + TreeWalker walker = new TreeWalker(); + List<String> n1Paths = new ArrayList<String>(); + List<String> n2Paths = new ArrayList<String>(); + + walker.walkTree(n1Paths, n1); + walker.walkTree(n2Paths, n2); + + Collections.sort(n1Paths); + Collections.sort(n2Paths); + + return n1Paths.equals(n2Paths); + + } + + /** + * Concat array. + * + * @param list the list + * @return the string + */ + public static String concatArray(List<String> list) { + return concatArray(list, " "); + } + + /** + * Concat array. 
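+   *
+   * <p>For example, {@code concatArray(Arrays.asList("a", "b", "c"), ",")} returns
+   * {@code "a,b,c"}; a null or empty list yields an empty string.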
+ * + * @param list the list + * @param delimiter the delimiter + * @return the string + */ + public static String concatArray(List<String> list, String delimiter) { + + if (list == null || list.size() == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + boolean firstValue = true; + + for (String item : list) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(delimiter).append(item); + } + + } + + return result.toString(); + + } + + /** + * Concat array. + * + * @param values the values + * @return the string + */ + public static String concatArray(String[] values) { + + if (values == null || values.length == 0) { + return ""; + } + + StringBuilder result = new StringBuilder(64); + + boolean firstValue = true; + + for (String item : values) { + + if (firstValue) { + result.append(item); + firstValue = false; + } else { + result.append(".").append(item); + } + + } + + return result.toString(); + + } + + /** + * Builds the entity resource key. + * + * @param entityType the entity type + * @param resourceId the resource id + * @return the string + */ + public static String buildEntityResourceKey(String entityType, String resourceId) { + return String.format(ENTITY_RESOURCE_KEY_FORMAT, entityType, resourceId); + } + + /** + * Extract resource id from link. + * + * @param link the link + * @return the string + */ + public static String extractResourceIdFromLink(String link) { + + if (link == null) { + return null; + } + + int linkLength = link.length(); + if (linkLength == 0) { + return null; + } + + /* + * if the last character != / then we need to change the lastIndex position + */ + + int startIndex = 0; + String resourceId = null; + if ("/".equals(link.substring(linkLength - 1))) { + // Use-case: + // https://aai-ext1.test.att.com:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS/ + startIndex = link.lastIndexOf("/", linkLength - 2); + resourceId = link.substring(startIndex + 1, linkLength - 1); + } else { + // Use-case: + // https://aai-ext1.test.att.com:9292/aai/v7/business/customers/customer/1607_20160524Func_Ak1_01/service-subscriptions/service-subscription/uCPE-VMS + startIndex = link.lastIndexOf("/"); + resourceId = link.substring(startIndex + 1, linkLength); + } + + String result = null; + + if (resourceId != null) { + try { + result = java.net.URLDecoder.decode(resourceId, "UTF-8"); + } catch (Exception exc) { + /* + * if there is a failure decoding the parameter we will just return the original value. + */ + result = resourceId; + } + } + + return result; + + } + + /** + * Gets the xml stream constant as str. 
+ * + * @param value the value + * @return the xml stream constant as str + */ + public static String getXmlStreamConstantAsStr(int value) { + switch (value) { + case XMLStreamConstants.ATTRIBUTE: + return "ATTRIBUTE"; + case XMLStreamConstants.CDATA: + return "CDATA"; + case XMLStreamConstants.CHARACTERS: + return "CHARACTERS"; + case XMLStreamConstants.COMMENT: + return "COMMENT"; + case XMLStreamConstants.DTD: + return "DTD"; + case XMLStreamConstants.END_DOCUMENT: + return "END_DOCUMENT"; + case XMLStreamConstants.END_ELEMENT: + return "END_ELEMENT"; + case XMLStreamConstants.ENTITY_DECLARATION: + return "ENTITY_DECLARATION"; + case XMLStreamConstants.ENTITY_REFERENCE: + return "ENTITY_REFERENCE"; + case XMLStreamConstants.NAMESPACE: + return "NAMESPACE"; + case XMLStreamConstants.NOTATION_DECLARATION: + return "NOTATION_DECLARATION"; + case XMLStreamConstants.PROCESSING_INSTRUCTION: + return "PROCESSING_INSTRUCTION"; + case XMLStreamConstants.SPACE: + return "SPACE"; + case XMLStreamConstants.START_DOCUMENT: + return "START_DOCUMENT"; + case XMLStreamConstants.START_ELEMENT: + return "START_ELEMENT"; + + default: + return "Unknown(" + value + ")"; + } + } + + /** + * Convert object to json. + * + * @param object the object + * @param pretty the pretty + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToJson(Object object, boolean pretty) + throws JsonProcessingException { + ObjectWriter ow = null; + + ObjectMapper mapper = new ObjectMapper(); + mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false); + + if (pretty) { + ow = mapper.writer().withDefaultPrettyPrinter(); + + } else { + ow = mapper.writer(); + } + + return ow.writeValueAsString(object); + } + + /** + * Convert json str to json node. + * + * @param jsonStr the json str + * @return the json node + * @throws IOException Signals that an I/O exception has occurred. + */ + public static JsonNode convertJsonStrToJsonNode(String jsonStr) throws IOException { + ObjectMapper mapper = new ObjectMapper(); + if (jsonStr == null || jsonStr.length() == 0) { + return null; + } + + return mapper.readTree(jsonStr); + } + + /** + * Convert object to xml. + * + * @param object the object + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public static String convertObjectToXml(Object object) throws JsonProcessingException { + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + String jsonOutput = ow.writeValueAsString(object); + + if (jsonOutput == null) { + return null; + } + + return JsonXmlConverter.convertJsontoXml(jsonOutput); + + } + + /** + * Extract objects by key. 
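+   *
+   * <p>Illustrative: given the JSON
+   * {@code {"results":[{"entity":{"id":"1"}},{"entity":{"id":"2"}}]}} and
+   * {@code searchKey = "entity"}, both {@code entity} objects are added to
+   * {@code foundObjects}; array values found under the key are flattened element by element.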
+ * + * @param node the node + * @param searchKey the search key + * @param foundObjects the found objects + */ + public static void extractObjectsByKey(JsonNode node, String searchKey, + Collection<JsonNode> foundObjects) { + + if ( node == null ) { + return; + } + + if (node.isObject()) { + Iterator<Map.Entry<String, JsonNode>> nodeIterator = node.fields(); + + while (nodeIterator.hasNext()) { + Map.Entry<String, JsonNode> entry = nodeIterator.next(); + if (!entry.getValue().isValueNode()) { + extractObjectsByKey(entry.getValue(), searchKey, foundObjects); + } + + String name = entry.getKey(); + if (name.equalsIgnoreCase(searchKey)) { + + JsonNode entryNode = entry.getValue(); + + if (entryNode.isArray()) { + + Iterator<JsonNode> arrayItemsIterator = entryNode.elements(); + while (arrayItemsIterator.hasNext()) { + foundObjects.add(arrayItemsIterator.next()); + } + + } else { + foundObjects.add(entry.getValue()); + } + + + } + } + } else if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + while (arrayItemsIterator.hasNext()) { + extractObjectsByKey(arrayItemsIterator.next(), searchKey, foundObjects); + } + + } + + } + + /** + * Convert array into list. + * + * @param node the node + * @param instances the instances + */ + public static void convertArrayIntoList(JsonNode node, Collection<JsonNode> instances) { + + if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + while (arrayItemsIterator.hasNext()) { + instances.add(arrayItemsIterator.next()); + } + + } else { + instances.add(node); + } + + } + + /** + * Extract field values from object. + * + * @param node the node + * @param attributesToExtract the attributes to extract + * @param fieldValues the field values + */ + public static void extractFieldValuesFromObject(JsonNode node, + Collection<String> attributesToExtract, Collection<String> fieldValues) { + + if (node == null) { + return; + } + + if (node.isObject()) { + + JsonNode valueNode = null; + + for (String attrToExtract : attributesToExtract) { + + valueNode = node.get(attrToExtract); + + if (valueNode != null) { + + if (valueNode.isValueNode()) { + fieldValues.add(valueNode.asText()); + } + } + } + } + } + + /** + * Extract field value from object. + * + * @param node the node + * @param fieldName the field name + * @return the string + */ + public static String extractFieldValueFromObject(JsonNode node, String fieldName) { + + if (node == null) { + return null; + } + + if (node.isObject()) { + + JsonNode valueNode = node.get(fieldName); + + if (valueNode != null) { + + if (valueNode.isValueNode()) { + return valueNode.asText(); + } + } + + } + return null; + + } + + /** + * Format timestamp. + * + * @param timestamp the timestamp + * @return the string + */ + public static String formatTimestamp(String timestamp) { + try { + SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'HHmmss'Z'"); + originalFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + Date toDate = originalFormat.parse(timestamp); + SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); + newFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + return newFormat.format(toDate); + + } catch (ParseException pe) { + return timestamp; + } + } + + /** + * Gets the HttpRequest payload. + * + * @param request the request + * @return the body + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + public static String getBody(HttpServletRequest request) throws IOException { + + String body = null; + StringBuilder stringBuilder = new StringBuilder(); + BufferedReader bufferedReader = null; + + try { + InputStream inputStream = request.getInputStream(); + if (inputStream != null) { + bufferedReader = new BufferedReader(new InputStreamReader(inputStream)); + char[] charBuffer = new char[128]; + int bytesRead = -1; + while ((bytesRead = bufferedReader.read(charBuffer)) > 0) { + stringBuilder.append(charBuffer, 0, bytesRead); + } + } else { + stringBuilder.append(""); + } + } catch (IOException ex) { + throw ex; + } finally { + if (bufferedReader != null) { + try { + bufferedReader.close(); + } catch (IOException ex) { + throw ex; + } + } + } + + body = stringBuilder.toString(); + return body; + } + + /** + * The main method. + * + * @param args the arguments + * @throws ParseException the parse exception + */ + public static void main(String[] args) throws ParseException { + String date = "20170110T112312Z"; + SimpleDateFormat originalFormat = new SimpleDateFormat("yyyyMMdd'T'hhmmss'Z'"); + Date toDate = originalFormat.parse(date); + SimpleDateFormat newFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss'Z'"); + System.out.println(newFormat.format(toDate)); + + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/util/RawByteHelper.java b/src/main/java/org/openecomp/sparky/util/RawByteHelper.java new file mode 100644 index 0000000..f929acf --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/RawByteHelper.java @@ -0,0 +1,177 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.util; + +/** + * The Class RawByteHelper. + */ +public class RawByteHelper { + private static final byte[] HEX_CHAR = + new byte[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'}; + + /** + * Dump bytes. + * + * @param buffer the buffer + * @return the string + */ + /* + * TODO -> DOCUMENT ME! + * + * @param buffer DOCUMENT ME! + * + * @return DOCUMENT ME! 
+ */ + public static String dumpBytes(byte[] buffer) { + if (buffer == null) { + return ""; + } + String newLine = System.getProperty("line.separator"); + StringBuffer sb = new StringBuffer(); + + for (int i = 0; i < buffer.length; i++) { + if (i != 0 && i % 16 == 0) { + sb.append(newLine); + } + // sb.append("0x").append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4])).append((char) + // (HEX_CHAR[buffer[i] & 0x000F])).append(" "); + sb.append((char) (HEX_CHAR[(buffer[i] & 0x00F0) >> 4])) + .append((char) (HEX_CHAR[buffer[i] & 0x000F])).append(" "); + } + + return sb.toString(); + } + + // if you're trying to figure out why or's w/ FF's see: + /** + * Bytes to int. + * + * @param one the one + * @param two the two + * @param three the three + * @param four the four + * @return the int + */ + // http://www.darksleep.com/player/JavaAndUnsignedTypes.html + public static int bytesToInt(byte one, byte two, byte three, byte four) { + return (((0xFF & one) << 24) | ((0xFF & two) << 16) | ((0xFF & three) << 8) | ((0xFF & four))); + } + + /** + * Bytes to short. + * + * @param one the one + * @param two the two + * @return the short + */ + public static short bytesToShort(byte one, byte two) { + return (short) (((0xFF & one) << 8) | (0xFF & two)); + } + + /** + * First byte. + * + * @param num the num + * @return the byte + */ + // short helper functions + static byte firstByte(short num) { + return (byte) ((num >> 8) & 0xFF); + } + + /** + * First byte. + * + * @param num the num + * @return the byte + */ + // Int helper functions + static byte firstByte(int num) { + return (byte) ((num >> 24) & 0xFF); + } + + /** + * Second byte. + * + * @param num the num + * @return the byte + */ + static byte secondByte(short num) { + return (byte) (num & 0xFF); + } + + /** + * Second byte. + * + * @param num the num + * @return the byte + */ + static byte secondByte(int num) { + return (byte) ((num >> 16) & 0xFF); + } + + /** + * Third byte. + * + * @param num the num + * @return the byte + */ + static byte thirdByte(int num) { + return (byte) ((num >> 8) & 0xFF); + } + + /** + * Fourth byte. + * + * @param num the num + * @return the byte + */ + static byte fourthByte(int num) { + return (byte) (num & 0xFF); + } + + /** + * Int to byte. + * + * @param value the value + * @return the byte + */ + public static byte intToByte(int value) { + return fourthByte(value); + } + + /** + * Int to short. + * + * @param value the value + * @return the short + */ + public static short intToShort(int value) { + return (short) ((value & 0xFF00) | (value & 0xFF)); + } + +} + diff --git a/src/main/java/org/openecomp/sparky/util/ServletUtils.java b/src/main/java/org/openecomp/sparky/util/ServletUtils.java new file mode 100644 index 0000000..e56a98a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/ServletUtils.java @@ -0,0 +1,164 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.util; + +import java.io.IOException; +import java.io.PrintWriter; + +import javax.servlet.http.HttpServletResponse; + +import org.openecomp.cl.api.Logger; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class ServletUtils. + */ +public class ServletUtils { + + /** + * Execute get query. + * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executeGetQuery(Logger logger, SearchAdapter search, + HttpServletResponse response, String requestUrl) throws Exception { + + OperationResult opResult = search.doGet(requestUrl, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + + } + + /** + * Execute post query. + * + * @param logger the logger + * @param search the search + * @param response the response + * @param requestUrl the request url + * @param requestJsonPayload the request json payload + * @return the operation result + * @throws Exception the exception + */ + public static OperationResult executePostQuery(Logger logger, SearchAdapter search, + HttpServletResponse response, String requestUrl, String requestJsonPayload) throws Exception { + + OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json"); + + if (opResult.getResultCode() > 300) { + setServletResponse(logger, true, opResult.getResultCode(), response, opResult.getResult()); + + } else { + response.setStatus(opResult.getResultCode()); + } + + return opResult; + } + + /** + * Handle search servlet errors. + * + * @param logger the logger + * @param errorMsg the error msg + * @param exc the exc + * @param response the response + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void handleSearchServletErrors(Logger logger, String errorMsg, Exception exc, + HttpServletResponse response) throws IOException { + String errorLogMsg = (exc == null ? errorMsg : errorMsg + ". Error:" + + exc.getLocalizedMessage()); + logger.error(AaiUiMsgs.ERROR_GENERIC, errorLogMsg); + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(generateJsonErrorResponse(errorMsg)); + out.close(); + } + + /** + * Generate json error response. 
+ * + * @param message the message + * @return the string + */ + public static String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + /** + * Sets the servlet response. + * + * @param logger the logger + * @param isError the is error + * @param responseCode the response code + * @param response the response + * @param postPayload the post payload + * @throws IOException Signals that an I/O exception has occurred. + */ + public static void setServletResponse(Logger logger, boolean isError, int responseCode, + HttpServletResponse response, String postPayload) throws IOException { + + if (isError) { + logger.error(AaiUiMsgs.ERROR_GENERIC, postPayload); + } + + response.setStatus(responseCode); + + if (postPayload != null) { + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(postPayload); + out.close(); + } + } + + /** + * Gets the full url. + * + * @param elasticConfig the elastic config + * @param resourceUrl the resource url + * @return the full url + */ + public static String getFullUrl(ElasticSearchConfig elasticConfig, String resourceUrl) { + final String host = elasticConfig.getIpAddress(); + final String port = elasticConfig.getHttpPort(); + return String.format("http://%s:%s%s", host, port, resourceUrl); + } +} diff --git a/src/main/java/org/openecomp/sparky/util/SuggestionsPermutation.java b/src/main/java/org/openecomp/sparky/util/SuggestionsPermutation.java new file mode 100644 index 0000000..876b2f4 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/SuggestionsPermutation.java @@ -0,0 +1,82 @@ +/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+package org.openecomp.sparky.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
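+/**
+ * The Class SuggestionsPermutation.
+ */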
+public class SuggestionsPermutation {
+
+ /**
+ * Returns combinations of the provided suggestions. For each element, the
+ * result contains the element on its own plus the element combined with each
+ * trailing suffix of the remaining list; element order is not taken into
+ * account when computing uniqueness. Note that this is not the full power set:
+ * e.g. a list of A,B,C,D will return
+ * [[A], [A, B, C, D], [A, C, D], [A, D], [B], [B, C, D], [B, D], [C], [C, D], [D]]
+ *
+ * @param list the list from which to create the combinations
+ * @return an ArrayList containing an ArrayList for each combination
+ */
+ @SuppressWarnings("serial")
+ public ArrayList<ArrayList<String>> getSuggestionsPermutation(List<String> list) {
+ List<String> statusList = new ArrayList<>(list);
+ List<String> dupStatusList;
+ ArrayList<ArrayList<String>> uniqueList = new ArrayList<ArrayList<String>>();
+ int mainLoopIndexCounter = 0;
+ for (String status : statusList) {
+ // Add the single-entity subset (plain list creation instead of the
+ // anonymous-subclass, double-brace idiom)
+ ArrayList<String> singleEntitySubset = new ArrayList<>();
+ singleEntitySubset.add(status);
+ uniqueList.add(singleEntitySubset);
+ // Copy the list, dropping all elements up to and including the current index
+ dupStatusList = truncateListUntil(statusList, mainLoopIndexCounter);
+
+ while (!dupStatusList.isEmpty()) {
+ ArrayList<String> suggListInIterate = new ArrayList<>();
+ suggListInIterate.add(status);
+ suggListInIterate.addAll(dupStatusList);
+ uniqueList.add(suggListInIterate);
+ dupStatusList.remove(0);
+ }
+ mainLoopIndexCounter++;
+ }
+ return uniqueList;
+
+ }
+
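+ /**
+ * Returns a copy of the given list with all elements up to and including the
+ * given index removed.
+ *
+ * @param list the source list
+ * @param index the last index (inclusive) to drop
+ * @return the truncated copy
+ */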
+ private List<String> truncateListUntil(List<String> list, int index) {
+ List<String> truncatedList = new ArrayList<>(list);
+ int counter = 0;
+ while (counter <= index) {
+ truncatedList.remove(0);
+ counter++;
+ }
+ return truncatedList;
+ }
+}
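Note: to make the combination behavior described in the Javadoc concrete, here is a minimal driver, invented for illustration and not part of this commit:

    import java.util.Arrays;

    // Hypothetical driver for SuggestionsPermutation.
    public class SuggestionsPermutationDemo {
      public static void main(String[] args) {
        SuggestionsPermutation perm = new SuggestionsPermutation();
        // Prints:
        // [[A], [A, B, C, D], [A, C, D], [A, D], [B], [B, C, D], [B, D], [C], [C, D], [D]]
        System.out.println(perm.getSuggestionsPermutation(Arrays.asList("A", "B", "C", "D")));
      }
    }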
diff --git a/src/main/java/org/openecomp/sparky/util/TreeWalker.java b/src/main/java/org/openecomp/sparky/util/TreeWalker.java new file mode 100644 index 0000000..c9a804d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/util/TreeWalker.java @@ -0,0 +1,137 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.util; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * The Class TreeWalker. + */ +public class TreeWalker { + + /** + * Convert json to node. + * + * @param json the json + * @return the json node + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + public JsonNode convertJsonToNode(String json) throws JsonProcessingException, IOException { + ObjectMapper mapper = new ObjectMapper(); + + if (json == null) { + return null; + } + + return mapper.readTree(json); + + } + + /** + * Walk tree. + * + * @param paths the paths + * @param root the root + */ + public void walkTree(List<String> paths, JsonNode root) { + walker(paths, null, root); + } + + /** + * Walker. + * + * @param paths the paths + * @param nodename the nodename + * @param node the node + */ + private void walker(List<String> paths, String nodename, JsonNode node) { + + if (node == null) { + return; + } + + /* + * if ( nodename != null ) { paths.add(nodename); } + */ + + // System.out.println("path: " + nameToPrint); + if (node.isObject()) { + Iterator<Map.Entry<String, JsonNode>> iterator = node.fields(); + + ArrayList<Map.Entry<String, JsonNode>> nodesList = Lists.newArrayList(iterator); + // System.out.println("Walk Tree - root:" + node + ", elements + // keys:" + nodesList); + + if (nodesList.isEmpty()) { + + if (nodename != null) { + paths.add(nodename); + } + + } else { + + for (Map.Entry<String, JsonNode> nodEntry : nodesList) { + String name = nodEntry.getKey(); + JsonNode newNode = nodEntry.getValue(); + + if (newNode.isValueNode()) { + if (nodename == null) { + paths.add(name + "=" + newNode.asText()); + } else { + paths.add(nodename + "." 
+ name + "=" + newNode.asText()); + } + } else { + + if (nodename == null) { + walker(paths, name, newNode); + } else { + walker(paths, nodename + "." + name, newNode); + } + } + + } + } + } else if (node.isArray()) { + Iterator<JsonNode> arrayItemsIterator = node.elements(); + ArrayList<JsonNode> arrayItemsList = Lists.newArrayList(arrayItemsIterator); + for (JsonNode arrayNode : arrayItemsList) { + walker(paths, nodename, arrayNode); + } + } else if (node.isValueNode()) { + paths.add(nodename + "=" + node.asText()); + } + } +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/EntityTypeAggregation.java b/src/main/java/org/openecomp/sparky/viewandinspect/EntityTypeAggregation.java new file mode 100644 index 0000000..a99ebeb --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/EntityTypeAggregation.java @@ -0,0 +1,94 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect; + +import com.fasterxml.jackson.core.JsonProcessingException; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicInteger; + +import org.openecomp.sparky.util.NodeUtils; + +/** + * The Class EntityTypeAggregation. + */ +public class EntityTypeAggregation { + + private ConcurrentHashMap<String, AtomicInteger> counters; + + /** + * Instantiates a new entity type aggregation. + */ + public EntityTypeAggregation() { + counters = new ConcurrentHashMap<String, AtomicInteger>(); + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + counters.putIfAbsent(counterName, new AtomicInteger(0)); + counters.get(counterName).incrementAndGet(); + } + + public ConcurrentHashMap<String, AtomicInteger> getCounters() { + return counters; + } + + /** + * The main method. 
+ * + * @param args the arguments + * @throws JsonProcessingException the json processing exception + */ + public static void main(String[] args) throws JsonProcessingException { + // TODO Auto-generated method stub + + EntityTypeAggregation eta = new EntityTypeAggregation(); + + eta.pegCounter("c1"); + eta.pegCounter("c1"); + eta.pegCounter("c1"); + + eta.pegCounter("x2"); + eta.pegCounter("x2"); + eta.pegCounter("x2"); + eta.pegCounter("x2"); + + eta.pegCounter("z2"); + eta.pegCounter("z2"); + eta.pegCounter("z2"); + eta.pegCounter("z2"); + eta.pegCounter("z2"); + eta.pegCounter("z2"); + + System.out.println(NodeUtils.convertObjectToJson(eta, true)); + + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/config/TierSupportUiConstants.java b/src/main/java/org/openecomp/sparky/viewandinspect/config/TierSupportUiConstants.java new file mode 100644 index 0000000..4da07b9 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/config/TierSupportUiConstants.java @@ -0,0 +1,90 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.config; + +/** + * The Class TierSupportUiConstants. + */ +public class TierSupportUiConstants { + + public static String APP_NAME = "AAIUI"; + + /** Default to unix file separator if system property file.separator is null */ + public static final String FILESEP = + (System.getProperty("file.separator") == null) ? 
"/" : System.getProperty("file.separator"); + + public static String CONFIG_HOME = System.getProperty("CONFIG_HOME") + FILESEP; + public static String AJSC_HOME = System.getProperty("AJSC_HOME") + FILESEP; + public static String CONFIG_ROOT_LOCATION = AJSC_HOME + "bundleconfig" + FILESEP + "etc" + FILESEP; + public static String STATIC_CONFIG_APP_LOCATION = CONFIG_ROOT_LOCATION + "appprops" + FILESEP; + public static String DYNAMIC_CONFIG_APP_LOCATION = CONFIG_HOME; + + public static String CONFIG_OXM_LOCATION = CONFIG_HOME + "model" + FILESEP; + + public static String CONFIG_AUTH_LOCATION = CONFIG_HOME + "auth" + FILESEP; + + public static String HOST = "host"; + public static String PORT = "port"; + public static String RETRIES = "numRequestRetries"; + public static String RESOURCE_VERSION = "resource-version"; + public static String URI = "URI"; + + public static String USERS_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "users.config"; + public static String ROLES_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "roles.config"; + public static String PORTAL_AUTHENTICATION_FILE_LOCATION = DYNAMIC_CONFIG_APP_LOCATION + "portal" + FILESEP + "portal-authentication.properties"; + + public static final String ES_SUGGEST_API = "_suggest"; + public static final String ES_COUNT_API = "_count"; + public static final String ES_SEARCH_API = "_search"; + + public static final String ENTITY_AUTO_SUGGEST_INDEX_NAME_DEFAULT = + "entityautosuggestindex-localhost"; + public static final String ENTITY_AUTO_SUGGEST_SETTINGS_FILE_DEFAULT = + "/etc/autoSuggestSettings.json"; + public static final String ENTITY_AUTO_SUGGEST_MAPPINGS_FILE_DEFAULT = + "/etc/autoSuggestMappings.json"; + public static final String ENTITY_DYNAMIC_MAPPINGS_FILE_DEFAULT = + "/etc/dynamicMappings.json"; + + // JUnit testing synchronizer.properties file + public static String TEST_CONFIG_FILE = + System.getProperty("user.dir") + FILESEP + "bundleconfig-local" + FILESEP + "etc" + FILESEP + + "appprops" + FILESEP + "synchronizer.properties"; + + // Injected Attributes + public static String URI_ATTR_NAME = "uri"; + + public static final String URI_VERSION_REGEX_PATTERN = "aai/v[\\d]+/"; + + public static final String getConfigPath(String configFile){ + return AJSC_HOME + FILESEP + configFile; + } + + public static final String getAggregationIndexName(String entityType){ + return "aggregate_" + entityType + "_index"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/config/VisualizationConfig.java b/src/main/java/org/openecomp/sparky/viewandinspect/config/VisualizationConfig.java new file mode 100644 index 0000000..3f0a5b5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/config/VisualizationConfig.java @@ -0,0 +1,199 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.config; + +import java.util.Properties; + +import org.openecomp.sparky.util.ConfigHelper; + +/** + * The Class VisualizationConfig. + */ +public class VisualizationConfig { + + private int maxSelfLinkTraversalDepth; + + private boolean visualizationDebugEnabled; + + private String aaiEntityNodeDescriptors; + + private String generalNodeClassName; + + private String searchNodeClassName; + + private String selectedSearchedNodeClassName; + + private String entityTypesToSummarize; + private String vnfEntityTypes; + + private boolean makeAllNeighborsBidirectional; + + private static VisualizationConfig instance; + + public static VisualizationConfig getConfig() { + + if (instance == null) { + instance = new VisualizationConfig(); + } + + return instance; + + } + + /** + * Instantiates a new visualization config. + */ + public VisualizationConfig() { + + Properties visualizationProps = + ConfigHelper.loadConfigFromExplicitPath(TierSupportUiConstants.STATIC_CONFIG_APP_LOCATION + + TierSupportUiConstants.FILESEP + "visualization.properties"); + + maxSelfLinkTraversalDepth = + Integer.parseInt(visualizationProps.getProperty("maxSelfLinkTraversalDepth", "2")); + visualizationDebugEnabled = + Boolean.parseBoolean(visualizationProps.getProperty("visualizationDebugEnabled", "false")); + aaiEntityNodeDescriptors = visualizationProps.getProperty("aaiEntityNodeDescriptors", null); + generalNodeClassName = + visualizationProps.getProperty("generalNodeClassName", "unknownClassName"); + searchNodeClassName = + visualizationProps.getProperty("searchedNodeClassName", "unknownClassName"); + selectedSearchedNodeClassName = + visualizationProps.getProperty("selectedSearchedNodeClassName", "unknownClassName"); + + entityTypesToSummarize = visualizationProps.getProperty("entityTypesToSummarize", + "customer,service-instance,complex,pserver,vserver,vnf"); + + vnfEntityTypes = visualizationProps.getProperty("vnfEntityTypes", "generic-vnf,newvce,vce,vpe"); + + makeAllNeighborsBidirectional = Boolean + .parseBoolean(visualizationProps.getProperty("makeAllNeighborsBidirectional", "false")); + + } + + + + /** + * Make all neighbors bidirectional. 
+ * + * @return true, if successful + */ + public boolean makeAllNeighborsBidirectional() { + return makeAllNeighborsBidirectional; + } + + public void setMakeAllNeighborsBidirectional(boolean makeAllNeighborsBidirectional) { + this.makeAllNeighborsBidirectional = makeAllNeighborsBidirectional; + } + + public String getSelectedSearchedNodeClassName() { + return selectedSearchedNodeClassName; + } + + public void setSelectedSearchedNodeClassName(String selectedSearchedNodeClassName) { + this.selectedSearchedNodeClassName = selectedSearchedNodeClassName; + } + + public String getGeneralNodeClassName() { + return generalNodeClassName; + } + + public void setGeneralNodeClassName(String generalNodeClassName) { + this.generalNodeClassName = generalNodeClassName; + } + + public String getSearchNodeClassName() { + return searchNodeClassName; + } + + public void setSearchNodeClassName(String searchNodeClassName) { + this.searchNodeClassName = searchNodeClassName; + } + + public String getAaiEntityNodeDescriptors() { + return aaiEntityNodeDescriptors; + } + + public void setAaiEntityNodeDescriptors(String aaiEntityNodeDescriptors) { + this.aaiEntityNodeDescriptors = aaiEntityNodeDescriptors; + } + + public boolean isVisualizationDebugEnabled() { + return visualizationDebugEnabled; + } + + public void setVisualizationDebugEnabled(boolean visualizationDebugEnabled) { + this.visualizationDebugEnabled = visualizationDebugEnabled; + } + + public void setMaxSelfLinkTraversalDepth(int maxSelfLinkTraversalDepth) { + this.maxSelfLinkTraversalDepth = maxSelfLinkTraversalDepth; + } + + public int getMaxSelfLinkTraversalDepth() { + return maxSelfLinkTraversalDepth; + } + + public String getEntityTypesToSummarize() { + return entityTypesToSummarize; + } + + public void setEntityTypesToSummarize(String entityTypesToSummarize) { + this.entityTypesToSummarize = entityTypesToSummarize; + } + + public String getVnfEntityTypes() { + return vnfEntityTypes; + } + + public void setVnfEntityTypes(String vnfEntityTypes) { + this.vnfEntityTypes = vnfEntityTypes; + } + + @Override + public String toString() { + return "VisualizationConfig [maxSelfLinkTraversalDepth=" + maxSelfLinkTraversalDepth + + ", visualizationDebugEnabled=" + visualizationDebugEnabled + ", " + + (aaiEntityNodeDescriptors != null + ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "") + + (generalNodeClassName != null ? "generalNodeClassName=" + generalNodeClassName + ", " + : "") + + (searchNodeClassName != null ? "searchNodeClassName=" + searchNodeClassName + ", " : "") + + (selectedSearchedNodeClassName != null + ? "selectedSearchedNodeClassName=" + selectedSearchedNodeClassName + ", " : "") + + (entityTypesToSummarize != null + ? "entityTypesToSummarize=" + entityTypesToSummarize + ", " : "") + + (vnfEntityTypes != null ? 
"vnfEntityTypes=" + vnfEntityTypes + ", " : "") + + "makeAllNeighborsBidirectional=" + makeAllNeighborsBidirectional + "]"; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/ActiveInventoryNode.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/ActiveInventoryNode.java new file mode 100644 index 0000000..db79ef5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/ActiveInventoryNode.java @@ -0,0 +1,778 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; +import org.openecomp.sparky.viewandinspect.enumeration.NodeProcessingAction; +import org.openecomp.sparky.viewandinspect.enumeration.NodeProcessingState; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * The Class ActiveInventoryNode. 
+ */ +public class ActiveInventoryNode { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + ActiveInventoryNode.class); + private static final String URIRegexPattern = "aai/v[\\d]/"; + + public static final int DEFAULT_INIT_NODE_DEPTH = 1000; + + private String nodeId; + private String selfLink; + + private boolean isRootNode; + private ConcurrentLinkedDeque<String> inboundNeighbors; + private ConcurrentLinkedDeque<String> outboundNeighbors; + private List<JsonNode> complexGroups; + private List<RelationshipList> relationshipLists; + private int nodeDepth; + private OperationResult opResult; + + + private boolean processingErrorOccurred; + private List<String> errorCauses; + private boolean selflinkRetrievalFailure; + private NodeProcessingState state; + + private boolean processedNeighbors; + + private boolean selfLinkPendingResolve; + + /* + * I think we shouldn't be using this crutch flags. If these things are meant + * to represent the current state of the node, then they should be legitimate + * state transitions. + */ + + private boolean selfLinkDeterminationPending; + + private AtomicBoolean selfLinkProcessed; + + private OxmModelLoader oxmModelLoader; + private VisualizationConfig visualizationConfig; + + private String entityType; + private String primaryKeyName; + private String primaryKeyValue; + + private boolean nodeIssue; + private boolean ignoredByFilter; + + private boolean resolvedSelfLink; + + private Map<String, String> properties; + private ArrayList<String> queryParams; + + private ObjectMapper mapper; + + /** + * Instantiates a new active inventory node. + */ + public ActiveInventoryNode() { + this(null); + } + + /** + * Instantiates a new active inventory node. + * + * @param key the key + */ + public ActiveInventoryNode(String key) { + this.nodeId = null; + this.entityType = null; + this.selfLink = null; + this.properties = new HashMap<String, String>(); + this.processingErrorOccurred = false; + this.errorCauses = new ArrayList<String>(); + this.selflinkRetrievalFailure = false; + this.nodeIssue = false; + this.state = NodeProcessingState.INIT; + this.selfLinkPendingResolve = false; + this.selfLinkDeterminationPending = false; + + selfLinkProcessed = new AtomicBoolean(Boolean.FALSE); + oxmModelLoader = null; + visualizationConfig = null; + + isRootNode = false; + inboundNeighbors = new ConcurrentLinkedDeque<String>(); + outboundNeighbors = new ConcurrentLinkedDeque<String>(); + complexGroups = new ArrayList<JsonNode>(); + relationshipLists = new ArrayList<RelationshipList>(); + nodeDepth = DEFAULT_INIT_NODE_DEPTH; + queryParams = new ArrayList<String>(); + + mapper = new ObjectMapper(); + + processedNeighbors = false; + resolvedSelfLink = false; + + + } + + public void clearQueryParams() { + queryParams.clear(); + } + + public void addQueryParam(String queryParam) { + if ( queryParam!= null) { + if( !queryParams.contains(queryParam)) { + queryParams.add(queryParam); + } + } + } + + public void addQueryParams(Collection<String> params) { + + if (params != null & params.size() > 0) { + + for (String param : params) { + addQueryParam(param); + } + } + } + + + public List<String> getQueryParams() { + return queryParams; + } + + public void setSelfLinkDeterminationPending(boolean selfLinkDeterminationPending) { + this.selfLinkDeterminationPending = selfLinkDeterminationPending; + } + + public boolean isSelfLinkDeterminationPending() { + return selfLinkDeterminationPending; + } + + public NodeProcessingState getState() { + return state; + } + + 
public List<JsonNode> getComplexGroups() { + return complexGroups; + } + + public List<RelationshipList> getRelationshipLists() { + return relationshipLists; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + public String getPrimaryKeyName() { + return primaryKeyName; + } + + /** + * Gets the visualization config. + * + * @return the visualization config + */ + public VisualizationConfig getvisualizationConfig() { + return visualizationConfig; + } + + public int getNodeDepth() { + return nodeDepth; + } + + public void setNodeDepth(int nodeDepth) { + this.nodeDepth = nodeDepth; + } + + /** + * Sets the visualization config. + * + * @param visualizationConfig the new visualization config + */ + public void setvisualizationConfig(VisualizationConfig visualizationConfig) { + this.visualizationConfig = visualizationConfig; + } + + public OxmModelLoader getOxmModelLoader() { + return oxmModelLoader; + } + + public void setPrimaryKeyName(String primaryKeyName) { + this.primaryKeyName = primaryKeyName; + } + + public String getPrimaryKeyValue() { + return primaryKeyValue; + } + + public void setPrimaryKeyValue(String primaryKeyValue) { + this.primaryKeyValue = primaryKeyValue; + } + + public boolean isNodeIssue() { + return nodeIssue; + } + + public boolean isIgnoredByFilter() { + return ignoredByFilter; + } + + public void setIgnoredByFilter(boolean ignoredByFilter) { + this.ignoredByFilter = ignoredByFilter; + } + + public void setNodeIssue(boolean nodeIssue) { + this.nodeIssue = nodeIssue; + } + + /** + * Checks for processed neighbors. + * + * @return true, if successful + */ + public boolean hasProcessedNeighbors() { + return processedNeighbors; + } + + public void setProcessedNeighbors(boolean processedNeighbors) { + this.processedNeighbors = processedNeighbors; + } + + /** + * Checks for resolved self link. + * + * @return true, if successful + */ + public boolean hasResolvedSelfLink() { + return resolvedSelfLink; + } + + public void setResolvedSelfLink(boolean resolvedSelfLink) { + this.resolvedSelfLink = resolvedSelfLink; + } + + /** + * Checks for neighbors. + * + * @return true, if successful + */ + public boolean hasNeighbors() { + return (inboundNeighbors.size() > 0 || outboundNeighbors.size() > 0); + } + + /** + * Adds the inbound neighbor. + * + * @param nodeId the node id + */ + public void addInboundNeighbor(String nodeId) { + + if (nodeId == null) { + return; + } + + if (!inboundNeighbors.contains(nodeId)) { + inboundNeighbors.add(nodeId); + } + + } + + /** + * Adds the outbound neighbor. + * + * @param nodeId the node id + */ + public void addOutboundNeighbor(String nodeId) { + + if (nodeId == null) { + return; + } + + if (!outboundNeighbors.contains(nodeId)) { + outboundNeighbors.add(nodeId); + } + + } + + public boolean isAtMaxDepth() { + return (nodeDepth >= VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()); + } + + public ConcurrentLinkedDeque<String> getInboundNeighbors() { + return inboundNeighbors; + } + + public void setInboundNeighbors(ConcurrentLinkedDeque<String> inboundNeighbors) { + this.inboundNeighbors = inboundNeighbors; + } + + public Collection<String> getOutboundNeighbors() { + List<String> result = new ArrayList<String>(); + + Iterator<String> neighborIterator = outboundNeighbors.iterator(); + + while (neighborIterator.hasNext()) { + result.add(neighborIterator.next()); + } + + return result; + } + + /** + * Change depth. 
+ * + * @param newDepth the new depth + * @return true, if successful + */ + public boolean changeDepth(int newDepth) { + + boolean nodeDepthWasChanged = false; + + if (newDepth < nodeDepth) { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_DEPTH, nodeId, + String.valueOf(this.nodeDepth), String.valueOf(newDepth)); + this.nodeDepth = newDepth; + nodeDepthWasChanged = true; + } + + return nodeDepthWasChanged; + + } + + public void setOutboundNeighbors(ConcurrentLinkedDeque<String> outboundNeighbors) { + this.outboundNeighbors = outboundNeighbors; + } + + public boolean isRootNode() { + return isRootNode; + } + + public void setRootNode(boolean isRootNode) { + this.isRootNode = isRootNode; + } + + /** + * Change state. + * + * @param newState the new state + * @param action the action + */ + public void changeState(NodeProcessingState newState, NodeProcessingAction action) { + /* + * NodeId may be null depending on the current node life-cycle state + */ + + if (getNodeId() != null) { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE, state.toString(), newState.toString(), action.toString()); + } else { + LOG.info(AaiUiMsgs.ACTIVE_INV_NODE_CHANGE_STATE_NO_NODE_ID, state.toString(), newState.toString(), action.toString()); + } + this.state = newState; + } + + public boolean isSelfLinkPendingResolve() { + return selfLinkPendingResolve; + } + + public void setSelfLinkPendingResolve(boolean selfLinkPendingResolve) { + this.selfLinkPendingResolve = selfLinkPendingResolve; + } + + public boolean isSelflinkRetrievalFailure() { + return selflinkRetrievalFailure; + } + + public void setSelflinkRetrievalFailure(boolean selflinkRetrievalFailure) { + this.selflinkRetrievalFailure = selflinkRetrievalFailure; + } + + public void setOxmModelLoader(OxmModelLoader loader) { + this.oxmModelLoader = loader; + } + + public boolean getSelfLinkProcessed() { + return selfLinkProcessed.get(); + } + + public void setSelfLinkProcessed(boolean selfLinkProcessed) { + this.selfLinkProcessed.set(selfLinkProcessed); + } + + public boolean isDirectSelfLink() { + // https://aai-int1.test.att.com:8443/aai/v8/resources/id/2458124400 + return isDirectSelfLink(this.selfLink); + } + + /** + * Checks if is direct self link. + * + * @param link the link + * @return true, if is direct self link + */ + public static boolean isDirectSelfLink(String link) { + // https://aai-int1.test.att.com:8443/aai/v8/resources/id/2458124400 + + if (link == null) { + return false; + } + + return link.contains("/resources/id/"); + + } + + public Map<String, String> getProperties() { + return properties; + } + + /** + * Adds the error cause. + * + * @param error the error + */ + public void addErrorCause(String error) { + if (!errorCauses.contains(error)) { + errorCauses.add(error); + } + } + + /** + * Adds the property. + * + * @param key the key + * @param value the value + */ + public void addProperty(String key, String value) { + properties.put(key, value); + } + + public boolean isProcessingErrorOccurred() { + return processingErrorOccurred; + } + + public void setProcessingErrorOccurred(boolean processingErrorOccurred) { + this.processingErrorOccurred = processingErrorOccurred; + } + + public String getNodeId() { + return nodeId; + } + + public void setNodeId(String nodeId) { + this.nodeId = nodeId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getSelfLink() { + return selfLink; + } + + /** + * Calculate edit attribute uri. 
+ * + * @param link the link + * @return the string + */ + public String calculateEditAttributeUri(String link) { + String uri = null; + Pattern pattern = Pattern.compile(URIRegexPattern); + Matcher matcher = pattern.matcher(link); + if (matcher.find()) { + uri = link.substring(matcher.end()); + } + return uri; + } + + /** + * Analyze self link relationship list. + * + * @param jsonResult the json result + * @return the relationship list + */ + private RelationshipList analyzeSelfLinkRelationshipList(String jsonResult) { + + + RelationshipList relationshipList = null; + + try { + relationshipList = mapper.readValue(jsonResult, RelationshipList.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_RELATIONSHIP_LIST_ERROR, exc.toString()); + } + + return relationshipList; + } + + /** + * Adds the relationship list. + * + * @param relationshipList the relationship list + */ + public void addRelationshipList(RelationshipList relationshipList) { + + if (!relationshipLists.contains(relationshipList)) { + relationshipLists.add(relationshipList); + } + + } + + /** + * Process pathed self link response. + * + * @param selfLinkJsonResponse the self link json response + * @param startNodeType the start node type + * @param startNodeResourceKey the start node resource key + */ + public void processPathedSelfLinkResponse(String selfLinkJsonResponse, String startNodeType, + String startNodeResourceKey) { + + if (selfLinkJsonResponse == null || selfLinkJsonResponse.length() == 0) { + LOG.error(AaiUiMsgs.SELF_LINK_NULL_EMPTY_RESPONSE); + return; + } + + try { + JsonNode jsonNode = mapper.readValue(selfLinkJsonResponse, JsonNode.class); + + Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields(); + Entry<String, JsonNode> field = null; + + while (fieldNames.hasNext()) { + + field = fieldNames.next(); + + /* + * Is there a way to tell if the field is an aggregate or an atomic value? This is where our + * flattening code needs to live + */ + + String fieldName = field.getKey(); + + if ("relationship-list".equals(fieldName)) { + + /* + * Parse the relationship list like we were doing before, so we can determine whether or + * not to keep it or traverse it after we have performed the evaluative node depth logic. + */ + RelationshipList relationshipList = + analyzeSelfLinkRelationshipList(field.getValue().toString()); + + if (relationshipList != null) { + this.relationshipLists.add(relationshipList); + } else { + LOG.info(AaiUiMsgs.NO_RELATIONSHIP_DISCOVERED, nodeId); + } + } else { + JsonNode nodeValue = field.getValue(); + + if (nodeValue != null && nodeValue.isValueNode()) { + + /* + * before we blindly add the fieldName and value to our property set, let's do one more + * check to see if the field name is an entity type. If it is, then our complex + * attribute processing code will pick it up and process it instead, but this is + * probably more likely just for array node types, but we'll see. + */ + + if (oxmModelLoader.getEntityDescriptor(fieldName) == null) { + /* + * this is no an entity type as far as we can tell, so we can add it to our property + * set. + */ + + addProperty(fieldName, nodeValue.asText()); + + } + + } else { + + if (nodeValue.isArray()) { + + /* + * make sure array entity-type collection is not an entityType before adding it to the + * property set. The expetation is that it will be added the visualization through a + * complex group or relationship. 
+ */ + + if (oxmModelLoader.getEntityDescriptor(field.getKey()) == null) { + /* + * this is no an entity type as far as we can tell, so we can add it to our property + * set. + */ + + addProperty(field.getKey(), nodeValue.toString()); + + } + + } else { + + complexGroups.add(nodeValue); + + } + + } + + } + + } + + } catch (IOException exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "POJO", exc.getLocalizedMessage()); + this.setProcessingErrorOccurred(true); + this.addErrorCause( + "An error occurred while converting JSON into POJO = " + exc.getLocalizedMessage()); + } + + } + + public void setSelfLink(String selfLink) { + this.selfLink = selfLink; + } + + /** + * Adds the complex group. + * + * @param complexGroup the complex group + */ + public void addComplexGroup(JsonNode complexGroup) { + + if (!complexGroups.contains(complexGroup)) { + complexGroups.add(complexGroup); + } + + } + + /** + * Gets the padding. + * + * @param level the level + * @param paddingString the padding string + * @return the padding + */ + private static String getPadding(int level, String paddingString) { + StringBuilder sb = new StringBuilder(32); + for (int x = 0; x < level; x++) { + sb.append(paddingString); + } + return sb.toString(); + } + + /** + * Dump node tree. + * + * @param showProperties the show properties + * @return the string + */ + public String dumpNodeTree(boolean showProperties) { + return dumpNodeTree(0, showProperties); + } + + /** + * Dump node tree. + * + * @param level the level + * @param showProperties the show properties + * @return the string + */ + private String dumpNodeTree(int level, boolean showProperties) { + StringBuilder sb = new StringBuilder(128); + String padding = getPadding(level, " "); + + sb.append(padding + " -> " + getNodeId() + "]").append("\n"); + sb.append(padding + " -> primaryKeyName = " + primaryKeyName + "]").append("\n"); + sb.append(padding + " -> primaryKeyValue = " + primaryKeyValue + "]").append("\n"); + sb.append(padding + " -> entityType = " + entityType + "]").append("\n"); + + if (showProperties) { + Set<Entry<String, String>> entries = properties.entrySet(); + for (Entry<String, String> entry : entries) { + sb.append( + padding + " ----> " + String.format("[ %s => %s ]", entry.getKey(), entry.getValue())) + .append("\n"); + } + } + + sb.append(padding + " ----> " + String.format("[ selfLink => %s ]", getSelfLink())) + .append("\n"); + + sb.append("\n").append(padding + " ----> Inbound Neighbors:").append("\n"); + + for (String inboundNeighbor : inboundNeighbors) { + sb.append("\n").append(inboundNeighbor.toString()); + } + + sb.append(padding + " ----> Outbound Neighbors:").append("\n"); + sb.append("\n").append(padding + " ----> Outbound Neighbors:").append("\n"); + + for (String outboundNeighbor : outboundNeighbors) { + sb.append("\n").append(outboundNeighbor.toString()); + } + + return sb.toString(); + + } + + public String getProcessingErrorCauses() { + + StringBuilder sb = new StringBuilder(128); + + for (String c : this.errorCauses) { + sb.append(c).append("\n"); + } + + return sb.toString(); + } +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/D3VisualizationOutput.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/D3VisualizationOutput.java new file mode 100644 index 0000000..c201408 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/D3VisualizationOutput.java @@ -0,0 +1,132 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY 
(AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; + +import java.util.ArrayList; +import java.util.List; + +/** + * The Class D3VisualizationOutput. + */ +public class D3VisualizationOutput { + + public GraphMeta graphMeta; + public List<JsonNode> nodes; + public List<JsonNodeLink> links; + public InlineMessage inlineMessage; + + /** + * Instantiates a new d 3 visualization output. + */ + public D3VisualizationOutput() { + nodes = new ArrayList<JsonNode>(); + links = new ArrayList<JsonNodeLink>(); + inlineMessage = null; + } + + public GraphMeta getGraphMeta() { + return graphMeta; + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + graphMeta.pegCounter(counterName); + } + + public void setGraphMeta(GraphMeta graphMeta) { + this.graphMeta = graphMeta; + } + + /** + * Adds the nodes. + * + * @param nodes the nodes + */ + public void addNodes(List<JsonNode> nodes) { + this.nodes.addAll(nodes); + } + + /** + * Adds the links. + * + * @param links the links + */ + public void addLinks(List<JsonNodeLink> links) { + this.links.addAll(links); + } + + public InlineMessage getInlineMessage() { + return inlineMessage; + } + + public void setInlineMessage(InlineMessage inlineMessage) { + this.inlineMessage = inlineMessage; + } + + /** + * The main method. 
+ * + * @param args the arguments + * @throws JsonProcessingException the json processing exception + */ + public static final void main(String[] args) throws JsonProcessingException { + + ActiveInventoryNode pserverAin = new ActiveInventoryNode(); + pserverAin.setNodeId("pserver.76786asd87asgd"); + JsonNode pserver = new JsonNode(pserverAin); + + List<JsonNode> nodes = new ArrayList<JsonNode>(); + nodes.add(pserver); + + JsonNodeLink l1 = new JsonNodeLink(); + l1.setSource(pserverAin.getNodeId()); + l1.setTarget(pserverAin.getNodeId()); + l1.setId(l1.getSource() + "_" + l1.getTarget()); + + List<JsonNodeLink> links = new ArrayList<JsonNodeLink>(); + links.add(l1); + + D3VisualizationOutput output = new D3VisualizationOutput(); + output.addNodes(nodes); + output.addLinks(links); + + + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + String json = ow.writeValueAsString(output); + + System.out.println(json); + + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/EntityEntry.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/EntityEntry.java new file mode 100644 index 0000000..0d5699d --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/EntityEntry.java @@ -0,0 +1,82 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +/** + * The Class EntityEntry. + */ +public class EntityEntry { + + private String entityType; + + private String entityPrimaryKeyValue; + + private String searchTags; + + private String entityId; + + public String getEntityId() { + return entityId; + } + + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityPrimaryKeyValue() { + return entityPrimaryKeyValue; + } + + public void setEntityPrimaryKeyValue(String entityPrimaryKeyValue) { + this.entityPrimaryKeyValue = entityPrimaryKeyValue; + } + + public String getSearchTags() { + return searchTags; + } + + public void setSearchTags(String searchTags) { + this.searchTags = searchTags; + } + + @Override + public String toString() { + return "EntityEntry [" + (entityType != null ? "entityType=" + entityType + ", " : "") + + (entityPrimaryKeyValue != null ? 
"entityPrimaryKeyValue=" + entityPrimaryKeyValue + ", " + : "") + + (searchTags != null ? "searchTags=" + searchTags + ", " : "") + + (entityId != null ? "entityId=" + entityId : "") + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/GraphMeta.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/GraphMeta.java new file mode 100644 index 0000000..1409501 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/GraphMeta.java @@ -0,0 +1,148 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import org.openecomp.sparky.viewandinspect.EntityTypeAggregation; + +/** + * The Class GraphMeta. + */ +public class GraphMeta { + + private com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDescriptors; + + private int numNodes; + + private int numLinks; + + private long renderTimeInMs; + + private int numLinksResolvedSuccessfullyFromCache; + + private int numLinksResolvedSuccessfullyFromServer; + + private int numLinkResolveFailed; + + private EntityTypeAggregation entitySummary; + + /** + * Instantiates a new graph meta. 
+ */ + public GraphMeta() { + entitySummary = new EntityTypeAggregation(); + } + + public EntityTypeAggregation getEntitySummary() { + return entitySummary; + } + + public void setEntitySummary(EntityTypeAggregation entitySummary) { + this.entitySummary = entitySummary; + } + + public com.fasterxml.jackson.databind.JsonNode getAaiEntityNodeDescriptors() { + return aaiEntityNodeDescriptors; + } + + public void setAaiEntityNodeDescriptors( + com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions) { + this.aaiEntityNodeDescriptors = aaiEntityNodeDefinitions; + } + + public int getNumLinksResolvedSuccessfullyFromCache() { + return numLinksResolvedSuccessfullyFromCache; + } + + public void setNumLinksResolvedSuccessfullyFromCache(int numLinksResolvedSuccessfullyFromCache) { + this.numLinksResolvedSuccessfullyFromCache = numLinksResolvedSuccessfullyFromCache; + } + + public int getNumLinksResolvedSuccessfullyFromServer() { + return numLinksResolvedSuccessfullyFromServer; + } + + public void setNumLinksResolvedSuccessfullyFromServer( + int numLinksResolvedSuccessfullyFromServer) { + this.numLinksResolvedSuccessfullyFromServer = numLinksResolvedSuccessfullyFromServer; + } + + public int getNumLinkResolveFailed() { + return numLinkResolveFailed; + } + + public void setNumLinkResolveFailed(int numLinkResolveFailed) { + this.numLinkResolveFailed = numLinkResolveFailed; + } + + public int getNumNodes() { + return numNodes; + } + + public void setNumNodes(int numNodes) { + this.numNodes = numNodes; + } + + public int getNumLinks() { + return numLinks; + } + + public void setNumLinks(int numLinks) { + this.numLinks = numLinks; + } + + public long getRenderTimeInMs() { + return renderTimeInMs; + } + + public void setRenderTimeInMs(long renderTimeInMs) { + this.renderTimeInMs = renderTimeInMs; + } + + /** + * Peg counter. + * + * @param counterName the counter name + */ + public void pegCounter(String counterName) { + entitySummary.pegCounter(counterName); + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "GraphMeta [" + + (aaiEntityNodeDescriptors != null + ? "aaiEntityNodeDescriptors=" + aaiEntityNodeDescriptors + ", " : "") + + "numNodes=" + numNodes + ", numLinks=" + numLinks + ", renderTimeInMs=" + renderTimeInMs + + ", numLinksResolvedSuccessfullyFromCache=" + numLinksResolvedSuccessfullyFromCache + + ", numLinksResolvedSuccessfullyFromServer=" + numLinksResolvedSuccessfullyFromServer + + ", numLinkResolveFailed=" + numLinkResolveFailed + ", " + + (entitySummary != null ? "entitySummary=" + entitySummary : "") + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/InlineMessage.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/InlineMessage.java new file mode 100644 index 0000000..1a4dd58 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/InlineMessage.java @@ -0,0 +1,71 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +/** + * The Class InlineMessage. + */ +public class InlineMessage { + + private String level; + private String message; + + /** + * Instantiates a new inline message. + * + * @param level the level + * @param message the message + */ + public InlineMessage(String level, String message) { + this.level = level; + this.message = message; + } + + public String getLevel() { + return level; + } + + public void setLevel(String level) { + this.level = level; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return level + " : " + message; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNode.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNode.java new file mode 100644 index 0000000..9db06f0 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNode.java @@ -0,0 +1,197 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import org.apache.log4j.Logger; + +/* + * We can use annotations to differentiate between intermediate data we use to build the node, and + * the data that we actually want to appear in the exported JSON. + */ + +/* + * This is our current ( 14-June-2016 ) working schema that will remain organic until we get it just + * right. 
+ *
+ * {
+ *   "item-type": "customer",
+ *   "item-name-key": "subscriber-name",
+ *   "item-name-value": "subscriber-name-123456789-aai847-data-01",
+ *   "item-properties": [{
+ *     "property-name": "subscriber-name",
+ *     "property-value": "subscriber-name-123456789-aai847-data-01"
+ *   }, {
+ *     "property-name": "global-customer-id",
+ *     "property-value": "global-customer-id-123456789-aai847-data-01"
+ *   }],
+ *   "node-meta": {
+ *     "color": "#f2d2d2",
+ *     "isSearchTarget": false,
+ *     "nodeGroups": "1,2,3,4"
+ *   }
+ * }
+ *
+ */
+
+
+/**
+ * The Class JsonNode.
+ */
+public class JsonNode {
+
+  private String id;
+  private String itemType;
+  private String itemNameKey;
+  private String itemNameValue;
+  private Map<String, String> itemProperties;
+  private NodeMeta nodeMeta;
+
+  @JsonIgnore
+  private boolean isRootNode;
+
+  @JsonIgnore
+  private String resourceKey;
+
+  @JsonIgnore
+  private Collection<String> inboundNeighbors;
+
+  @JsonIgnore
+  private Collection<String> outboundNeighbors;
+
+  @JsonIgnore
+  private static final Logger LOG = Logger.getLogger(JsonNode.class);
+
+  /**
+   * Instantiates a new json node.
+   *
+   * @param ain the ain
+   */
+  public JsonNode(ActiveInventoryNode ain) {
+    this.resourceKey = ain.getNodeId();
+    this.itemProperties = ain.getProperties();
+    this.setItemType(ain.getEntityType());
+    this.setItemNameKey(ain.getPrimaryKeyName());
+    this.setItemNameValue(ain.getPrimaryKeyValue());
+    this.setId(ain.getNodeId());
+    this.isRootNode = ain.isRootNode();
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("---");
+      LOG.debug("JsonNode constructor using AIN = " + ain.dumpNodeTree(true));
+      LOG.debug("---");
+    }
+
+    inboundNeighbors = ain.getInboundNeighbors();
+    outboundNeighbors = ain.getOutboundNeighbors();
+
+    nodeMeta = new NodeMeta();
+
+    nodeMeta.setNodeIssue(ain.isNodeIssue());
+    nodeMeta.setNodeDepth(ain.getNodeDepth());
+
+    nodeMeta.setNumInboundNeighbors(ain.getInboundNeighbors().size());
+    nodeMeta.setNumOutboundNeighbors(ain.getOutboundNeighbors().size());
+
+    nodeMeta.setAtMaxDepth(ain.isAtMaxDepth());
+    nodeMeta.setSelfLinkResolved(!ain.isSelflinkRetrievalFailure());
+    nodeMeta.setProcessingErrorOccurred(ain.isProcessingErrorOccurred());
+    nodeMeta.setHasNeighbors(
+        ain.getOutboundNeighbors().size() > 0 || ain.getInboundNeighbors().size() > 0);
+    nodeMeta.setProcessingState(ain.getState());
+
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  public String getItemNameKey() {
+    return itemNameKey;
+  }
+
+  public String getItemNameValue() {
+    return itemNameValue;
+  }
+
+  public Map<String, String> getItemProperties() {
+    return itemProperties;
+  }
+
+  public String getItemType() {
+    return itemType;
+  }
+
+  public String getResourceKey() {
+    return resourceKey;
+  }
+
+  public void setItemNameKey(String itemNameKey) {
+    this.itemNameKey = itemNameKey;
+  }
+
+  public void setItemNameValue(String itemNameValue) {
+    this.itemNameValue = itemNameValue;
+  }
+
+  public void setItemProperties(HashMap<String, String> itemProperties) {
+    this.itemProperties = itemProperties;
+  }
+
+  public void setItemType(String itemType) {
+    this.itemType = itemType;
+  }
+
+  public void setResourceKey(String resourceKey) {
+    this.resourceKey = resourceKey;
+  }
+
+  public NodeMeta getNodeMeta() {
+    return nodeMeta;
+  }
+
+  public void setNodeMeta(NodeMeta nodeMeta) {
+    this.nodeMeta = nodeMeta;
+  }
+
+  public boolean isRootNode() {
+    return isRootNode;
+  }
+
+  @Override
+  public String toString() {
+    return "JsonNode [" + (id != null ?
"id=" + id + ", " : "") + + (itemType != null ? "itemType=" + itemType + ", " : "") + + (itemNameKey != null ? "itemNameKey=" + itemNameKey + ", " : "") + + (itemNameValue != null ? "itemNameValue=" + itemNameValue + ", " : "") + + (itemProperties != null ? "itemProperties=" + itemProperties + ", " : "") + + (nodeMeta != null ? "nodeMeta=" + nodeMeta + ", " : "") + "isRootNode=" + isRootNode + + ", " + (resourceKey != null ? "resourceKey=" + resourceKey + ", " : "") + + (inboundNeighbors != null ? "inboundNeighbors=" + inboundNeighbors + ", " : "") + + (outboundNeighbors != null ? "outboundNeighbors=" + outboundNeighbors : "") + "]"; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNodeLink.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNodeLink.java new file mode 100644 index 0000000..f6be171 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/JsonNodeLink.java @@ -0,0 +1,76 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +/* + * Expected JSON Output: + * + * { JsonNodeLink : { id : <value>, source : <value>, target : <value> } } + * + */ + +/** + * The Class JsonNodeLink. + */ +public class JsonNodeLink { + + protected String id; + protected String source; + protected String target; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "JsonNodeLink [id=" + id + ", source=" + source + ", target=" + target + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeDebug.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeDebug.java new file mode 100644 index 0000000..64f5333 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeDebug.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +/** + * The Class NodeDebug. + */ +public class NodeDebug { + private boolean maxTraversalDepthReached; + private boolean processingError; + private String processingErrorCauses; + + public boolean isMaxTraversalDepthReached() { + return maxTraversalDepthReached; + } + + public void setMaxTraversalDepthReached(boolean maxTraversalDepthReached) { + this.maxTraversalDepthReached = maxTraversalDepthReached; + } + + public boolean isProcessingError() { + return processingError; + } + + public void setProcessingError(boolean processingError) { + this.processingError = processingError; + } + + public String getProcessingErrorCauses() { + return processingErrorCauses; + } + + public void setProcessingErrorCauses(String processingErrorCauses) { + this.processingErrorCauses = processingErrorCauses; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeMeta.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeMeta.java new file mode 100644 index 0000000..6df5e73 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeMeta.java @@ -0,0 +1,212 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; +import org.openecomp.sparky.viewandinspect.enumeration.NodeProcessingState; + +/** + * The Class NodeMeta. 
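+ * Per-node rendering metadata (depth, neighbor counts, processing state) that the UI can use
+ * when styling graph nodes. An illustrative sketch of populating one by hand (assumes the
+ * visualization configuration has been loaded, since the constructor consults it):
+ *
+ * <pre>{@code
+ * NodeMeta meta = new NodeMeta();
+ * meta.setNodeDepth(1);
+ * meta.setNumInboundNeighbors(2);
+ * meta.setHasNeighbors(true);
+ * meta.setProcessingState(NodeProcessingState.READY);
+ * }</pre>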
+ */ +public class NodeMeta { + + private String className; + + private boolean isEnrichableNode; + private boolean isSearchTarget; + + private NodeDebug nodeDebug; + private boolean nodeIssue; + private boolean nodeValidated; + private long selfLinkResponseTimeInMs; + private long numInboundNeighbors; + private long numOutboundNeighbors; + + private boolean atMaxDepth; + private boolean selfLinkResolved; + private boolean processingErrorOccurred; + private boolean neighborsProcessed; + private int nodeDepth; + private boolean hasNeighbors; + + private NodeProcessingState processingState; + + /** + * Instantiates a new node meta. + */ + public NodeMeta() { + this.isSearchTarget = false; + this.isEnrichableNode = false; + + if (VisualizationConfig.getConfig().isVisualizationDebugEnabled()) { + nodeDebug = new NodeDebug(); + } + this.numInboundNeighbors = 0; + this.numOutboundNeighbors = 0; + + this.selfLinkResponseTimeInMs = 0; + + this.atMaxDepth = false; + this.selfLinkResolved = false; + this.processingErrorOccurred = false; + this.hasNeighbors = false; + this.neighborsProcessed = false; + this.nodeDepth = ActiveInventoryNode.DEFAULT_INIT_NODE_DEPTH; + this.processingState = NodeProcessingState.INIT; + + } + + public boolean isAtMaxDepth() { + return atMaxDepth; + } + + public void setAtMaxDepth(boolean atMaxDepth) { + this.atMaxDepth = atMaxDepth; + } + + public boolean isSelfLinkResolved() { + return selfLinkResolved; + } + + + + public NodeProcessingState getProcessingState() { + return processingState; + } + + public void setProcessingState(NodeProcessingState processingState) { + this.processingState = processingState; + } + + public void setSelfLinkResolved(boolean selfLinkResolved) { + this.selfLinkResolved = selfLinkResolved; + } + + public boolean isProcessingErrorOccurred() { + return processingErrorOccurred; + } + + public void setProcessingErrorOccurred(boolean processingErrorOccurred) { + this.processingErrorOccurred = processingErrorOccurred; + } + + public boolean isHasNeighbors() { + return hasNeighbors; + } + + public void setHasNeighbors(boolean hasNeighbors) { + this.hasNeighbors = hasNeighbors; + } + + public boolean isNeighborsProcessed() { + return neighborsProcessed; + } + + public void setNeighborsProcessed(boolean neighborsProcessed) { + this.neighborsProcessed = neighborsProcessed; + } + + public int getNodeDepth() { + return nodeDepth; + } + + public void setNodeDepth(int nodeDepth) { + this.nodeDepth = nodeDepth; + } + + public void setNodeDebug(NodeDebug nodeDebug) { + this.nodeDebug = nodeDebug; + } + + public String getClassName() { + return className; + } + + public long getNumInboundNeighbors() { + return numInboundNeighbors; + } + + public void setNumInboundNeighbors(long numInboundNeighbors) { + this.numInboundNeighbors = numInboundNeighbors; + } + + public long getNumOutboundNeighbors() { + return numOutboundNeighbors; + } + + public void setNumOutboundNeighbors(long numOutboundNeighbors) { + this.numOutboundNeighbors = numOutboundNeighbors; + } + + public NodeDebug getNodeDebug() { + return nodeDebug; + } + + public long getSelfLinkResponseTimeInMs() { + return selfLinkResponseTimeInMs; + } + + public boolean isEnrichableNode() { + return isEnrichableNode; + } + + public boolean isNodeIssue() { + return nodeIssue; + } + + public boolean isNodeValidated() { + return nodeValidated; + } + + public boolean isSearchTarget() { + return isSearchTarget; + } + + public void setClassName(String className) { + this.className = className; + } + + public void 
setEnrichableNode(boolean isEnrichableNode) { + this.isEnrichableNode = isEnrichableNode; + } + + public void setNodeIssue(boolean nodeIssue) { + this.nodeIssue = nodeIssue; + } + + public void setNodeValidated(boolean nodeValidated) { + this.nodeValidated = nodeValidated; + } + + public void setSearchTarget(boolean isSearchTarget) { + this.isSearchTarget = isSearchTarget; + } + + public void setSelfLinkResponseTimeInMs(long selfLinkResponseTimeInMs) { + this.selfLinkResponseTimeInMs = selfLinkResponseTimeInMs; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeProcessingTransaction.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeProcessingTransaction.java new file mode 100644 index 0000000..f881f06 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/NodeProcessingTransaction.java @@ -0,0 +1,103 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class NodeProcessingTransaction. + */ +public class NodeProcessingTransaction { + + private ActiveInventoryNode processingNode; + private OperationResult opResult; + private String selfLinkWithModifiers; + private String requestParameters; + + /** + * Instantiates a new node processing transaction. + */ + public NodeProcessingTransaction() {} + + public String getRequestParameters() { + return requestParameters; + } + + public void setRequestParameters(String requestParameters) { + this.requestParameters = requestParameters; + } + + public String getSelfLinkWithModifiers() { + + if (processingNode == null) { + return null; + } + + return processingNode.getSelfLink() + requestParameters; + } + + public ActiveInventoryNode getProcessingNode() { + return processingNode; + } + + public void setProcessingNode(ActiveInventoryNode processingNode) { + this.processingNode = processingNode; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + /** + * Processing error occurred. 
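+   * A transaction that has no OperationResult at all is treated as a failure.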
+   *
+   * @return true, if a processing error occurred; false, if the operation result was successful
+   */
+  public boolean processingErrorOccurred() {
+    if (opResult == null) {
+      return true;
+    }
+
+    return !opResult.wasSuccessful();
+
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "NodeProcessingTransaction ["
+        + (processingNode != null ? "processingNode=" + processingNode + ", " : "")
+        + (opResult != null ? "opResult=" + opResult + ", " : "") + "processingErrorOccurred="
+        + processingErrorOccurred() + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryParams.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryParams.java
new file mode 100644
index 0000000..b3592c3
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryParams.java
@@ -0,0 +1,58 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.viewandinspect.entity;
+
+/**
+ * The Class QueryParams.
+ */
+public class QueryParams {
+
+  private String searchTargetPrimaryKeyValues;
+  private String searchTargetNodeId;
+
+  /**
+   * Instantiates a new query params.
+   */
+  public QueryParams() {
+
+  }
+
+  public String getSearchTargetPrimaryKeyValues() {
+    return searchTargetPrimaryKeyValues;
+  }
+
+  public void setSearchTargetPrimaryKeyValues(String searchTargetPrimaryKeyValues) {
+    this.searchTargetPrimaryKeyValues = searchTargetPrimaryKeyValues;
+  }
+
+  public String getSearchTargetNodeId() {
+    return searchTargetNodeId;
+  }
+
+  public void setSearchTargetNodeId(String searchTargetNodeId) {
+    this.searchTargetNodeId = searchTargetNodeId;
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryRequest.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryRequest.java
new file mode 100644
index 0000000..34c34ef
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QueryRequest.java
@@ -0,0 +1,48 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.viewandinspect.entity;
+
+/**
+ * The Class QueryRequest.
+ */
+public class QueryRequest {
+
+  private String hashId;
+
+  public String getHashId() {
+    return hashId;
+  }
+
+  public void setHashId(String hashId) {
+    this.hashId = hashId;
+  }
+
+  @Override
+  public String toString() {
+    return "QueryRequest [hashId=" + hashId + "]";
+  }
+
+}
diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/QuerySearchEntity.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QuerySearchEntity.java
new file mode 100644
index 0000000..71b775c
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/QuerySearchEntity.java
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.viewandinspect.entity;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/**
+ * The Class QuerySearchEntity.
+ */
+public class QuerySearchEntity {
+
+  private static final String DEFAULT_MAX_RESULTS = "10";
+  public String maxResults;
+
+  public String queryStr;
+
+  /**
+   * Instantiates a new query search entity.
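+   * Defaults maxResults to "10" and leaves queryStr unset. An illustrative use:
+   *
+   * <pre>{@code
+   * QuerySearchEntity searchRequest = new QuerySearchEntity();
+   * searchRequest.setQueryStr("pserver vserver");
+   * String[] terms = searchRequest.getSearchTerms(); // ["pserver", "vserver"]
+   * }</pre>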
+ */ + public QuerySearchEntity() { + maxResults = DEFAULT_MAX_RESULTS; + queryStr = null; + } + + public String getMaxResults() { + return maxResults; + } + + public void setMaxResults(String maxResults) { + this.maxResults = maxResults; + } + + public String getQueryStr() { + return queryStr; + } + + public void setQueryStr(String queryStr) { + this.queryStr = queryStr; + } + + @JsonIgnore + public String[] getSearchTerms() { + + if (queryStr == null) { + return null; + } + + return queryStr.split(" "); + + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelatedToProperty.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelatedToProperty.java new file mode 100644 index 0000000..a4c72b0 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelatedToProperty.java @@ -0,0 +1,65 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class RelatedToProperty. + */ +public class RelatedToProperty { + protected String propertyKey; + protected String propertyValue; + + @JsonProperty("property-key") + public String getPropertyKey() { + return propertyKey; + } + + public void setPropertyKey(String propertyKey) { + this.propertyKey = propertyKey; + } + + @JsonProperty("property-value") + public String getPropertyValue() { + return propertyValue; + } + + public void setPropertyValue(String propertyValue) { + this.propertyValue = propertyValue; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelatedToProperty [propertyKey=" + propertyKey + ", propertyValue=" + propertyValue + + "]"; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/Relationship.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/Relationship.java new file mode 100644 index 0000000..e82ef3a --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/Relationship.java @@ -0,0 +1,92 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. 
+ * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Arrays; + +/** + * The Class Relationship. + */ +public class Relationship { + + protected String relatedTo; + protected String relatedLink; + protected RelationshipData[] relationshipData; + protected RelatedToProperty[] relatedToProperty; + + public String getRelatedTo() { + return relatedTo; + } + + @JsonProperty("related-to") + public void setRelatedTo(String relatedTo) { + this.relatedTo = relatedTo; + } + + public String getRelatedLink() { + return relatedLink; + } + + @JsonProperty("related-link") + public void setRelatedLink(String relatedLink) { + this.relatedLink = relatedLink; + } + + public RelationshipData[] getRelationshipData() { + return relationshipData; + } + + @JsonProperty("relationship-data") + public void setRelationshipData(RelationshipData[] relationshipData) { + this.relationshipData = relationshipData; + } + + + + public RelatedToProperty[] getRelatedToProperty() { + return relatedToProperty; + } + + @JsonProperty("related-to-property") + public void setRelatedToProperty(RelatedToProperty[] relatedToProperty) { + this.relatedToProperty = relatedToProperty; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "Relationship [relatedTo=" + relatedTo + ", relatedLink=" + relatedLink + + ", relationshipData=" + Arrays.toString(relationshipData) + ", relatedToProperty=" + + Arrays.toString(relatedToProperty) + "]"; + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipData.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipData.java new file mode 100644 index 0000000..d290fef --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipData.java @@ -0,0 +1,64 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Class RelationshipData. + */ +public class RelationshipData { + protected String relationshipKey; + protected String relationshipValue; + + @JsonProperty("relationship-key") + public String getRelationshipKey() { + return relationshipKey; + } + + public void setRelationshipKey(String relationshipKey) { + this.relationshipKey = relationshipKey; + } + + @JsonProperty("relationship-value") + public String getRelationshipValue() { + return relationshipValue; + } + + public void setRelationshipValue(String relationshipValue) { + this.relationshipValue = relationshipValue; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelationshipData [relationshipKey=" + relationshipKey + ", relationshipValue=" + + relationshipValue + "]"; + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipDirectionality.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipDirectionality.java new file mode 100644 index 0000000..3c273dc --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipDirectionality.java @@ -0,0 +1,43 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +/** + * This enumeration is intended to be used to help us discriminate neighbor relationships for the + * purpose of visualization and conceptualization to model in/out relationships between + * ActiveInventoryNodes. 
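+ * For example, if node A's relationship-list names node B but B's does not name A, the
+ * A-to-B edge could be modeled as OUT from A's perspective and IN from B's.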
+ * Possible visualization behaviors could be the following: - IN ( draw a line with 1 arrow ) - OUT + * ( draw a line with 1 arrow ) - BOTH ( draw a line with 2 arrows, or 2 lines with 1 arrow each ) - + * UNKNOWN ( draw a line with no arrows ) + * The UNKNOWN case is what we have at the moment where we have a collection neighbors with no + * knowledge of relationship directionality. + * + * @author davea + * + */ +public enum RelationshipDirectionality { + IN, OUT, BOTH, UNKNOWN +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipList.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipList.java new file mode 100644 index 0000000..257b68c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/RelationshipList.java @@ -0,0 +1,58 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Arrays; + +/** + * The Class RelationshipList. + */ +public class RelationshipList { + + protected Relationship[] relationship; + + public Relationship[] getRelationshipList() { + return relationship; + } + + @JsonProperty("relationship") + public void setRelationshipList(Relationship[] relationship) { + this.relationship = relationship; + } + + /* (non-Javadoc) + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return "RelationshipList [relationshipList=" + Arrays.toString(relationship) + "]"; + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/SearchResponse.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/SearchResponse.java new file mode 100644 index 0000000..8cc8d9b --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/SearchResponse.java @@ -0,0 +1,93 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=====================================================
+ *
+ * ECOMP and OpenECOMP are trademarks
+ * and service marks of AT&T Intellectual Property.
+ */
+
+package org.openecomp.sparky.viewandinspect.entity;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.openecomp.sparky.suggestivesearch.SuggestionEntity;
+
+/**
+ * The Class SearchResponse.
+ */
+public class SearchResponse {
+
+  private long processingTimeInMs;
+  private int totalFound;
+
+  private List<SuggestionEntity> suggestions;
+
+  /**
+   * Instantiates a new search response.
+   */
+  public SearchResponse() {
+    this.suggestions = new ArrayList<SuggestionEntity>();
+    this.processingTimeInMs = 0;
+    this.totalFound = 0;
+  }
+
+  public long getProcessingTimeInMs() {
+    return processingTimeInMs;
+  }
+
+  public void setProcessingTimeInMs(long processingTimeInMs) {
+    this.processingTimeInMs = processingTimeInMs;
+  }
+
+  public int getTotalFound() {
+    return totalFound;
+  }
+
+  public void setTotalFound(int totalFound) {
+    this.totalFound = totalFound;
+  }
+
+  public List<SuggestionEntity> getSuggestions() {
+    return suggestions;
+  }
+
+  public void setSuggestions(List<SuggestionEntity> suggestions) {
+    this.suggestions = suggestions;
+  }
+
+  /**
+   * Adds a suggestion entity to the response.
+   *
+   * @param suggestionEntity the suggestion that will be converted to JSON
+   */
+  public void addSuggestion(SuggestionEntity suggestionEntity) {
+    suggestions.add(suggestionEntity);
+  }
+
+  /**
+   * Increments the total number of hits for this SearchResponse by
+   * the value passed in.
+   *
+   * @param additionalCount - Count to increment the total found
+   */
+  public void addToTotalFound(int additionalCount) {
+    totalFound += additionalCount;
+  }
+}
diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java
new file mode 100644
index 0000000..fbe6325
--- /dev/null
+++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/SelfLinkDeterminationTransaction.java
@@ -0,0 +1,82 @@
+/**
+ * ============LICENSE_START===================================================
+ * SPARKY (AAI UI service)
+ * ============================================================================
+ * Copyright © 2017 AT&T Intellectual Property.
+ * Copyright © 2017 Amdocs
+ * All rights reserved.
+ * ============================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import org.openecomp.sparky.dal.rest.OperationResult; + +public class SelfLinkDeterminationTransaction { + + private String parentNodeId; + private ActiveInventoryNode newNode; + private String queryString; + private String entityUrl; + private OperationResult opResult; + + + + public String getParentNodeId() { + return parentNodeId; + } + + public void setParentNodeId(String parentNodeId) { + this.parentNodeId = parentNodeId; + } + + public ActiveInventoryNode getNewNode() { + return newNode; + } + + public void setNewNode(ActiveInventoryNode newNode) { + this.newNode = newNode; + } + + public OperationResult getOpResult() { + return opResult; + } + + public void setOpResult(OperationResult opResult) { + this.opResult = opResult; + } + + public String getQueryString() { + return queryString; + } + + public void setQueryString(String queryString) { + this.queryString = queryString; + } + + public String getEntityUrl() { + return entityUrl; + } + + public void setEntityUrl(String entityUrl) { + this.entityUrl = entityUrl; + } + + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/entity/Violations.java b/src/main/java/org/openecomp/sparky/viewandinspect/entity/Violations.java new file mode 100644 index 0000000..a921782 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/entity/Violations.java @@ -0,0 +1,114 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.entity; + +import com.att.aft.dme2.internal.jackson.annotate.JsonProperty; + +/** + * The Class Violations. + */ +public class Violations { + + private String severity; + + private String category; + + private String type; + + private String timestamp; + + private String details; + + @JsonProperty("error-message") + private String errorMessage; + + /** + * Instantiates a new violations. 
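+   * The details field is not populated by this constructor; its accessors are currently
+   * commented out below.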
+ * + * @param severity the severity + * @param category the category + * @param type the type + * @param timestamp the timestamp + * @param errorMessage the error message + */ + public Violations(String severity, String category, String type, String timestamp, + String errorMessage) { + this.severity = severity; + this.category = category; + this.type = type; + this.timestamp = timestamp; + this.errorMessage = errorMessage; + } + + public String getSeverity() { + return severity; + } + + public void setSeverity(String severity) { + this.severity = severity; + } + + public String getCategory() { + return category; + } + + public void setCategory(String category) { + this.category = category; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + + public String getTimestamp() { + return timestamp; + } + + public void setTimestamp(String timestamp) { + this.timestamp = timestamp; + } + + /* + * public Map<String, Object> getDetails() { return details; } + * + * public void setDetails(Map<String, Object> details) { this.details = details; } + */ + + public String getErrorMessage() { + return errorMessage; + } + + public void setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + } + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingAction.java b/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingAction.java new file mode 100644 index 0000000..7c9befa --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingAction.java @@ -0,0 +1,36 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.enumeration; + +/** + * The Enum NodeProcessingAction. 
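+ * Events raised while resolving a node (self-link resolution, response parsing, neighbor
+ * processing) that drive NodeProcessingState transitions.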
+ */ +public enum NodeProcessingAction { + SELF_LINK_SET, NEW_NODE_PROCESSED, SELF_LINK_RESOLVE_ERROR, SELF_LINK_DETERMINATION_ERROR, + SELF_LINK_RESOLVE_OK, SELF_LINK_RESPONSE_PARSE_ERROR, SELF_LINK_RESPONSE_PARSE_OK, + NEIGHBORS_PROCESSED_ERROR, NEIGHBORS_PROCESSED_OK, COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR, + COMPLEX_ATTRIBUTE_GROUP_PARSE_OK, NODE_IDENTITY_ERROR, UNEXPECTED_STATE_TRANSITION +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingState.java b/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingState.java new file mode 100644 index 0000000..344f8df --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/enumeration/NodeProcessingState.java @@ -0,0 +1,33 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.enumeration; + +/** + * The Enum NodeProcessingState. + */ +public enum NodeProcessingState { + INIT, SELF_LINK_UNRESOLVED, SELF_LINK_RESPONSE_UNPROCESSED, NEIGHBORS_UNPROCESSED, READY, ERROR +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/services/SearchServiceWrapper.java b/src/main/java/org/openecomp/sparky/viewandinspect/services/SearchServiceWrapper.java new file mode 100644 index 0000000..41f0eff --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/services/SearchServiceWrapper.java @@ -0,0 +1,809 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ +package org.openecomp.sparky.viewandinspect.services; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONException; +import org.json.JSONObject; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.elasticsearch.HashQueryResponse; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.sas.config.SearchServiceConfig; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.search.VnfSearchService; +import org.openecomp.sparky.search.config.SuggestionConfig; +import org.openecomp.sparky.suggestivesearch.SuggestionEntity; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.entity.QuerySearchEntity; +import org.openecomp.sparky.viewandinspect.entity.SearchResponse; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * The Class SearchServlet. + */ + +public class SearchServiceWrapper { + + private static final long serialVersionUID = 1L; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SearchServiceWrapper.class); + + private SearchServiceConfig sasConfig = null; + private SuggestionConfig suggestionConfig = null; + private SearchAdapter search = null; + private ObjectMapper mapper; + private OxmModelLoader oxmModelLoader; + private VnfSearchService vnfSearch = null; + + private static final String SEARCH_STRING = "search"; + private static final String COUNT_STRING = "count"; + private static final String QUERY_SEARCH = SEARCH_STRING + "/querysearch"; + private static final String SUMMARY_BY_ENTITY_TYPE_API = SEARCH_STRING + "/summarybyentitytype"; + private static final String SUMMARY_BY_ENTITY_TYPE_COUNT_API = + SUMMARY_BY_ENTITY_TYPE_API + "/" + COUNT_STRING; + + private static final String VALUE_ANYKEY = "anyKey"; + private static final String VALUE_QUERY = "query"; + + private static final String KEY_HASH_ID = "hashId"; + private static final String KEY_GROUP_BY = "groupby"; + private static final String KEY_SEARCH_RESULT = "searchResult"; + private static final String KEY_HITS = "hits"; + private static final String KEY_PAYLOAD = "payload"; + private static final String KEY_DOCUMENT = "document"; + private static final String KEY_CONTENT = "content"; + private static final String KEY_SEARCH_TAG_IDS = "searchTagIDs"; + private static final String KEY_SEARCH_TAGS = "searchTags"; + private static final String KEY_LINK = "link"; + private static final String KEY_ENTITY_TYPE = "entityType"; + + private static final String VI_SUGGESTION_ROUTE = "viewInspect"; // TODO -> Read route from + // suggestive-search.properties + // instead of hard coding + + private static 
final String VIUI_SEARCH_TEMPLATE =
+      "{ " + "\"results-start\": 0," + "\"results-size\": %d," + "\"queries\": [{" + "\"must\": {"
+          + "\"match\": {" + "\"field\": \"entityType searchTags crossEntityReferenceValues\","
+          + "\"value\": \"%s\"," + "\"operator\": \"and\", "
+          + "\"analyzer\": \"whitespace_analyzer\"" + "}" + "}" + "}]" + "}";
+
+  /**
+   * Instantiates a new search service wrapper.
+   */
+  public SearchServiceWrapper() {
+    this.mapper = new ObjectMapper();
+    vnfSearch = new VnfSearchService();
+
+    try {
+      if (sasConfig == null) {
+        sasConfig = SearchServiceConfig.getConfig();
+      }
+
+      if (suggestionConfig == null) {
+        suggestionConfig = SuggestionConfig.getConfig();
+      }
+
+      if (search == null) {
+        search = new SearchAdapter();
+      }
+
+      if (oxmModelLoader == null) {
+        oxmModelLoader = OxmModelLoader.getInstance();
+
+        if (OxmModelLoader.getInstance().getSearchableEntityDescriptors().isEmpty()) {
+          LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, "searchable entity");
+        }
+      }
+    } catch (Exception exc) {
+      // log the failure rather than constructing an exception that is never thrown
+      LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR,
+          "Caught an exception while getting an instance of servlet configuration from"
+              + " SearchServlet. Error = " + exc.getMessage());
+    }
+  }
+
+  public void doGet(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+    doPost(request, response);
+  }
+
+  public void setSasConfig(SearchServiceConfig sasConfig) {
+    this.sasConfig = sasConfig;
+  }
+
+  public SearchServiceConfig getSasConfig() {
+    return sasConfig;
+  }
+
+  public void setSuggestionConfig(SuggestionConfig suggestionConfig) {
+    this.suggestionConfig = suggestionConfig;
+  }
+
+  public void setSearch(SearchAdapter search) {
+    this.search = search;
+  }
+
+  public SuggestionConfig getSuggestionConfig() {
+    return suggestionConfig;
+  }
+
+  public SearchAdapter getSearch() {
+    return search;
+  }
+
+  public void setOxmModelLoader(OxmModelLoader oxmModelLoader) {
+    this.oxmModelLoader = oxmModelLoader;
+  }
+
+  public OxmModelLoader getOxmModelLoader() {
+    return oxmModelLoader;
+  }
+
+  public VnfSearchService getVnfSearch() {
+    return vnfSearch;
+  }
+
+  public void setVnfSearch(VnfSearchService vnfSearch) {
+    this.vnfSearch = vnfSearch;
+  }
+
+  /**
+   * Gets the full URL for a search-data-service operation.
+   *
+   * @param indexName the index name
+   * @param type the search operation type (e.g., VALUE_QUERY)
+   * @param ipAddress the search service ip address
+   * @param port the search service http port
+   * @param version the search service api version
+   * @return the full url
+   */
+  private String getSasFullUrl(String indexName, String type, String ipAddress, String port,
+      String version) {
+
+    return String.format("https://%s:%s/services/search-data-service/%s/search/indexes/%s/%s",
+        ipAddress, port, version, indexName, type);
+  }
+
+  /**
+   * Gets the request parameters by reading and parsing the request body. 
+   *
+   * @param request the request
+   * @return the request parameters as a JSONObject
+   */
+
+  protected JSONObject getRequestParamsFromHeader(HttpServletRequest request) {
+    StringBuffer requestBody = new StringBuffer();
+    String line = null;
+    try {
+      BufferedReader reader = request.getReader();
+      while ((line = reader.readLine()) != null) {
+        requestBody.append(line);
+      }
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.ERROR_READING_HTTP_REQ_PARAMS);
+    }
+
+    String output = requestBody.toString();
+
+    return new JSONObject(output);
+  }
+
+  protected void handleSummaryByEntityTypeCount(HttpServletRequest request,
+      HttpServletResponse response) throws Exception {
+    JSONObject parameters = getRequestParamsFromHeader(request);
+    String hashId = null;
+    if (parameters.has(KEY_HASH_ID)) {
+      hashId = parameters.get(KEY_HASH_ID).toString();
+    } else {
+      vnfSearch.setZeroCountResponse(response);
+      LOG.error(AaiUiMsgs.ERROR_HASH_NOT_FOUND);
+      return;
+    }
+    HashQueryResponse hashQueryResponse = getResponseForQueryByHash(hashId, response);
+    Map<String, String> hashQueryResponsePayloadParams = new HashMap<String, String>();
+    if (hashQueryResponse.getJsonPayload() != null) {
+      hashQueryResponsePayloadParams = getPayloadParams(hashQueryResponse.getJsonPayload());
+      vnfSearch.getEntityCountResults(response, hashQueryResponsePayloadParams);
+    } else {
+      vnfSearch.setZeroCountResponse(response);
+      LOG.error(AaiUiMsgs.ERROR_INVALID_HASH, hashId);
+    }
+  }
+
+  protected Map<String, String> getPayloadParams(String parameters) {
+    Map<String, String> payloadParams = new HashMap<String, String>();
+    try {
+      JSONObject json = new JSONObject(parameters);
+      JSONObject payload = json.getJSONObject(KEY_PAYLOAD);
+      if (payload.length() > 0) {
+        for (String key : JSONObject.getNames(payload)) {
+          payloadParams.put(key, payload.getString(key));
+        }
+      }
+    } catch (JSONException exc) {
+      LOG.error(AaiUiMsgs.ERROR_PARSING_PARAMS, exc);
+    }
+    return payloadParams;
+  }
+
+  protected HashQueryResponse getResponseForQueryByHash(String hashId,
+      HttpServletResponse response) {
+    return vnfSearch.getJSONPayloadFromHash(hashId);
+  }
+
+  protected void handleSummaryByEntityType(HttpServletRequest request, HttpServletResponse response)
+      throws Exception {
+    JSONObject parameters = getRequestParamsFromHeader(request);
+    String hashId = null;
+    if (parameters.has(KEY_HASH_ID)) {
+      hashId = parameters.get(KEY_HASH_ID).toString();
+    } else {
+      vnfSearch.setZeroCountResponse(response);
+      LOG.error(AaiUiMsgs.ERROR_HASH_NOT_FOUND);
+      return;
+    }
+    HashQueryResponse hashQueryResponse = getResponseForQueryByHash(hashId, response);
+    Map<String, String> hashQueryResponsePayloadParams = new HashMap<String, String>();
+    if (hashQueryResponse.getJsonPayload() != null) {
+      hashQueryResponsePayloadParams = getPayloadParams(hashQueryResponse.getJsonPayload());
+      if (parameters.has(KEY_GROUP_BY)) {
+        String groupByKey = parameters.getString(KEY_GROUP_BY);
+        vnfSearch.getSummaryByEntityType(response, hashQueryResponsePayloadParams, groupByKey);
+      }
+    } else {
+      LOG.error(AaiUiMsgs.ERROR_INVALID_HASH, hashId);
+      vnfSearch.setEmptyAggResponse(response);
+    }
+  }
+
+  /**
+   * Gets the value from node. 
+   *
+   * @param node the node
+   * @param fieldName the field name
+   * @return the value from node
+   */
+  private String getValueFromNode(JsonNode node, String fieldName) {
+
+    if (node == null || fieldName == null) {
+      return null;
+    }
+
+    JsonNode valueNode = node.get(fieldName);
+
+    if (valueNode != null) {
+      return valueNode.asText();
+    }
+
+    return null;
+  }
+
+  /**
+   * Builds the search response suggestions from a search-service query result.
+   *
+   * @param operationResult the operation result
+   * @param queryStr the query str
+   * @return the list of suggestion entities, or null if the operation result was empty
+   */
+  private List<SuggestionEntity> generateSuggestionsForSearchResponse(String operationResult,
+      String queryStr) {
+
+    if (operationResult == null || operationResult.length() == 0) {
+      return null;
+    }
+
+    ObjectMapper mapper = new ObjectMapper();
+    JsonNode rootNode = null;
+    List<SuggestionEntity> suggestionEntityList = new ArrayList<SuggestionEntity>();
+    try {
+      rootNode = mapper.readTree(operationResult);
+
+      JsonNode hitsNode = rootNode.get(KEY_SEARCH_RESULT);
+
+      // check whether hits came back, guarding against a missing searchResult element
+      if (hitsNode != null && hitsNode.has(KEY_HITS)) {
+        ArrayNode hitsArray = (ArrayNode) hitsNode.get(KEY_HITS);
+
+        /*
+         * next we iterate over the values in the hit array elements
+         */
+        Iterator<JsonNode> nodeIterator = hitsArray.elements();
+        JsonNode entityNode = null;
+        SuggestionEntity suggestionEntity = null;
+        JsonNode sourceNode = null;
+        while (nodeIterator.hasNext()) {
+          entityNode = nodeIterator.next();
+          sourceNode = entityNode.get(KEY_DOCUMENT).get(KEY_CONTENT);
+
+          // do the point transformation as we build the response?
+          suggestionEntity = new SuggestionEntity();
+          suggestionEntity.setRoute(VI_SUGGESTION_ROUTE);
+
+          /*
+           * This is where we probably want to annotate the search tags because we also have access
+           * to the searchTagIds
+           */
+          String searchTagIds = getValueFromNode(sourceNode, KEY_SEARCH_TAG_IDS);
+          String searchTags = getValueFromNode(sourceNode, KEY_SEARCH_TAGS);
+          String link = getValueFromNode(sourceNode, KEY_LINK);
+          String entityType = getValueFromNode(sourceNode, KEY_ENTITY_TYPE);
+          if (link != null) {
+            suggestionEntity.setHashId(NodeUtils.generateUniqueShaDigest(link));
+          }
+
+          try {
+            suggestionEntity
+                .setText(annotateSearchTags(searchTags, searchTagIds, entityType, queryStr));
+          } catch (Exception exc) {
+            LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, String.valueOf(searchTags),
+                exc.getLocalizedMessage());
+            // at least send back the un-annotated search tags
+            suggestionEntity.setText(searchTags);
+          }
+
+          if (searchTags != null) {
+            suggestionEntityList.add(suggestionEntity);
+          }
+
+        }
+      }
+    } catch (IOException exc) {
+      LOG.warn(AaiUiMsgs.SEARCH_RESPONSE_BUILDING_EXCEPTION, exc.getLocalizedMessage());
+    }
+    return suggestionEntityList;
+  }
+
+  /**
+   * Query terms match search tag.
+   *
+   * @param queryTerms the query terms
+   * @param searchTag the search tag
+   * @return true, if any query term is contained (case-insensitively) in the search tag
+   */
+  private boolean queryTermsMatchSearchTag(String[] queryTerms, String searchTag) {
+
+    if (queryTerms == null || queryTerms.length == 0 || searchTag == null) {
+      return false;
+    }
+
+    for (String queryTerm : queryTerms) {
+      if (searchTag.toLowerCase().contains(queryTerm.toLowerCase())) {
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  /**
+   * The current format of a UI-dropdown-item is like: "search-terms entityType att1=attr1_val". 
+   * Example, for pserver: search-terms pserver hostname=djmAG-72060,
+   * pserver-name2=example-pserver-name2-val-17254, pserver-id=example-pserver-id-val-17254,
+   * ipv4-oam-address=example-ipv4-oam-address-val-17254. SearchController.js parses the above
+   * format, so if you are modifying the parsing below, please update SearchController.js as well.
+   *
+   * @param searchTags the search tags
+   * @param searchTagIds the search tag ids
+   * @param entityType the entity type
+   * @param queryStr the query str
+   * @return the annotated search tags
+   */
+
+  private String annotateSearchTags(String searchTags, String searchTagIds, String entityType,
+      String queryStr) {
+
+    if (searchTags == null || searchTagIds == null) {
+      String valueOfSearchTags = String.valueOf(searchTags);
+      String valueOfSearchTagIds = String.valueOf(searchTagIds);
+
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error",
+          "Search tags = " + valueOfSearchTags + " and search tag IDs = " + valueOfSearchTagIds);
+      return searchTags;
+    }
+
+    if (entityType == null) {
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags, "EntityType is null");
+      return searchTags;
+    }
+
+    if (queryStr == null) {
+      LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, searchTags,
+          "Query string is null");
+      return searchTags;
+    }
+
+    /*
+     * The ElasticSearch analyzer has already applied the lowercase filter, so we don't have to
+     * convert them again
+     */
+    String[] searchTagsArray = searchTags.split(";");
+    String[] searchTagIdsArray = searchTagIds.split(";");
+
+    // specifically apply lower case to the query terms to make matching simpler
+    String[] queryTerms = queryStr.toLowerCase().split(" ");
+
+    OxmEntityDescriptor desc = oxmModelLoader.getSearchableEntityDescriptors().get(entityType);
+
+    if (desc == null) {
+      LOG.error(AaiUiMsgs.ENTITY_NOT_FOUND_IN_OXM, entityType);
+      return searchTags;
+    }
+
+    String primaryKeyName = NodeUtils.concatArray(desc.getPrimaryKeyAttributeName(), "/");
+    String primaryKeyValue = null;
+
+    /*
+     * For each used attribute, get the fieldName for the attribute index and transform the search
+     * tags t1,t2,t3 into h1=t1, h2=t2, h3=t3
+     */
+    StringBuilder searchTagsBuilder = new StringBuilder(128);
+    searchTagsBuilder.append(entityType);
+
+    String primaryKeyConjunctionValue = null;
+    boolean queryTermsMatchedSearchTags = false;
+
+    if (searchTagsArray.length == searchTagIdsArray.length) {
+      for (int i = 0; i < searchTagsArray.length; i++) {
+        String searchTagAttributeId = searchTagIdsArray[i];
+        String searchTagAttributeValue = searchTagsArray[i];
+
+        // find the concat conjunction
+        Map<String, String> pairConjunctionList = suggestionConfig.getPairingList();
+
+        String suggConjunction = null;
+        if (pairConjunctionList.get(searchTagAttributeId) != null) {
+          suggConjunction = pairConjunctionList.get(searchTagAttributeId);
+        } else {
+          suggConjunction = suggestionConfig.getDefaultPairingValue();
+        }
+
+        if (primaryKeyName.equals(searchTagAttributeId)) {
+          primaryKeyValue = searchTagAttributeValue;
+          primaryKeyConjunctionValue = suggConjunction;
+        }
+
+        if (queryTermsMatchSearchTag(queryTerms, searchTagAttributeValue)) {
+          searchTagsBuilder.append(" " + suggConjunction + " " + searchTagAttributeValue);
+          queryTermsMatchedSearchTags = true;
+        }
+      }
+    } else {
+      String errorMessage = "Search tags length did not match search tag ID length for entity type " + entityType;
+      LOG.error(AaiUiMsgs.ENTITY_SYNC_SEARCH_TAG_ANNOTATION_FAILED, errorMessage);
+    }
+
+    /*
+     * if none of the user 
query terms matched the index entity search tags then we should still tag
+     * the matched entity with a conjunction set to at least its entity primary key value to
+     * discriminate between the entities of the same type in the search results displayed in the UI
+     * search bar results
+     */
+    if (!queryTermsMatchedSearchTags) {
+
+      if (primaryKeyValue != null && primaryKeyConjunctionValue != null) {
+        searchTagsBuilder.append(" " + primaryKeyConjunctionValue + " " + primaryKeyValue);
+      } else {
+        LOG.error(AaiUiMsgs.SEARCH_TAG_ANNOTATION_ERROR, "See error",
+            "Could not annotate user query terms " + queryStr
+                + " from available entity search tags = " + searchTags);
+        return searchTags;
+      }
+    }
+
+    return searchTagsBuilder.toString();
+  }
+
+  /**
+   * Strips stop-words from a query string.
+   *
+   * @param queryStr space-separated query search terms
+   * @return the query string with stop-words removed
+   */
+  private String stripStopWordsFromQuery(String queryStr) {
+
+    if (queryStr == null) {
+      return queryStr;
+    }
+
+    Collection<String> stopWords = suggestionConfig.getStopWords();
+    ArrayList<String> queryTerms =
+        new ArrayList<String>(Arrays.asList(queryStr.toLowerCase().split(" ")));
+
+    queryTerms.removeAll(stopWords);
+
+    return String.join(" ", queryTerms);
+  }
+
+  /*
+   * Expected query:
+   *
+   * POST /search/viuiSearch/
+   *
+   * { "maxResults" : "10", "searchStr" : "<search bar text>" }
+   */
+
+  /**
+   * Handle view and inspect search.
+   *
+   * @param querySearchEntity the query search entity carrying the search bar text
+   * @param maxResults max number of results to return
+   * @return the list of view-and-inspect suggestion entities
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  protected List<SuggestionEntity> performViewAndInspectQuerySearch(
+      QuerySearchEntity querySearchEntity, int maxResults) throws IOException {
+    List<SuggestionEntity> suggestionEntityList = new ArrayList<SuggestionEntity>();
+
+    /*
+     * Based on the configured stop words, we need to strip any matched stop-words
+     * (case-insensitively) from the query string before hitting elastic, to prevent the words
+     * from being used against the elastic view-and-inspect index. An alternative to this approach
+     * would be to define stop words on the elastic search index configuration for the
+     * entity-search-index, but that may be more complicated and riskier than just a simple bug
+     * fix; it's something we should think about for the future. 
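+     *
+     * As a purely hypothetical illustration of the stripping step: with configured stop words of
+     * "a", "the" and "of", stripStopWordsFromQuery("the status of a pserver") would return
+     * "status pserver" before the query is substituted into VIUI_SEARCH_TEMPLATE.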
+     */
+
+    try {
+      final String queryStringWithoutStopWords =
+          stripStopWordsFromQuery(querySearchEntity.getQueryStr());
+
+      final String fullUrlStr = getSasFullUrl(sasConfig.getIndexName(), VALUE_QUERY,
+          sasConfig.getIpAddress(), sasConfig.getHttpPort(), sasConfig.getVersion());
+
+      String postBody =
+          String.format(VIUI_SEARCH_TEMPLATE, maxResults, queryStringWithoutStopWords);
+
+      OperationResult opResult = search.doPost(fullUrlStr, postBody, "application/json");
+      if (opResult.getResultCode() == 200) {
+        suggestionEntityList = generateSuggestionsForSearchResponse(opResult.getResult(),
+            querySearchEntity.getQueryStr());
+      }
+    } catch (Exception exc) {
+      LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR,
+          "View and inspect query failed with error = " + exc.getMessage());
+    }
+    return suggestionEntityList;
+  }
+
+  protected List<SuggestionEntity> performVnfQuerySearch(QuerySearchEntity querySearchEntity,
+      int resultCountLimit) throws Exception {
+    return vnfSearch.getSuggestionsResults(querySearchEntity, resultCountLimit);
+  }
+
+  protected void handleQuerySearch(HttpServletRequest request, HttpServletResponse response)
+      throws IOException {
+    String payload = NodeUtils.getBody(request);
+    if (payload == null || payload.isEmpty()) {
+      handleSearchServletErrors("Unable to parse payload", null, response);
+    } else {
+      QuerySearchEntity querySearchEntity = mapper.readValue(payload, QuerySearchEntity.class);
+      int maxResultsPerSearch = Integer.valueOf(querySearchEntity.getMaxResults());
+      try {
+        SearchResponse searchResponse = new SearchResponse();
+        List<SuggestionEntity> viewAndInspectSuggestionEntityList =
+            new ArrayList<SuggestionEntity>();
+        List<SuggestionEntity> vnfSuggestionEntityList = new ArrayList<SuggestionEntity>();
+        long processTime = System.currentTimeMillis();
+        for (String searchService : suggestionConfig.getSearchIndexToSearchService().values()) {
+          if (searchService.equals(SearchServiceWrapper.class.getSimpleName())) {
+            viewAndInspectSuggestionEntityList =
+                performViewAndInspectQuerySearch(querySearchEntity, maxResultsPerSearch);
+          } else if (searchService.equals(VnfSearchService.class.getSimpleName())) {
+            vnfSuggestionEntityList = performVnfQuerySearch(querySearchEntity, maxResultsPerSearch);
+          }
+        }
+
+        // interleave the two result lists, one entry from each per pass, until the
+        // overall cap is reached
+        int totalAdded = 0;
+        for (int i = 0; i < maxResultsPerSearch; i++) {
+          if (i < viewAndInspectSuggestionEntityList.size() && totalAdded < maxResultsPerSearch) {
+            searchResponse.addSuggestion(viewAndInspectSuggestionEntityList.get(i));
+            totalAdded++;
+          }
+          if (i < vnfSuggestionEntityList.size() && totalAdded < maxResultsPerSearch) {
+            searchResponse.addSuggestion(vnfSuggestionEntityList.get(i));
+            totalAdded++;
+          }
+          if (totalAdded >= maxResultsPerSearch) {
+            break;
+          }
+        }
+        searchResponse.addToTotalFound(totalAdded);
+
+        // record the processing time before serializing so that it is actually
+        // present in the emitted JSON
+        processTime = System.currentTimeMillis() - processTime;
+        searchResponse.setProcessingTimeInMs(processTime);
+        String searchResponseJson = NodeUtils.convertObjectToJson(searchResponse, true);
+        setServletResponse(response, searchResponseJson);
+      } catch (Exception exc) {
+        LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR,
+            "Query search failed with error = " + exc.getMessage());
+      }
+    }
+  }
+
+  public void doPost(HttpServletRequest request, HttpServletResponse response)
+      throws ServletException, IOException {
+
+    String api = null;
+    try {
+
+      // set default response
+      response.setStatus(200);
+
+      if (request.getRequestURI().contains(QUERY_SEARCH)) {
+        api = QUERY_SEARCH;
+        handleQuerySearch(request, response);
+        return;
+      } else if 
(request.getRequestURI().contains(SUMMARY_BY_ENTITY_TYPE_COUNT_API)) {
+        api = SUMMARY_BY_ENTITY_TYPE_COUNT_API;
+        handleSummaryByEntityTypeCount(request, response);
+        return;
+      } else if (request.getRequestURI().contains(SUMMARY_BY_ENTITY_TYPE_API)) {
+        api = SUMMARY_BY_ENTITY_TYPE_API;
+        handleSummaryByEntityType(request, response);
+        return;
+      } else {
+
+        final String errorMessage = "Ignored request-uri = " + request.getRequestURI();
+        LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorMessage);
+        response.setStatus(404);
+        response.setContentType("application/json");
+        PrintWriter out = response.getWriter();
+        out.println(generateJsonErrorResponse(errorMessage));
+        out.close();
+
+      }
+    } catch (JSONException je) {
+      handleSearchServletErrors("Caught an exception while parsing json in processing for " + api,
+          je, response);
+    } catch (Exception e1) {
+      handleSearchServletErrors("Caught an exception while communicating with elasticsearch", e1,
+          response);
+    }
+  }
+
+  /**
+   * Generate json error response.
+   *
+   * @param message the message
+   * @return the string
+   */
+  /*
+   * This is the manual approach, however we could also create an object container for the error
+   * then use the Jackson ObjectWriter to dump the object to json instead. If it gets any more
+   * complicated we could do that approach so we don't have to manually trip over the JSON
+   * formatting.
+   */
+  protected String generateJsonErrorResponse(String message) {
+    // the message must be quoted, otherwise the emitted payload is not valid JSON
+    return String.format("{ \"errorMessage\" : \"%s\" }", message);
+  }
+
+  /**
+   * Handle search servlet errors.
+   *
+   * @param errorMsg the error msg
+   * @param exc the exc
+   * @param response the response
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  public void handleSearchServletErrors(String errorMsg, Exception exc,
+      HttpServletResponse response) throws IOException {
+
+    String errorLogMsg =
+        (exc == null ? errorMsg : errorMsg + ". Error: " + exc.getLocalizedMessage());
+
+    LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorLogMsg);
+
+    response.setContentType("application/json");
+    PrintWriter out = response.getWriter();
+    out.println(generateJsonErrorResponse(errorMsg));
+    out.close();
+  }
+
+  /**
+   * Execute query.
+   *
+   * @param response the response
+   * @param requestUrl the request url
+   * @param requestJsonPayload the request json payload
+   * @throws Exception the exception
+   */
+  public void executeQuery(HttpServletResponse response, String requestUrl,
+      String requestJsonPayload) throws Exception {
+
+    OperationResult opResult = search.doPost(requestUrl, requestJsonPayload, "application/json");
+
+    if (opResult != null) {
+
+      response.setStatus(opResult.getResultCode());
+      String finalOutput = opResult.getResult();
+
+      // example: failed to populate drop-down items from formatOutputJson()
+      if (finalOutput != null) {
+        response.setContentType("application/json");
+        PrintWriter out = response.getWriter();
+        out.println(finalOutput);
+        out.close();
+      }
+
+    } else {
+      response.setStatus(500);
+    }
+  }
+
+  /**
+   * Sets the servlet response.
+   *
+   * @param response the response
+   * @param postPayload the post payload
+   * @throws IOException Signals that an I/O exception has occurred.
+   */
+  private void setServletResponse(HttpServletResponse response, String postPayload)
+      throws IOException {
+
+    if (postPayload != null) {
+      response.setContentType("application/json");
+      PrintWriter out = response.getWriter();
+      out.println(postPayload);
+      out.close();
+    }
+  }
+
+}
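The round-robin merge in handleQuerySearch() above is the piece most worth verifying in isolation. The following is a minimal, self-contained sketch of that interleaving; it is illustrative only and not part of the patch, and plain strings stand in for SuggestionEntity instances (SuggestionMergeSketch is a hypothetical class name).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SuggestionMergeSketch {

  // Interleaves the two suggestion lists one element at a time, mirroring the loop in
  // SearchServiceWrapper.handleQuerySearch(), until maxResults entries have been collected
  // or both source lists are exhausted.
  static List<String> interleave(List<String> viewInspect, List<String> vnf, int maxResults) {
    List<String> merged = new ArrayList<String>();
    for (int i = 0; i < maxResults; i++) {
      if (i < viewInspect.size() && merged.size() < maxResults) {
        merged.add(viewInspect.get(i));
      }
      if (i < vnf.size() && merged.size() < maxResults) {
        merged.add(vnf.get(i));
      }
      if (merged.size() >= maxResults) {
        break;
      }
    }
    return merged;
  }

  public static void main(String[] args) {
    List<String> vi = Arrays.asList("vi-1", "vi-2", "vi-3");
    List<String> vnf = Arrays.asList("vnf-1", "vnf-2");
    // prints: [vi-1, vnf-1, vi-2, vnf-2, vi-3]
    System.out.println(interleave(vi, vnf, 5));
  }
}

Because both lists are consulted at the same index on every pass, neither suggestion source can starve the other before the cap is reached, which is the property the servlet code relies on.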
\ No newline at end of file diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationContext.java b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationContext.java new file mode 100644 index 0000000..c5adfd4 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationContext.java @@ -0,0 +1,1649 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.services; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.http.client.utils.URIBuilder; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmEntityDescriptor; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.entity.SearchableEntity; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.config.TierSupportUiConstants; +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; +import org.openecomp.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.openecomp.sparky.viewandinspect.entity.InlineMessage; +import org.openecomp.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.openecomp.sparky.viewandinspect.entity.QueryParams; +import org.openecomp.sparky.viewandinspect.entity.Relationship; +import org.openecomp.sparky.viewandinspect.entity.RelationshipData; +import org.openecomp.sparky.viewandinspect.entity.RelationshipList; +import org.openecomp.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction; +import org.openecomp.sparky.viewandinspect.enumeration.NodeProcessingAction; +import org.openecomp.sparky.viewandinspect.enumeration.NodeProcessingState; +import org.openecomp.sparky.viewandinspect.task.PerformNodeSelfLinkProcessingTask; +import 
org.openecomp.sparky.viewandinspect.task.PerformSelfLinkDeterminationTask; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.PropertyNamingStrategy; + +/** + * The Class SelfLinkNodeCollector. + */ +public class VisualizationContext { + + private static final int MAX_DEPTH_EVALUATION_ATTEMPTS = 100; + private static final String DEPTH_ALL_MODIFIER = "?depth=all"; + private static final String NODES_ONLY_MODIFIER = "?nodes-only"; + private static final String SERVICE_INSTANCE = "service-instance"; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + VisualizationContext.class); + private final ActiveInventoryDataProvider aaiProvider; + + private int maxSelfLinkTraversalDepth; + private AtomicInteger numLinksDiscovered; + private AtomicInteger numSuccessfulLinkResolveFromCache; + private AtomicInteger numSuccessfulLinkResolveFromFromServer; + private AtomicInteger numFailedLinkResolve; + private AtomicInteger aaiWorkOnHand; + + private ActiveInventoryConfig aaiConfig; + private VisualizationConfig visualizationConfig; + private List<String> shallowEntities; + + private AtomicInteger totalLinksRetrieved; + + private final long contextId; + private final String contextIdStr; + + private OxmModelLoader loader; + private ObjectMapper mapper; + private InlineMessage inlineMessage = null; + + private ExecutorService aaiExecutorService; + + /* + * The node cache is intended to be a flat structure indexed by a primary key to avoid needlessly + * re-requesting the same self-links over-and-over again, to speed up the overall render time and + * more importantly to reduce the network cost of determining information we already have. + */ + private ConcurrentHashMap<String, ActiveInventoryNode> nodeCache; + + /** + * Instantiates a new self link node collector. + * + * @param loader the loader + * @throws Exception the exception + */ + public VisualizationContext(long contextId, ActiveInventoryDataProvider aaiDataProvider, + ExecutorService aaiExecutorService, OxmModelLoader loader) throws Exception { + + this.contextId = contextId; + this.contextIdStr = "[Context-Id=" + contextId + "]"; + this.aaiProvider = aaiDataProvider; + this.aaiExecutorService = aaiExecutorService; + this.loader = loader; + + this.nodeCache = new ConcurrentHashMap<String, ActiveInventoryNode>(); + this.numLinksDiscovered = new AtomicInteger(0); + this.totalLinksRetrieved = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromCache = new AtomicInteger(0); + this.numSuccessfulLinkResolveFromFromServer = new AtomicInteger(0); + this.numFailedLinkResolve = new AtomicInteger(0); + this.aaiWorkOnHand = new AtomicInteger(0); + + this.aaiConfig = ActiveInventoryConfig.getConfig(); + this.visualizationConfig = VisualizationConfig.getConfig(); + this.shallowEntities = aaiConfig.getAaiRestConfig().getShallowEntities(); + + this.maxSelfLinkTraversalDepth = visualizationConfig.getMaxSelfLinkTraversalDepth(); + + this.mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_EMPTY); + mapper.setPropertyNamingStrategy(new PropertyNamingStrategy.KebabCaseStrategy()); + } + + public long getContextId() { + return contextId; + } + + /** + * A utility method for extracting all entity-type primary key values from a provided self-link + * and return a set of generic-query API keys. 
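+   *
+   * As a worked example with hypothetical values: assuming the OXM model declares primary keys
+   * of cloud-owner and cloud-region-id for cloud-region, and tenant-id for tenant, a self-link
+   * path of .../cloud-regions/cloud-region/my-owner/my-region/tenants/tenant/my-tenant yields
+   * "cloud-region.cloud-owner:my-owner", "cloud-region.cloud-region-id:my-region" and
+   * "tenant.tenant-id:my-tenant".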
+   *
+   * @param link the self-link to extract the key values from
+   * @return a list of key values that can be used for this entity with the AAI generic-query API
+   */
+  protected List<String> extractQueryParamsFromSelfLink(String link) {
+
+    List<String> queryParams = new ArrayList<String>();
+
+    if (link == null) {
+      LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR, "self link is null");
+      return queryParams;
+    }
+
+    Map<String, OxmEntityDescriptor> entityDescriptors = loader.getEntityDescriptors();
+
+    try {
+
+      URIBuilder urlBuilder = new URIBuilder(link);
+      String urlPath = urlBuilder.getPath();
+
+      OxmEntityDescriptor descriptor = null;
+      String[] urlPathElements = urlPath.split("/");
+      List<String> primaryKeyNames = null;
+      int index = 0;
+      String entityType = null;
+
+      while (index < urlPathElements.length) {
+
+        descriptor = entityDescriptors.get(urlPathElements[index]);
+
+        if (descriptor != null) {
+          entityType = urlPathElements[index];
+          primaryKeyNames = descriptor.getPrimaryKeyAttributeName();
+
+          /*
+           * Make sure that, from whatever index we matched the parent entity-type on, we can
+           * extract additional path elements for the primary key values.
+           */
+          if (index + primaryKeyNames.size() < urlPathElements.length) {
+
+            for (String primaryKeyName : primaryKeyNames) {
+              index++;
+              queryParams.add(entityType + "." + primaryKeyName + ":" + urlPathElements[index]);
+            }
+          } else {
+            LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR,
+                "Could not extract query parameters for entity-type = '" + entityType
+                    + "' from self-link = " + link);
+          }
+        }
+
+        index++;
+      }
+
+    } catch (URISyntaxException exc) {
+
+      LOG.error(AaiUiMsgs.QUERY_PARAM_EXTRACTION_ERROR,
+          "Error extracting query parameters from self-link = " + link + ". Error = "
+              + exc.getMessage());
+    }
+
+    return queryParams;
+  }
+
+  /**
+   * Decode complex attribute group.
+   *
+   * @param ain the active inventory node
+   * @param attributeGroup the attribute group
+   * @return boolean indicating whether the operation was a success (true) or a failure (false)
+   */
+  public boolean decodeComplexAttributeGroup(ActiveInventoryNode ain, JsonNode attributeGroup) {
+
+    try {
+
+      Iterator<Entry<String, JsonNode>> entityArrays = attributeGroup.fields();
+      Entry<String, JsonNode> entityArray = null;
+
+      if (entityArrays == null) {
+        LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, attributeGroup.toString());
+        ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR);
+        return false;
+      }
+
+      while (entityArrays.hasNext()) {
+
+        entityArray = entityArrays.next();
+
+        String entityType = entityArray.getKey();
+        JsonNode entityArrayObject = entityArray.getValue();
+
+        if (entityArrayObject.isArray()) {
+
+          Iterator<JsonNode> entityCollection = entityArrayObject.elements();
+          JsonNode entity = null;
+          while (entityCollection.hasNext()) {
+            entity = entityCollection.next();
+
+            if (LOG.isDebugEnabled()) {
+              LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "decodeComplexAttributeGroup(),"
+                  + " entity = " + entity.toString());
+            }
+
+            /**
+             * Here's what we are going to do:
+             *
+             * <li>In the ActiveInventoryNode, on construction maintain a collection of queryParams
+             * that is added to for the purpose of discovering parent->child hierarchies.
+             *
+             * <li>When we hit this block of the code then we'll use the queryParams to feed the
+             * generic query to resolve the self-link asynchronously. 
+ * + * <li>Upon successful link determination, then and only then will we create a new node + * in the nodeCache and process the child + * + */ + + ActiveInventoryNode newNode = new ActiveInventoryNode(); + newNode.setEntityType(entityType); + + /* + * This is partially a lie because we actually don't have a self-link for complex nodes + * discovered in this way. + */ + newNode.setSelfLinkProcessed(true); + newNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + + /* + * copy parent query params into new child + */ + + if (SERVICE_INSTANCE.equals(entityType)) { + + /* + * 1707 AAI has an issue being tracked with AAI-8932 where the generic-query cannot be + * resolved if all the service-instance path keys are provided. The query only works + * if only the service-instance key and valude are passed due to a historical reason. + * A fix is being worked on for 1707, and when it becomes available we can revert this + * small change. + */ + + newNode.clearQueryParams(); + + } else { + + /* + * For all other entity-types we want to copy the parent query parameters into the new node + * query parameters. + */ + + for (String queryParam : ain.getQueryParams()) { + newNode.addQueryParam(queryParam); + } + + } + + + if (!addComplexGroupToNode(newNode, entity)) { + LOG.error(AaiUiMsgs.ATTRIBUTE_GROUP_FAILURE, "Failed to add child to parent for child = " + entity.toString()); + } + + if (!addNodeQueryParams(newNode)) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Error determining node id and key for node = " + newNode.dumpNodeTree(true) + + " skipping relationship processing"); + newNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NODE_IDENTITY_ERROR); + return false; + } else { + + newNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + + } + + + /* + * Order matters for the query params. We need to set the parent ones before the child + * node + */ + + String selfLinkQuery = + aaiProvider.getGenericQueryForSelfLink(entityType, newNode.getQueryParams()); + + /** + * <li>get the self-link + * <li>add it to the new node + * <li>generate node id + * <li>add node to node cache + * <li>add node id to parent outbound links list + * <li>process node children (should be automatic) (but don't query and resolve + * self-link as we already have all the data) + */ + + SelfLinkDeterminationTransaction txn = new SelfLinkDeterminationTransaction(); + + txn.setQueryString(selfLinkQuery); + txn.setNewNode(newNode); + txn.setParentNodeId(ain.getNodeId()); + aaiWorkOnHand.incrementAndGet(); + supplyAsync(new PerformSelfLinkDeterminationTask(txn, null, aaiProvider), + aaiExecutorService).whenComplete((nodeTxn, error) -> { + aaiWorkOnHand.decrementAndGet(); + if (error != null) { + LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_GENERIC, selfLinkQuery); + } else { + + OperationResult opResult = nodeTxn.getOpResult(); + + ActiveInventoryNode newChildNode = txn.getNewNode(); + + if (opResult != null && opResult.wasSuccessful()) { + + if (opResult.isResolvedLinkFailure()) { + numFailedLinkResolve.incrementAndGet(); + } + + if (opResult.isResolvedLinkFromCache()) { + numSuccessfulLinkResolveFromCache.incrementAndGet(); + } + + if (opResult.isResolvedLinkFromServer()) { + numSuccessfulLinkResolveFromFromServer.incrementAndGet(); + } + + /* + * extract the self-link from the operational result. 
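+                     *
+                     * As an illustration (response shape assumed here, not verified), a
+                     * generic-query result looks roughly like
+                     * {"result-data":[{"resource-type":"tenant","resource-link":"/aai/..."}]},
+                     * which is why exactly one extracted "resource-link" value is expected
+                     * below.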
+ */ + + Collection<JsonNode> entityLinks = new ArrayList<JsonNode>(); + JsonNode genericQueryResult = null; + try { + genericQueryResult = + NodeUtils.convertJsonStrToJsonNode(nodeTxn.getOpResult().getResult()); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, JsonNode.class.toString(), exc.getMessage()); + } + + NodeUtils.extractObjectsByKey(genericQueryResult, "resource-link", + entityLinks); + + String selfLink = null; + + if (entityLinks.size() != 1) { + + LOG.error(AaiUiMsgs.SELF_LINK_DETERMINATION_FAILED_UNEXPECTED_LINKS, String.valueOf(entityLinks.size())); + + } else { + selfLink = ((JsonNode) entityLinks.toArray()[0]).asText(); + + newChildNode.setSelfLink(selfLink); + newChildNode.setNodeId(NodeUtils.generateUniqueShaDigest(selfLink)); + + String uri = NodeUtils.calculateEditAttributeUri(selfLink); + if (uri != null) { + newChildNode.addProperty(TierSupportUiConstants.URI_ATTR_NAME, uri); + } + + ActiveInventoryNode parent = nodeCache.get(txn.getParentNodeId()); + + if (parent != null) { + parent.addOutboundNeighbor(newChildNode.getNodeId()); + newChildNode.addInboundNeighbor(parent.getNodeId()); + } + + newChildNode.setSelfLinkPendingResolve(false); + newChildNode.setSelfLinkProcessed(true); + + newChildNode.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + nodeCache.putIfAbsent(newChildNode.getNodeId(), newChildNode); + + } + + } else { + LOG.error(AaiUiMsgs.SELF_LINK_RETRIEVAL_FAILED, txn.getQueryString(), + String.valueOf(nodeTxn.getOpResult().getResultCode()), nodeTxn.getOpResult().getResult()); + newChildNode.setSelflinkRetrievalFailure(true); + newChildNode.setSelfLinkProcessed(true); + newChildNode.setSelfLinkPendingResolve(false); + + newChildNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_DETERMINATION_ERROR); + + } + + } + + }); + + } + + return true; + + } else { + LOG.error(AaiUiMsgs.UNHANDLED_OBJ_TYPE_FOR_ENTITY_TYPE, entityType); + } + + } + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Exception caught while" + + " decoding complex attribute group - " + exc.getMessage()); + } + + return false; + + } + + /** + * Process self link response. + * + * @param nodeId the node id + */ + private void processSelfLinkResponse(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link" + + " response because nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Cannot process self link response" + + " because can't find node for id = " + nodeId); + return; + } + + JsonNode jsonNode = null; + + try { + jsonNode = mapper.readValue(ain.getOpResult().getResult(), JsonNode.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to marshal json" + + " response str into JsonNode with error, " + exc.getLocalizedMessage()); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + if (jsonNode == null) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse json node str." 
+ + " Parse resulted a null value."); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + Iterator<Entry<String, JsonNode>> fieldNames = jsonNode.fields(); + Entry<String, JsonNode> field = null; + + RelationshipList relationshipList = null; + + while (fieldNames.hasNext()) { + + field = fieldNames.next(); + String fieldName = field.getKey(); + + if ("relationship-list".equals(fieldName)) { + + try { + relationshipList = mapper.readValue(field.getValue().toString(), RelationshipList.class); + + if (relationshipList != null) { + ain.addRelationshipList(relationshipList); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse relationship-list" + + " attribute. Parse resulted in error, " + exc.getLocalizedMessage()); + ain.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_ERROR); + return; + } + + } else { + + JsonNode nodeValue = field.getValue(); + + if (nodeValue != null && nodeValue.isValueNode()) { + + if (loader.getEntityDescriptor(fieldName) == null) { + + /* + * entity property name is not an entity, thus we can add this property name and value + * to our property set + */ + + ain.addProperty(fieldName, nodeValue.asText()); + + } + + } else { + + if (nodeValue.isArray()) { + + if (loader.getEntityDescriptor(fieldName) == null) { + + /* + * entity property name is not an entity, thus we can add this property name and value + * to our property set + */ + + ain.addProperty(field.getKey(), nodeValue.toString()); + + } + + } else { + + ain.addComplexGroup(nodeValue); + + } + + } + } + + } + + String uri = NodeUtils.calculateEditAttributeUri(ain.getSelfLink()); + if (uri != null) { + ain.addProperty(TierSupportUiConstants.URI_ATTR_NAME, uri); + } + + /* + * We need a special behavior for intermediate entities from the REST model + * + * Tenants are not top level entities, and when we want to visualization + * their children, we need to construct keys that include the parent entity query + * keys, the current entity type keys, and the child keys. We'll always have the + * current entity and children, but never the parent entity in the current (1707) REST + * data model. + * + * We have two possible solutions: + * + * 1) Try to use the custom-query approach to learn about the entity keys + * - this could be done, but it could be very expensive for large objects. When we do the first + * query to get a tenant, it will list all the in and out edges related to this entity, + * there is presently no way to filter this. But the approach could be made to work and it would be + * somewhat data-model driven, other than the fact that we have to first realize that the entity + * that is being searched for is not top-level entity. Once we have globally unique ids for resources + * this logic will not be needed and everything will be simpler. The only reason we are in this logic + * at all is to be able to calculate a url for the child entities so we can hash it to generate + * a globally unique id that can be safely used for the node. + * + * *2* Extract the keys from the pathed self-link. + * This is a bad solution and I don't like it but it will be fast for all resource types, as the + * information is already encoded in the URI. 
When we get to a point where we switch to a better + * globally unique entity identity model, then a lot of the code being used to calculate an entity url + * to in-turn generate a deterministic globally unique id will disappear. + * + * + * right now we have the following: + * + * - cloud-regions/cloud-region/{cloud-region-id}/{cloud-owner-id}/tenants/tenant/{tenant-id} + * + */ + + /* + * For all entity types use the self-link extraction method to be consistent. Once we have a + * globally unique identity mechanism for entities, this logic can be revisited. + */ + ain.clearQueryParams(); + ain.addQueryParams(extractQueryParamsFromSelfLink(ain.getSelfLink())); + + ain.changeState(NodeProcessingState.NEIGHBORS_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK); + + } + + /** + * Perform self link resolve. + * + * @param nodeId the node id + */ + private void performSelfLinkResolve(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Resolve of self-link" + + " has been skipped because provided nodeId is null"); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Failed to find node with id, " + nodeId + + ", from node cache. Resolve self-link method has been skipped."); + return; + } + + if (!ain.isSelfLinkPendingResolve()) { + + ain.setSelfLinkPendingResolve(true); + + // kick off async self-link resolution + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "About to process node in SELF_LINK_UNPROCESSED State, link = " + ain.getSelfLink()); + } + + numLinksDiscovered.incrementAndGet(); + + String depthModifier = DEPTH_ALL_MODIFIER; + + /* + * If the current node is the search target, we want to see everything the node has to offer + * from the self-link and not filter it to a single node. 
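+       *
+       * Concretely, the self-link is suffixed with "?depth=all" by default, or with
+       * "?nodes-only" for entity types configured as shallow (see DEPTH_ALL_MODIFIER and
+       * NODES_ONLY_MODIFIER above), which limits how much of the node's neighborhood is
+       * retrieved.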
+ */ + + if (shallowEntities.contains(ain.getEntityType()) && !ain.isRootNode()) { + depthModifier = NODES_ONLY_MODIFIER; + } + + NodeProcessingTransaction txn = new NodeProcessingTransaction(); + txn.setProcessingNode(ain); + txn.setRequestParameters(depthModifier); + aaiWorkOnHand.incrementAndGet(); + supplyAsync( + new PerformNodeSelfLinkProcessingTask(txn, depthModifier, aaiProvider), + aaiExecutorService).whenComplete((nodeTxn, error) -> { + aaiWorkOnHand.decrementAndGet(); + if (error != null) { + + /* + * an error processing the self link should probably result in the node processing + * state shifting to ERROR + */ + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + + totalLinksRetrieved.incrementAndGet(); + + OperationResult opResult = nodeTxn.getOpResult(); + + if (opResult != null && opResult.wasSuccessful()) { + + if (opResult.isResolvedLinkFailure()) { + numFailedLinkResolve.incrementAndGet(); + } + + if (opResult.isResolvedLinkFromCache()) { + numSuccessfulLinkResolveFromCache.incrementAndGet(); + } + + if (opResult.isResolvedLinkFromServer()) { + numSuccessfulLinkResolveFromFromServer.incrementAndGet(); + } + + // success path + nodeTxn.getProcessingNode().setOpResult(opResult); + nodeTxn.getProcessingNode().changeState( + NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED, + NodeProcessingAction.SELF_LINK_RESOLVE_OK); + + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } else { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, "Self Link retrieval for link," + + txn.getSelfLinkWithModifiers() + ", failed with error code," + + nodeTxn.getOpResult().getResultCode() + ", and message," + + nodeTxn.getOpResult().getResult()); + + nodeTxn.getProcessingNode().setSelflinkRetrievalFailure(true); + nodeTxn.getProcessingNode().setSelfLinkProcessed(true); + + nodeTxn.getProcessingNode().changeState(NodeProcessingState.ERROR, + NodeProcessingAction.SELF_LINK_RESOLVE_ERROR); + + nodeTxn.getProcessingNode().setSelfLinkPendingResolve(false); + + } + } + + }); + + } + + } + + + /** + * Process neighbors. 
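+   * Decodes the node's buffered complex attribute groups and relationship lists, then moves the
+   * node to READY when every group resolves cleanly, or to ERROR otherwise.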
+ * + * @param nodeId the node id + */ + private void processNeighbors(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process" + + " neighbors because nodeId is null."); + return; + } + + ActiveInventoryNode ain = nodeCache.get(nodeId); + + if (ain == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESS_NEIGHBORS_ERROR, "Failed to process" + + " neighbors because node could not be found in nodeCache with id, " + nodeId); + return; + } + + /* + * process complex attribute and relationships + */ + + boolean neighborsProcessedSuccessfully = true; + + for (JsonNode n : ain.getComplexGroups()) { + neighborsProcessedSuccessfully &= decodeComplexAttributeGroup(ain, n); + } + + for (RelationshipList relationshipList : ain.getRelationshipLists()) { + neighborsProcessedSuccessfully &= addSelfLinkRelationshipChildren(ain, relationshipList); + } + + + if (neighborsProcessedSuccessfully) { + ain.changeState(NodeProcessingState.READY, NodeProcessingAction.NEIGHBORS_PROCESSED_OK); + } else { + ain.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + } + + + /* + * If neighbors fail to process, there is already a call to change the state within the + * relationship and neighbor processing functions. + */ + + } + + /** + * Find and mark root node. + * + * @param queryParams the query params + * @return true, if successful + */ + private boolean findAndMarkRootNode(QueryParams queryParams) { + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (queryParams.getSearchTargetNodeId().equals(cacheNode.getNodeId())) { + cacheNode.setNodeDepth(0); + cacheNode.setRootNode(true); + LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId()); + return true; + } + } + + return false; + + } + + /** + * Process current node states. + * + * @param rootNodeDiscovered the root node discovered + */ + private void processCurrentNodeStates(boolean rootNodeDiscovered) { + /* + * Force an evaluation of node depths before determining if we should limit state-based + * traversal or processing. + */ + if (rootNodeDiscovered) { + evaluateNodeDepths(); + } + + for (ActiveInventoryNode cacheNode : nodeCache.values()) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "processCurrentNodeState(), nid = " + + cacheNode.getNodeId() + " , nodeDepth = " + cacheNode.getNodeDepth()); + } + + switch (cacheNode.getState()) { + + case INIT: { + processInitialState(cacheNode.getNodeId()); + break; + } + + case READY: + case ERROR: { + break; + } + + case SELF_LINK_UNRESOLVED: { + performSelfLinkResolve(cacheNode.getNodeId()); + break; + } + + case SELF_LINK_RESPONSE_UNPROCESSED: { + processSelfLinkResponse(cacheNode.getNodeId()); + break; + } + + case NEIGHBORS_UNPROCESSED: { + + /* + * We use the rootNodeDiscovered flag to ignore depth retrieval thresholds until the root + * node is identified. Then the evaluative depth calculations should re-balance the graph + * around the root node. 
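+           *
+           * In other words, until the root node is known every NEIGHBORS_UNPROCESSED node is
+           * expanded; afterwards only nodes shallower than the configured maximum traversal
+           * depth are.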
+ */ + + if (!rootNodeDiscovered || cacheNode.getNodeDepth() < VisualizationConfig.getConfig() + .getMaxSelfLinkTraversalDepth()) { + + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "SLNC::processCurrentNodeState() -- Node at max depth," + + " halting processing at current state = -- " + + cacheNode.getState() + " nodeId = " + cacheNode.getNodeId()); + } + + + + processNeighbors(cacheNode.getNodeId()); + + } + + break; + } + default: + break; + + + + } + + } + + } + + /** + * Adds the complex group to node. + * + * @param targetNode the target node + * @param attributeGroup the attribute group + * @return true, if successful + */ + private boolean addComplexGroupToNode(ActiveInventoryNode targetNode, JsonNode attributeGroup) { + + if (attributeGroup == null) { + targetNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_OK); + return false; + } + + RelationshipList relationshipList = null; + + if (attributeGroup.isObject()) { + + Iterator<Entry<String, JsonNode>> fields = attributeGroup.fields(); + Entry<String, JsonNode> field = null; + String fieldName; + JsonNode fieldValue; + + while (fields.hasNext()) { + field = fields.next(); + fieldName = field.getKey(); + fieldValue = field.getValue(); + + if (fieldValue.isObject()) { + + if (fieldName.equals("relationship-list")) { + + try { + relationshipList = + mapper.readValue(field.getValue().toString(), RelationshipList.class); + + if (relationshipList != null) { + targetNode.addRelationshipList(relationshipList); + } + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.SELF_LINK_JSON_PARSE_ERROR, "Failed to parse" + + " relationship-list attribute. Parse resulted in error, " + + exc.getLocalizedMessage()); + targetNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.COMPLEX_ATTRIBUTE_GROUP_PARSE_ERROR); + return false; + } + + } else { + targetNode.addComplexGroup(fieldValue); + } + + } else if (fieldValue.isArray()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected array type with a key = " + fieldName); + } + } else if (fieldValue.isValueNode()) { + if (loader.getEntityDescriptor(field.getKey()) == null) { + /* + * property key is not an entity type, add it to our property set. 
+ */ + targetNode.addProperty(field.getKey(), fieldValue.asText()); + } + + } + } + + } else if (attributeGroup.isArray()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected array type for attributeGroup = " + attributeGroup); + } + } else if (attributeGroup.isValueNode()) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Unexpected value type for attributeGroup = " + attributeGroup); + } + } + + return true; + } + + public int getNumSuccessfulLinkResolveFromCache() { + return numSuccessfulLinkResolveFromCache.get(); + } + + public int getNumSuccessfulLinkResolveFromFromServer() { + return numSuccessfulLinkResolveFromFromServer.get(); + } + + public int getNumFailedLinkResolve() { + return numFailedLinkResolve.get(); + } + + public InlineMessage getInlineMessage() { + return inlineMessage; + } + + public void setInlineMessage(InlineMessage inlineMessage) { + this.inlineMessage = inlineMessage; + } + + public void setMaxSelfLinkTraversalDepth(int depth) { + this.maxSelfLinkTraversalDepth = depth; + } + + public int getMaxSelfLinkTraversalDepth() { + return this.maxSelfLinkTraversalDepth; + } + + public ConcurrentHashMap<String, ActiveInventoryNode> getNodeCache() { + return nodeCache; + } + + /** + * Gets the relationship primary key values. + * + * @param r the r + * @param entityType the entity type + * @param pkeyNames the pkey names + * @return the relationship primary key values + */ + private String getRelationshipPrimaryKeyValues(Relationship r, String entityType, + List<String> pkeyNames) { + + StringBuilder sb = new StringBuilder(64); + + if (pkeyNames.size() > 0) { + String primaryKey = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(0)); + if (primaryKey != null) { + + sb.append(primaryKey); + + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract" + + " keyName, " + entityType + "." + pkeyNames.get(0) + + ", from relationship data, " + r.toString()); + return null; + } + + for (int i = 1; i < pkeyNames.size(); i++) { + + String kv = extractKeyValueFromRelationData(r, entityType + "." + pkeyNames.get(i)); + if (kv != null) { + sb.append("/").append(kv); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: failed to extract keyName, " + + entityType + "." + pkeyNames.get(i) + + ", from relationship data, " + r.toString()); + return null; + } + } + + return sb.toString(); + + } + + return null; + + } + + /** + * Extract key value from relation data. + * + * @param r the r + * @param keyName the key name + * @return the string + */ + private String extractKeyValueFromRelationData(Relationship r, String keyName) { + + RelationshipData[] rdList = r.getRelationshipData(); + + for (RelationshipData relData : rdList) { + + if (relData.getRelationshipKey().equals(keyName)) { + return relData.getRelationshipValue(); + } + } + + return null; + } + + /** + * Determine node id and key. 
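+   * Derives the node's primary key name and value from its properties and, when all of the
+   * OXM-declared key parts are present, registers a generic-query parameter of the form
+   * "entity-type.primary-key-name:primary-key-value" (hypothetically,
+   * "tenant.tenant-id:my-tenant-id").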
+ * + * @param ain the ain + * @return true, if successful + */ + private boolean addNodeQueryParams(ActiveInventoryNode ain) { + + if (ain == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "ActiveInventoryNode is null"); + return false; + } + + List<String> pkeyNames = + loader.getEntityDescriptor(ain.getEntityType()).getPrimaryKeyAttributeName(); + + if (pkeyNames == null || pkeyNames.size() == 0) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE_NODE_ID, "Primary key names is empty"); + return false; + } + + StringBuilder sb = new StringBuilder(64); + + if (pkeyNames.size() > 0) { + String primaryKey = ain.getProperties().get(pkeyNames.get(0)); + if (primaryKey != null) { + sb.append(primaryKey); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, " + + pkeyNames.get(0) + ", from entity properties"); + return false; + } + + for (int i = 1; i < pkeyNames.size(); i++) { + + String kv = ain.getProperties().get(pkeyNames.get(i)); + if (kv != null) { + sb.append("/").append(kv); + } else { + // this should be a fatal error because unless we can + // successfully retrieve all the expected keys we'll end up + // with a garbage node + LOG.error(AaiUiMsgs.EXTRACTION_ERROR, "ERROR: Failed to extract keyName, " + + pkeyNames.get(i) + ", from entity properties"); + return false; + } + } + + /*final String nodeId = NodeUtils.generateUniqueShaDigest(ain.getEntityType(), + NodeUtils.concatArray(pkeyNames, "/"), sb.toString());*/ + + //ain.setNodeId(nodeId); + ain.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/")); + ain.setPrimaryKeyValue(sb.toString()); + + if (ain.getEntityType() != null && ain.getPrimaryKeyName() != null + && ain.getPrimaryKeyValue() != null) { + ain.addQueryParam( + ain.getEntityType() + "." + ain.getPrimaryKeyName() + ":" + ain.getPrimaryKeyValue()); + } + return true; + + } + + return false; + + } + + /** + * Adds the self link relationship children. 
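+   *
+   * For each relationship entry a skeleton neighbor node is created: its node id
+   * is the SHA digest of the repaired related-link, its query params are copied
+   * from the relationship-data key/value pairs, and it is attached to the parent
+   * via addOutboundNeighbor(). A neighbor whose entity type has no OXM
+   * descriptor is parked in the ERROR state instead of being traversed.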
+ * + * @param processingNode the processing node + * @param relationshipList the relationship list + * @return true, if successful + */ + private boolean addSelfLinkRelationshipChildren(ActiveInventoryNode processingNode, + RelationshipList relationshipList) { + + if (relationshipList == null) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "No relationships added to parent node = " + + processingNode.getNodeId() + " because relationshipList is empty"); + processingNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + return false; + } + + OxmModelLoader modelLoader = OxmModelLoader.getInstance(); + + Relationship[] relationshipArray = relationshipList.getRelationshipList(); + OxmEntityDescriptor descriptor = null; + String repairedSelfLink = null; + + if (relationshipArray != null) { + + ActiveInventoryNode newNode = null; + + for (Relationship r : relationshipArray) { + + repairedSelfLink = aaiConfig.repairSelfLink(r.getRelatedLink()); + + String nodeId = NodeUtils.generateUniqueShaDigest(repairedSelfLink); + + if (nodeId == null) { + + LOG.error(AaiUiMsgs.SKIPPING_RELATIONSHIP, r.toString()); + processingNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NODE_IDENTITY_ERROR); + return false; + } + + newNode = new ActiveInventoryNode(); + + String entityType = r.getRelatedTo(); + + if (r.getRelationshipData() != null) { + for (RelationshipData rd : r.getRelationshipData()) { + newNode.addQueryParam(rd.getRelationshipKey() + ":" + rd.getRelationshipValue()); + } + } + + descriptor = modelLoader.getEntityDescriptor(r.getRelatedTo()); + + newNode.setNodeId(nodeId); + newNode.setEntityType(entityType); + newNode.setSelfLink(repairedSelfLink); + + processingNode.addOutboundNeighbor(nodeId); + + if (descriptor != null) { + + List<String> pkeyNames = descriptor.getPrimaryKeyAttributeName(); + + newNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED, + NodeProcessingAction.SELF_LINK_SET); + + newNode.setPrimaryKeyName(NodeUtils.concatArray(pkeyNames, "/")); + + String primaryKeyValues = getRelationshipPrimaryKeyValues(r, entityType, pkeyNames); + newNode.setPrimaryKeyValue(primaryKeyValues); + + } else { + + LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, + "Failed to parse entity because OXM descriptor could not be found for type = " + + r.getRelatedTo()); + + newNode.changeState(NodeProcessingState.ERROR, + NodeProcessingAction.NEIGHBORS_PROCESSED_ERROR); + + } + + if (nodeCache.putIfAbsent(nodeId, newNode) != null) { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Failed to add node to nodeCache because it already exists. Node id = " + + newNode.getNodeId()); + } + } + + } + + } + + return true; + + } + + /** + * Process initial state. + * + * @param nodeId the node id + */ + private void processInitialState(String nodeId) { + + if (nodeId == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node id is null"); + return; + } + + ActiveInventoryNode cachedNode = nodeCache.get(nodeId); + + if (cachedNode == null) { + LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_INITIAL_STATE, "Node cannot be" + + " found for nodeId, " + nodeId); + return; + } + + if (cachedNode.getSelfLink() == null) { + + if (cachedNode.getNodeId() == null ) { + + /* + * if the self link is null at the INIT state, which could be valid if this node is a + * complex attribute group which didn't originate from a self-link, but in that situation + * both the node id and node key should already be set. 
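+       *
+       * The transitions implemented below are:
+       *   INIT, no self-link, no node id  -> ERROR
+       *   INIT, no self-link, node id set -> SELF_LINK_RESPONSE_UNPROCESSED
+       *   INIT, self-link set             -> SELF_LINK_UNRESOLVED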
+       */
+
+      cachedNode.changeState(NodeProcessingState.ERROR, NodeProcessingAction.NODE_IDENTITY_ERROR);
+
+    }
+
+    if (cachedNode.getNodeId() != null) {
+
+      /*
+       * This should be the success path branch if the self-link is not set
+       */
+
+      cachedNode.changeState(NodeProcessingState.SELF_LINK_RESPONSE_UNPROCESSED,
+          NodeProcessingAction.SELF_LINK_RESPONSE_PARSE_OK);
+
+    }
+
+    } else {
+
+      if (cachedNode.hasResolvedSelfLink()) {
+        LOG.error(AaiUiMsgs.INVALID_RESOLVE_STATE_DURING_INIT);
+        cachedNode.changeState(NodeProcessingState.ERROR,
+            NodeProcessingAction.UNEXPECTED_STATE_TRANSITION);
+      } else {
+        cachedNode.changeState(NodeProcessingState.SELF_LINK_UNRESOLVED,
+            NodeProcessingAction.SELF_LINK_SET);
+      }
+    }
+  }
+
+  /**
+   * Process searchable entity.
+   *
+   * @param searchTargetEntity the search target entity
+   * @param queryParams the query params
+   */
+  private void processSearchableEntity(SearchableEntity searchTargetEntity, QueryParams queryParams) {
+
+    if (searchTargetEntity == null) {
+      return;
+    }
+
+    if (searchTargetEntity.getId() == null) {
+      LOG.error(AaiUiMsgs.FAILED_TO_PROCESS_SKELETON_NODE, "Failed to process skeleton"
+          + " node because nodeId is null for node, " + searchTargetEntity.getLink());
+      return;
+    }
+
+    ActiveInventoryNode newNode = new ActiveInventoryNode();
+
+    newNode.setNodeId(searchTargetEntity.getId());
+    newNode.setEntityType(searchTargetEntity.getEntityType());
+    newNode.setPrimaryKeyName(getEntityTypePrimaryKeyName(searchTargetEntity.getEntityType()));
+    newNode.setPrimaryKeyValue(searchTargetEntity.getEntityPrimaryKeyValue());
+
+    if (newNode.getEntityType() != null && newNode.getPrimaryKeyName() != null
+        && newNode.getPrimaryKeyValue() != null) {
+      newNode.addQueryParam(
+          newNode.getEntityType() + "." + newNode.getPrimaryKeyName() + ":" + newNode.getPrimaryKeyValue());
+    }
+    /*
+     * This code may need some explanation. In any graph there will be a single root node. The root
+     * node is really the center of the universe, and for now, we are tagging the search target as
+     * the root node. Everything else in the visualization of the graph will be centered around this
+     * node as the focal point of interest.
+     *
+     * Due to its special nature, there will only ever be one root node, and its node depth will
+     * always be equal to zero.
+     */
+
+    if (queryParams.getSearchTargetNodeId().equals(newNode.getNodeId())) {
+      newNode.setNodeDepth(0);
+      newNode.setRootNode(true);
+      LOG.info(AaiUiMsgs.ROOT_NODE_DISCOVERED, queryParams.getSearchTargetNodeId());
+    }
+
+    newNode.setSelfLink(searchTargetEntity.getLink());
+
+    nodeCache.putIfAbsent(newNode.getNodeId(), newNode);
+  }
+
+  /**
+   * Checks for outstanding work.
+   *
+   * @return true, if any node still has pending work
+   */
+  private boolean hasOutStandingWork() {
+
+    int numNodesWithPendingStates = 0;
+
+    /*
+     * Force an evaluation of node depths before determining if we should limit state-based
+     * traversal or processing.
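+     *
+     * READY and ERROR are the terminal states; everything else counts as pending
+     * work, except NEIGHBORS_UNPROCESSED nodes that are already at the maximum
+     * traversal depth, which are deliberately left as-is.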
+ */ + + evaluateNodeDepths(); + + for (ActiveInventoryNode n : nodeCache.values()) { + + switch (n.getState()) { + + case READY: + case ERROR: { + // do nothing, these are our normal + // exit states + break; + } + + case NEIGHBORS_UNPROCESSED: { + + if (n.getNodeDepth() < VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { + /* + * Only process our neighbors relationships if our current depth is less than the max + * depth + */ + numNodesWithPendingStates++; + } + + break; + } + + default: { + + /* + * for all other states, there is work to be done + */ + numNodesWithPendingStates++; + } + + } + + } + + LOG.debug(AaiUiMsgs.OUTSTANDING_WORK_PENDING_NODES, String.valueOf(numNodesWithPendingStates)); + + return (numNodesWithPendingStates > 0); + + } + + /** + * Process self links. + * + * @param skeletonNode the skeleton node + * @param queryParams the query params + */ + public void processSelfLinks(SearchableEntity searchtargetEntity, QueryParams queryParams) { + + try { + + if (searchtargetEntity == null) { + LOG.error(AaiUiMsgs.SELF_LINK_PROCESSING_ERROR, contextIdStr + " - Failed to" + + " processSelfLinks, searchtargetEntity is null"); + return; + } + + processSearchableEntity(searchtargetEntity, queryParams); + + long startTimeInMs = System.currentTimeMillis(); + + /* + * wait until all transactions are complete or guard-timer expires. + */ + + long totalResolveTime = 0; + boolean hasOutstandingWork = hasOutStandingWork(); + boolean outstandingWorkGuardTimerFired = false; + long maxGuardTimeInMs = 5000; + long guardTimeInMs = 0; + boolean foundRootNode = false; + + + /* + * TODO: Put a count-down-latch in place of the while loop, but if we do that then + * we'll need to decouple the visualization processing from the main thread so it can continue to process while + * the main thread is waiting on for count-down-latch gate to open. This may also be easier once we move to the + * VisualizationService + VisualizationContext ideas. + */ + + + while (hasOutstandingWork || !outstandingWorkGuardTimerFired) { + + if (!foundRootNode) { + foundRootNode = findAndMarkRootNode(queryParams); + } + + processCurrentNodeStates(foundRootNode); + + verifyOutboundNeighbors(); + + try { + Thread.sleep(500); + } catch (InterruptedException exc) { + LOG.error(AaiUiMsgs.PROCESSING_LOOP_INTERUPTED, exc.getMessage()); + return; + } + + totalResolveTime = (System.currentTimeMillis() - startTimeInMs); + + if (!hasOutstandingWork) { + + guardTimeInMs += 500; + + if (guardTimeInMs > maxGuardTimeInMs) { + outstandingWorkGuardTimerFired = true; + } + } else { + guardTimeInMs = 0; + } + + hasOutstandingWork = hasOutStandingWork(); + + } + + long opTime = System.currentTimeMillis() - startTimeInMs; + + LOG.info(AaiUiMsgs.ALL_TRANSACTIONS_RESOLVED, String.valueOf(totalResolveTime), + String.valueOf(totalLinksRetrieved.get()), String.valueOf(opTime)); + + } catch (Exception exc) { + LOG.error(AaiUiMsgs.VISUALIZATION_OUTPUT_ERROR, exc.getMessage()); + } + + } + + /** + * Verify outbound neighbors. 
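+   *
+   * Back-fills the inbound side of every edge: for each outbound reference from
+   * srcNode to targetNode, the target gains a matching inbound reference, plus a
+   * reciprocal outbound reference when makeAllNeighborsBidirectional() is
+   * enabled.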
+ */ + private void verifyOutboundNeighbors() { + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null && srcNode.getNodeId() != null) { + + targetNode.addInboundNeighbor(srcNode.getNodeId()); + + if (VisualizationConfig.getConfig().makeAllNeighborsBidirectional()) { + targetNode.addOutboundNeighbor(srcNode.getNodeId()); + } + + } + + } + + } + + } + + /** + * Evaluate node depths. + */ + private void evaluateNodeDepths() { + + int numChanged = -1; + int numAttempts = 0; + + while (numChanged != 0) { + + numChanged = 0; + numAttempts++; + + for (ActiveInventoryNode srcNode : nodeCache.values()) { + + if (srcNode.getState() == NodeProcessingState.INIT) { + + /* + * this maybe the only state that we don't want to to process the node depth on, because + * typically it won't have any valid fields set, and it may remain in a partial state + * until we have processed the self-link. + */ + + continue; + + } + + for (String targetNodeId : srcNode.getOutboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + + for (String targetNodeId : srcNode.getInboundNeighbors()) { + ActiveInventoryNode targetNode = nodeCache.get(targetNodeId); + + if (targetNode != null) { + + if (targetNode.changeDepth(srcNode.getNodeDepth() + 1)) { + numChanged++; + } + } + } + } + + if (numAttempts >= MAX_DEPTH_EVALUATION_ATTEMPTS) { + LOG.info(AaiUiMsgs.MAX_EVALUATION_ATTEMPTS_EXCEEDED); + return; + } + + } + + if (LOG.isDebugEnabled()) { + if (numAttempts > 0) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in " + numAttempts + " attempts"); + } else { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Evaluate node depths completed in 0 attempts because all nodes at correct depth"); + } + } + + } + + + /** + * Gets the entity type primary key name. + * + * @param entityType the entity type + * @return the entity type primary key name + */ + + + private String getEntityTypePrimaryKeyName(String entityType) { + + if (entityType == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary key" + + " name because entity type is null"); + return null; + } + + OxmEntityDescriptor descriptor = loader.getEntityDescriptor(entityType); + + if (descriptor == null) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "oxm entity" + + " descriptor for entityType = " + entityType); + return null; + } + + List<String> pkeyNames = descriptor.getPrimaryKeyAttributeName(); + + if (pkeyNames == null || pkeyNames.size() == 0) { + LOG.error(AaiUiMsgs.FAILED_TO_DETERMINE, "node primary" + + " key because descriptor primary key names is empty"); + return null; + } + + return NodeUtils.concatArray(pkeyNames, "/"); + + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationService.java b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationService.java new file mode 100644 index 0000000..aed7cd2 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationService.java @@ -0,0 +1,387 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. 
+ * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.services; + +import java.io.IOException; +import java.security.SecureRandom; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; + +import javax.servlet.ServletException; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.aai.ActiveInventoryAdapter; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryRestConfig; +import org.openecomp.sparky.dal.cache.EntityCache; +import org.openecomp.sparky.dal.cache.PersistentEntityCache; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchAdapter; +import org.openecomp.sparky.dal.elasticsearch.ElasticSearchDataProvider; +import org.openecomp.sparky.dal.elasticsearch.config.ElasticSearchConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.rest.RestClientBuilder; +import org.openecomp.sparky.dal.rest.RestfulDataAccessor; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.synchronizer.entity.SearchableEntity; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; +import org.openecomp.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.openecomp.sparky.viewandinspect.entity.D3VisualizationOutput; +import org.openecomp.sparky.viewandinspect.entity.GraphMeta; +import org.openecomp.sparky.viewandinspect.entity.QueryParams; +import org.openecomp.sparky.viewandinspect.entity.QueryRequest; + +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +public class VisualizationService { + + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(VisualizationService.class); + + private OxmModelLoader loader; + private ObjectMapper mapper = new ObjectMapper(); + + private final ActiveInventoryDataProvider aaiProvider; + private final ActiveInventoryRestConfig aaiRestConfig; + private final ElasticSearchDataProvider esProvider; + private final ElasticSearchConfig esConfig; + private final ExecutorService aaiExecutorService; + + private ConcurrentHashMap<Long, VisualizationContext> contextMap; + private final SecureRandom secureRandom; + + private 
ActiveInventoryConfig aaiConfig; + private VisualizationConfig visualizationConfig; + + public VisualizationService(OxmModelLoader loader) throws Exception { + this.loader = loader; + + aaiRestConfig = ActiveInventoryConfig.getConfig().getAaiRestConfig(); + + EntityCache cache = null; + secureRandom = new SecureRandom(); + + ActiveInventoryAdapter aaiAdapter = new ActiveInventoryAdapter(new RestClientBuilder()); + + if (aaiRestConfig.isCacheEnabled()) { + cache = new PersistentEntityCache(aaiRestConfig.getStorageFolderOverride(), + aaiRestConfig.getNumCacheWorkers()); + + aaiAdapter.setCacheEnabled(true); + aaiAdapter.setEntityCache(cache); + } + + this.aaiProvider = aaiAdapter; + + RestClientBuilder esClientBuilder = new RestClientBuilder(); + esClientBuilder.setUseHttps(false); + RestfulDataAccessor nonCachingRestProvider = new RestfulDataAccessor(esClientBuilder); + this.esConfig = ElasticSearchConfig.getConfig(); + this.esProvider = new ElasticSearchAdapter(nonCachingRestProvider, this.esConfig); + + this.mapper = new ObjectMapper(); + mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + this.contextMap = new ConcurrentHashMap<Long, VisualizationContext>(); + this.visualizationConfig = VisualizationConfig.getConfig(); + this.aaiConfig = ActiveInventoryConfig.getConfig(); + this.aaiExecutorService = NodeUtils.createNamedExecutor("SLNC-WORKER", + aaiConfig.getAaiRestConfig().getNumResolverWorkers(), LOG); + } + + public OxmModelLoader getLoader() { + return loader; + } + + public void setLoader(OxmModelLoader loader) { + this.loader = loader; + } + + /** + * Analyze query request body. + * + * @param queryRequestJson the query request json + * @return the query request + */ + + public QueryRequest analyzeQueryRequestBody(String queryRequestJson) { + + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "analyzeQueryRequestBody()," + " queryRequestJson = " + queryRequestJson); + + ObjectMapper nonEmptyMapper = new ObjectMapper(); + nonEmptyMapper.setSerializationInclusion(Include.NON_EMPTY); + + QueryRequest queryBody = null; + + try { + queryBody = nonEmptyMapper.readValue(queryRequestJson, QueryRequest.class); + } catch (Exception exc) { + LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, "Analyzing query request body.", + exc.getLocalizedMessage()); + } + + return queryBody; + + } + + /** + * Log optime. 
+ * + * @param method the method + * @param opStartTimeInMs the op start time in ms + */ + private void logOptime(String method, long opStartTimeInMs) { + LOG.info(AaiUiMsgs.OPERATION_TIME, method, + String.valueOf(System.currentTimeMillis() - opStartTimeInMs)); + } + + private SearchableEntity extractSearchableEntityFromElasticEntity(OperationResult operationResult) { + if (operationResult == null || !operationResult.wasSuccessful()) { + // error, return empty collection + return null; + } + + SearchableEntity sourceEntity = null; + if (operationResult.wasSuccessful()) { + + try { + JsonNode elasticValue = mapper.readValue(operationResult.getResult(), JsonNode.class); + + if (elasticValue != null) { + JsonNode sourceField = elasticValue.get("_source"); + + if (sourceField != null) { + sourceEntity = new SearchableEntity(); + + String entityType = NodeUtils.extractFieldValueFromObject(sourceField, "entityType"); + sourceEntity.setEntityType(entityType); + String entityPrimaryKeyValue = NodeUtils.extractFieldValueFromObject(sourceField, "entityPrimaryKeyValue"); + sourceEntity.setEntityPrimaryKeyValue(entityPrimaryKeyValue); + String link = NodeUtils.extractFieldValueFromObject(sourceField, "link"); + sourceEntity.setLink(link); + String lastmodTimestamp = NodeUtils.extractFieldValueFromObject(sourceField, "lastmodTimestamp"); + sourceEntity.setEntityTimeStamp(lastmodTimestamp); + } + } + } catch (IOException ioe) { + LOG.error(AaiUiMsgs.JSON_CONVERSION_ERROR, "a json node ", ioe.getLocalizedMessage()); + } + } + return sourceEntity; + } + + /** + * Builds the visualization using generic query. + * + * @param queryRequest the query request + * @return the operation result + */ + public OperationResult buildVisualizationUsingGenericQuery(QueryRequest queryRequest) { + + OperationResult returnValue = new OperationResult(); + OperationResult dataCollectionResult = null; + QueryParams queryParams = null; + SearchableEntity sourceEntity = null; + + try { + + /* + * Here is where we need to make a dip to elastic-search for the self-link by entity-id (link + * hash). 
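+       *
+       * The hash id from the front-end is used directly as the document id, so this
+       * is a single retrieveEntityById() lookup; the _source of the returned
+       * document is expected to carry entityType, entityPrimaryKeyValue, link and
+       * lastmodTimestamp (see extractSearchableEntityFromElasticEntity above).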
+       */
+      dataCollectionResult = esProvider.retrieveEntityById(queryRequest.getHashId());
+      sourceEntity = extractSearchableEntityFromElasticEntity(dataCollectionResult);
+
+      if (sourceEntity != null) {
+        sourceEntity.generateId();
+      }
+
+      queryParams = new QueryParams();
+      queryParams.setSearchTargetNodeId(queryRequest.getHashId());
+
+    } catch (Exception e1) {
+      LOG.error(AaiUiMsgs.FAILED_TO_GET_NODES_QUERY_RESULT, e1.getLocalizedMessage());
+      dataCollectionResult = new OperationResult(500, "Failed to get nodes-query result from AAI");
+    }
+
+    if (dataCollectionResult.getResultCode() == 200) {
+
+      String d3OutputJsonOutput = null;
+
+      try {
+
+        d3OutputJsonOutput = getVisualizationOutputBasedonGenericQuery(sourceEntity, queryParams);
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
+              "Generated D3" + " output as json = " + d3OutputJsonOutput);
+        }
+
+        if (d3OutputJsonOutput != null) {
+          returnValue.setResultCode(200);
+          returnValue.setResult(d3OutputJsonOutput);
+        } else {
+          returnValue.setResult(500, "Failed to generate D3 graph visualization");
+        }
+
+      } catch (Exception exc) {
+        returnValue.setResult(500,
+            "Failed to generate D3 graph visualization, due to a servlet exception.");
+        LOG.error(AaiUiMsgs.ERROR_D3_GRAPH_VISUALIZATION, exc.getLocalizedMessage());
+      }
+    } else {
+      returnValue.setResult(dataCollectionResult.getResultCode(), dataCollectionResult.getResult());
+    }
+
+    return returnValue;
+
+  }
+
+  /**
+   * Gets the visualization output based on a generic query.
+   *
+   * @param searchtargetEntity entity that will be used to start visualization flow
+   * @param queryParams the query params
+   * @return the visualization output based on the generic query
+   * @throws ServletException the servlet exception
+   */
+  private String getVisualizationOutputBasedonGenericQuery(SearchableEntity searchtargetEntity,
+      QueryParams queryParams) throws ServletException {
+
+    long opStartTimeInMs = System.currentTimeMillis();
+
+    VisualizationTransformer transformer = null;
+    try {
+      transformer = new VisualizationTransformer();
+    } catch (Exception exc) {
+      throw new ServletException(
+          "Failed to create VisualizationTransformer instance because of exception", exc);
+    }
+
+    VisualizationContext visContext = null;
+    long contextId = secureRandom.nextLong();
+    try {
+      visContext = new VisualizationContext(contextId, aaiProvider, aaiExecutorService, loader);
+      contextMap.putIfAbsent(contextId, visContext);
+    } catch (Exception e1) {
+      LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT,
+          "While building Visualization Context, " + e1.getLocalizedMessage());
+      throw new ServletException(e1);
+    }
+
+    String jsonResponse = null;
+
+    long startTimeInMs = System.currentTimeMillis();
+
+    visContext.processSelfLinks(searchtargetEntity, queryParams);
+    contextMap.remove(contextId);
+
+    logOptime("collectSelfLinkNodes()", startTimeInMs);
+
+    /*
+     * Flatten the graphs into a set of Graph and Link nodes. In this method I want the node graph
+     * resulting from the edge-tag-query to be represented first, and then we'll layer in
+     * relationship data.
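+     *
+     * Concretely, the steps below are: dump the node cache (debug only), then
+     * buildFlatNodeArrayFromGraphCollection(), buildLinksFromGraphCollection(),
+     * addSearchTargetAttributesToRootNode(), and finally
+     * generateVisualizationOutput() to assemble the D3 payload.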
+ */ + long overlayDataStartTimeInMs = System.currentTimeMillis(); + + Map<String, ActiveInventoryNode> cachedNodeMap = visContext.getNodeCache(); + + if (LOG.isDebugEnabled()) { + + StringBuilder sb = new StringBuilder(128); + + sb.append("\nCached Node Map:\n"); + for (String k : cachedNodeMap.keySet()) { + sb.append("\n----"); + sb.append("\n").append(cachedNodeMap.get(k).dumpNodeTree(true)); + } + + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString()); + } + + transformer.buildFlatNodeArrayFromGraphCollection(cachedNodeMap); + transformer.buildLinksFromGraphCollection(cachedNodeMap); + + /* + * - Apply configuration-driven styling + * - Build the final transformation response object + * - Use information we have to populate the GraphMeta object + */ + + transformer.addSearchTargetAttributesToRootNode(); + + GraphMeta graphMeta = new GraphMeta(); + + D3VisualizationOutput output = null; + try { + output = transformer + .generateVisualizationOutput((System.currentTimeMillis() - opStartTimeInMs), graphMeta); + } catch (JsonProcessingException exc) { + throw new ServletException("Caught an exception while generation visualization output", exc); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.FAILURE_TO_PROCESS_REQUEST, exc.getLocalizedMessage()); + } + + output.setInlineMessage(visContext.getInlineMessage()); + output.getGraphMeta().setNumLinkResolveFailed(visContext.getNumFailedLinkResolve()); + output.getGraphMeta().setNumLinksResolvedSuccessfullyFromCache( + visContext.getNumSuccessfulLinkResolveFromCache()); + output.getGraphMeta().setNumLinksResolvedSuccessfullyFromServer( + visContext.getNumSuccessfulLinkResolveFromFromServer()); + + try { + jsonResponse = transformer.convertVisualizationOutputToJson(output); + } catch (JsonProcessingException jpe) { + throw new ServletException( + "Caught an exception while converting visualization output to json", jpe); + } + + logOptime("[build flat node array, add relationship data, search target," + + " color scheme, and generate visualization output]", overlayDataStartTimeInMs); + + logOptime("doFilter()", opStartTimeInMs); + + return jsonResponse; + + } + + public void shutdown() { + aaiProvider.shutdown(); + aaiExecutorService.shutdown(); + esProvider.shutdown(); + } +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationTransformer.java b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationTransformer.java new file mode 100644 index 0000000..a6803a6 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/services/VisualizationTransformer.java @@ -0,0 +1,320 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.services; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.util.ConfigHelper; +import org.openecomp.sparky.viewandinspect.config.VisualizationConfig; +import org.openecomp.sparky.viewandinspect.entity.ActiveInventoryNode; +import org.openecomp.sparky.viewandinspect.entity.D3VisualizationOutput; +import org.openecomp.sparky.viewandinspect.entity.GraphMeta; +import org.openecomp.sparky.viewandinspect.entity.JsonNode; +import org.openecomp.sparky.viewandinspect.entity.JsonNodeLink; +import org.openecomp.sparky.viewandinspect.entity.NodeDebug; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; + +/** + * The idea here is to receive a collection of graphs and then fold them together (or not) based on + * configuration. The first goal will be to fold all like-resources together, but the choice of + * folding could/should be configurable, and will simply change the degree of link based nodes when + * we generate the Node-Array and Link-Array output. + * + * @author DAVEA + * + */ + +public class VisualizationTransformer { + + private static final Logger LOG = LoggerFactory.getInstance().getLogger( + VisualizationTransformer.class); + + List<JsonNode> flatNodeArray = new ArrayList<JsonNode>(); + Set<String> enrichableUriPrefixes = null; + + /* + * Maybe this isn't a string but Json-Model objects that we will convert to final string + * representation when we dump the node-array and link-array collections the post-data blob in the + * HttpServletResponse. + */ + + List<JsonNodeLink> linkArrayOutput = new ArrayList<JsonNodeLink>(); + + + + private VisualizationConfig visualizationConfig; + + + /** + * Instantiates a new visualization transformer. + * + * @throws Exception the exception + */ + public VisualizationTransformer() throws Exception { + visualizationConfig = VisualizationConfig.getConfig(); + + } + + + /** + * Log optime. + * + * @param method the method + * @param startTimeInMs the start time in ms + */ + private void logOptime(String method, long startTimeInMs) { + LOG.info(AaiUiMsgs.OPERATION_TIME, method, + String.valueOf((System.currentTimeMillis() - startTimeInMs))); + } + + /** + * Adds the search target attributes to root node. + */ + public void addSearchTargetAttributesToRootNode() { + + for (JsonNode n : flatNodeArray) { + if (n.isRootNode()) { + n.getNodeMeta().setSearchTarget(true); + n.getNodeMeta().setClassName(visualizationConfig.getSelectedSearchedNodeClassName()); + } + + } + + } + + /** + * Generate visualization output. 
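+   *
+   * The returned D3VisualizationOutput bundles the flat node array, the link
+   * array, a per-item-type counter (pegCounter) and a GraphMeta block whose node
+   * and link counts and render time are filled in here. A rough sketch of the
+   * resulting JSON shape (field names illustrative, not authoritative):
+   *
+   * <pre>
+   * { "graphMeta" : { "numNodes" : 12, "numLinks" : 11, ... },
+   *   "nodes"     : [ ... ],
+   *   "links"     : [ ... ] }
+   * </pre>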
+ * + * @param preProcessingOpTimeInMs the pre processing op time in ms + * @param graphMeta the graph meta + * @return the d 3 visualization output + * @throws JsonProcessingException the json processing exception + * @throws IOException Signals that an I/O exception has occurred. + */ + + public D3VisualizationOutput generateVisualizationOutput(long preProcessingOpTimeInMs, + GraphMeta graphMeta) throws JsonProcessingException, IOException { + + long opStartTimeInMs = System.currentTimeMillis(); + + /* + * iterate over the flat collection, and only add the graph nodes to the graph node collection + */ + + D3VisualizationOutput output = new D3VisualizationOutput(); + + output.setGraphMeta(graphMeta); + + for (JsonNode n : flatNodeArray) { + if ( n.getItemType()!= null) { + output.pegCounter(n.getItemType()); + } + } + + output.addNodes(flatNodeArray); + output.addLinks(linkArrayOutput); + + int numNodes = flatNodeArray.size(); + int numLinks = linkArrayOutput.size(); + + LOG.info(AaiUiMsgs.VISUALIZATION_GRAPH_OUTPUT, String.valueOf(numNodes), + String.valueOf(numLinks)); + + if (numLinks < (numNodes - 1)) { + LOG.warn(AaiUiMsgs.DANGLING_NODE_WARNING, String.valueOf(numLinks), + String.valueOf(numNodes)); + } + + ObjectMapper mapper = new ObjectMapper(); + + final String fileContent = ConfigHelper.getFileContents( + System.getProperty("AJSC_HOME") + visualizationConfig.getAaiEntityNodeDescriptors()); + com.fasterxml.jackson.databind.JsonNode aaiEntityNodeDefinitions = mapper.readTree(fileContent); + graphMeta.setAaiEntityNodeDescriptors(aaiEntityNodeDefinitions); + + graphMeta.setNumLinks(linkArrayOutput.size()); + graphMeta.setNumNodes(flatNodeArray.size()); + graphMeta.setRenderTimeInMs(preProcessingOpTimeInMs); + + output.setGraphMeta(graphMeta); + + logOptime("generateVisualizationOutput()", opStartTimeInMs); + + return output; + } + + /** + * Convert visualization output to json. + * + * @param output the output + * @return the string + * @throws JsonProcessingException the json processing exception + */ + public String convertVisualizationOutputToJson(D3VisualizationOutput output) + throws JsonProcessingException { + + if (output == null) { + return null; + } + + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + + return ow.writeValueAsString(output); + + } + + /** + * Builds the links from graph collection. + * + * @param nodeMap the node map + */ + public void buildLinksFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) { + + for (ActiveInventoryNode ain : nodeMap.values()) { + + /* + * This one is a little bit different, when we iterate over the collection we only want to + * draw the links for node that are less than the max traversal depth. We want to only draw + * links at a depth of n-1 because we are basing the links on the outbound neighbors from the + * current node. 
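+     *
+     * Worked example (hypothetical): with a max traversal depth of 2, a node at
+     * depth 2 is still rendered by buildFlatNodeArrayFromGraphCollection() but
+     * contributes no links of its own; its edges back toward the interior of the
+     * graph were already emitted by its depth-1 neighbors.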
+ */ + + if (ain.getNodeDepth() < VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { + + Collection<String> outboundNeighbors = ain.getOutboundNeighbors(); + + for (String outboundNeighbor : outboundNeighbors) { + + JsonNodeLink nodeLink = new JsonNodeLink(); + + nodeLink.setId(UUID.randomUUID().toString()); + nodeLink.setSource(ain.getNodeId()); + nodeLink.setTarget(outboundNeighbor); + + linkArrayOutput.add(nodeLink); + + } + + Collection<String> inboundNeighbors = ain.getInboundNeighbors(); + + for (String inboundNeighbor : inboundNeighbors) { + + JsonNodeLink nodeLink = new JsonNodeLink(); + + nodeLink.setId(UUID.randomUUID().toString()); + nodeLink.setSource(ain.getNodeId()); + nodeLink.setTarget(inboundNeighbor); + + linkArrayOutput.add(nodeLink); + + } + + + } else { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "buildLinks()," + + " Filtering node = " + ain.getNodeId() + " @ depth = " + + ain.getNodeDepth()); + } + + } + } + + } + + /** + * Builds the flat node array from graph collection. + * + * @param nodeMap the node map + */ + /* + * Recursive function to walk multi-graph nodes and children to build a folded resource target + * graph. + */ + public void buildFlatNodeArrayFromGraphCollection(Map<String, ActiveInventoryNode> nodeMap) { + + for (ActiveInventoryNode n : nodeMap.values()) { + + if (n.getNodeDepth() <= VisualizationConfig.getConfig().getMaxSelfLinkTraversalDepth()) { + + JsonNode jsonNode = new JsonNode(n); + + if (this.isUriEnrichable(n.getSelfLink())) { + jsonNode.getNodeMeta().setEnrichableNode(true); + } + + jsonNode.getNodeMeta().setClassName(visualizationConfig.getGeneralNodeClassName()); + + if (VisualizationConfig.getConfig().isVisualizationDebugEnabled()) { + + NodeDebug nodeDebug = jsonNode.getNodeMeta().getNodeDebug(); + + if (nodeDebug != null) { + nodeDebug.setProcessingError(n.isProcessingErrorOccurred()); + nodeDebug.setProcessingErrorCauses(n.getProcessingErrorCauses()); + } + } + flatNodeArray.add(jsonNode); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, + "Filtering node from visualization: " + n.getNodeId() + " @ depth = " + + n.getNodeDepth()); + } + } + } + } + + /** + * Checks if is uri enrichable. + * + * @param uri the uri + * @return true, if is uri enrichable + */ + private boolean isUriEnrichable(String uri) { + if (enrichableUriPrefixes != null) { + for (String prefix : enrichableUriPrefixes) { + if (uri.contains(prefix)) { // AAI-4089 + return true; + } + } + } + return false; + } +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/servlet/SearchServlet.java b/src/main/java/org/openecomp/sparky/viewandinspect/servlet/SearchServlet.java new file mode 100644 index 0000000..c011e9c --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/servlet/SearchServlet.java @@ -0,0 +1,194 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.servlet; + +import java.io.IOException; +import java.io.PrintWriter; +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONException; +import org.json.JSONObject; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.elasticsearch.SearchAdapter; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.sas.config.SearchServiceConfig; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.search.VnfSearchService; +import org.openecomp.sparky.search.config.SuggestionConfig; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.services.SearchServiceWrapper; + +import org.openecomp.cl.mdc.MdcContext; + +/** + * The Class SearchServlet. + */ + +public class SearchServlet extends HttpServlet { + + private static final long serialVersionUID = 1L; + + private static final Logger LOG = LoggerFactory.getInstance().getLogger(SearchServlet.class); + + private SearchServiceWrapper searchWrapper = null; + + private static final String KEY_PAYLOAD = "payload"; + + /** + * Instantiates a new search servlet. 
+ */ + public SearchServlet() { + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, + * javax.servlet.http.HttpServletResponse) + */ + @Override + public void doGet(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + doPost(request, response); + } + + public void destroy() { + // TODO Auto-generated method stub + super.destroy(); + } + + public void init() throws ServletException { + super.init(); + searchWrapper = new SearchServiceWrapper(); + } + + protected Map<String, String> getPayloadParams(JSONObject parameters) { + Map<String, String> payloadParams = new HashMap<String, String>(); + try { + JSONObject payload = parameters.getJSONObject(KEY_PAYLOAD); + if (payload.length() > 0) { + for (String key : JSONObject.getNames(payload)) { + payloadParams.put(key, payload.getString(key)); + } + } + } catch (JSONException exc) { + LOG.error(AaiUiMsgs.ERROR_PARSING_PARAMS, exc); + } + return payloadParams; + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, + * javax.servlet.http.HttpServletResponse) + */ + @Override + public void doPost(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + String txnID = request.getHeader("X-TransactionId"); + if (txnID == null) { + txnID = NodeUtils.getRandomTxnId(); + } + + String partnerName = request.getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + MdcContext.initialize(txnID, "AAI_UI", "", partnerName, request.getRemoteAddr()); + searchWrapper.doPost(request, response); + } + + /** + * Generate json error response. + * + * @param message the message + * @return the string + */ + /* + * This is the manual approach, however we could also create an object container for the error + * then use the Jackson ObjectWrite to dump the object to json instead. If it gets any more + * complicated we could do that approach so we don't have to manually trip over the JSON + * formatting. + */ + protected String generateJsonErrorResponse(String message) { + return String.format("{ \"errorMessage\" : %s }", message); + } + + /** + * Handle search servlet errors. + * + * @param errorMsg the error msg + * @param exc the exc + * @param response the response + * @throws IOException Signals that an I/O exception has occurred. + */ + public void handleSearchServletErrors(String errorMsg, Exception exc, + HttpServletResponse response) throws IOException { + + String errorLogMsg = + (exc == null ? errorMsg : errorMsg + ". Error:" + exc.getLocalizedMessage()); + + LOG.error(AaiUiMsgs.SEARCH_SERVLET_ERROR, errorLogMsg); + + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(generateJsonErrorResponse(errorMsg)); + out.close(); + } + + + /** + * Sets the servlet response. + * + * @param response the response + * @param postPayload the post payload + * + * @throws IOException Signals that an I/O exception has occurred. 
+ */ + private void setServletResponse(HttpServletResponse response, String postPayload) + throws IOException { + + if (postPayload != null) { + response.setContentType("application/json"); + PrintWriter out = response.getWriter(); + out.println(postPayload); + out.close(); + } + } + + + + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/servlet/VisualizationServlet.java b/src/main/java/org/openecomp/sparky/viewandinspect/servlet/VisualizationServlet.java new file mode 100644 index 0000000..b64d0e8 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/servlet/VisualizationServlet.java @@ -0,0 +1,203 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.servlet; + +import java.io.IOException; +import java.io.PrintWriter; + +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.io.IOUtils; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.config.oxm.OxmModelLoader; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.dal.servlet.ResettableStreamHttpServletRequest; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.util.NodeUtils; +import org.openecomp.sparky.viewandinspect.entity.QueryRequest; +import org.openecomp.sparky.viewandinspect.services.VisualizationService; + +import org.openecomp.cl.mdc.MdcContext; + +/** + * A dedicated servlet for handling Front-End Visualization Requests and performing feats of magic + * to execute the right model/type/config driven queries to build the D3 visualization output JSON + * back to the FE. + * + * @author DAVEA + * + */ +public class VisualizationServlet extends HttpServlet { + + /** + * + */ + private static final long serialVersionUID = 4678831934652478571L; + private static final Logger LOG = + LoggerFactory.getInstance().getLogger(VisualizationServlet.class); + private static final String VISUALIZATION_API_ENDPOINT = "prepareVisualization"; + private final VisualizationService visualizationService; + /** + * Instantiates a new visualization servlet. 
+ * + * @throws Exception the exception + */ + public VisualizationServlet() throws Exception { + this.visualizationService = new VisualizationService(OxmModelLoader.getInstance()); + } + + /** + * Inits the. + * + * @param filterConfig the filter config + * @throws ServletException the servlet exception + */ + public void init(FilterConfig filterConfig) throws ServletException { + LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "init()"); + } + + /** + * Gets the request body. + * + * @param request the request + * @return the request body + */ + private String getRequestBody(HttpServletRequest request) { + + ResettableStreamHttpServletRequest requestWrapper = + new ResettableStreamHttpServletRequest(request); + + String body = null; + try { + body = IOUtils.toString(requestWrapper.getRequestBody()); + } catch (IOException exc) { + LOG.error(AaiUiMsgs.EXCEPTION_CAUGHT, "Trying to get body from request", + exc.getLocalizedMessage()); + } + + return body; + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest, + * javax.servlet.http.HttpServletResponse) + */ + @Override + protected void doGet(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + doPost(request, response); + } + + /* + * (non-Javadoc) + * + * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest, + * javax.servlet.http.HttpServletResponse) + */ + @Override + protected void doPost(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + String txnID = request.getHeader("X-TransactionId"); + if (txnID == null) { + txnID = NodeUtils.getRandomTxnId(); + } + + String partnerName = request.getHeader("X-FromAppId"); + if (partnerName == null) { + partnerName = "Browser"; + } + + MdcContext.initialize(txnID, "AAI-UI", "", partnerName, request.getRemoteAddr()); + + String postRequestBody = getRequestBody(request); + + String requestUri = request.getRequestURI(); + OperationResult operationResult = null; + + /* + * For now we only have a single API call but there could be more in the future + */ + if (requestUri.endsWith(VISUALIZATION_API_ENDPOINT)) { + + /* + * Work our magic and determine the best way to interrogate AAI to get the stuff we are + * interested in. Perhaps it should be an edge-tag-query or perhaps it is a straight up + * derived self-link query. 
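+       *
+       * A request body for this endpoint is a single-field JSON document, e.g.
+       * (assuming Jackson's default getter-derived naming for
+       * QueryRequest.getHashId()):
+       *
+       *   { "hashId" : "<sha-digest-of-the-entity-self-link>" }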
+ */ + + /* + * Map request body to an interpreted API PoJo object + */ + QueryRequest queryRequest = visualizationService.analyzeQueryRequestBody(postRequestBody); + + if (queryRequest != null) { + operationResult = visualizationService.buildVisualizationUsingGenericQuery(queryRequest); + } else { + LOG.error(AaiUiMsgs.FAILED_TO_ANALYZE, + String.format("Failed to analyze post request query body = '%s'", postRequestBody)); + + operationResult = new OperationResult(); + operationResult.setResult(500, + String.format("Failed to analyze post request query body = '%s'", postRequestBody)); + + } + + } else { + // unhandled type + LOG.error(AaiUiMsgs.UNKNOWN_SERVER_ERROR, "Unhandled requestUri - " + requestUri); + operationResult = new OperationResult(); + operationResult.setResult(500, "Unknown Server Error: Unhandled requestUri = " + requestUri); + } + + PrintWriter out = response.getWriter(); + response.addHeader("Content-Type", "application/xml"); + + response.setStatus(operationResult.getResultCode()); + + if (operationResult.getResultCode() == 200) { + response.setContentLength(operationResult.getResult().length()); + out.print(operationResult.getResult()); + out.print("\n"); + } else { + response.setContentLength(operationResult.getResult().length()); + out.print(operationResult.getResult()); + out.print("\n"); + } + } + + @Override + public void destroy() { + super.destroy(); + visualizationService.shutdown(); + } +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java b/src/main/java/org/openecomp/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java new file mode 100644 index 0000000..6c482e9 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/task/CollectNodeSelfLinkTask.java @@ -0,0 +1,60 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.task; + +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.rest.OperationResult; + +/** + * The Class CollectNodeSelfLinkTask. + */ +public class CollectNodeSelfLinkTask implements Supplier<OperationResult> { + + private String selfLink; + private ActiveInventoryDataProvider aaiProvider; + + /** + * Instantiates a new collect node self link task. 
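+   *
+   * Because the task is a {@code Supplier<OperationResult>}, it plugs directly
+   * into the async machinery; a usage sketch (executor and downstream handler
+   * are illustrative only):
+   *
+   * <pre>
+   * CompletableFuture
+   *     .supplyAsync(new CollectNodeSelfLinkTask(selfLink, aaiProvider), executor)
+   *     .thenAccept(result -> handle(result));
+   * </pre>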
+ * + * @param selfLink the self link + * @param aaiProvider the aai provider + */ + public CollectNodeSelfLinkTask(String selfLink, ActiveInventoryDataProvider aaiProvider) { + this.selfLink = selfLink; + this.aaiProvider = aaiProvider; + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public OperationResult get() { + return aaiProvider.queryActiveInventoryWithRetries(selfLink, "application/json", 5); + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java b/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java new file mode 100644 index 0000000..b7fe3a5 --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformNodeSelfLinkProcessingTask.java @@ -0,0 +1,104 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. + */ + +package org.openecomp.sparky.viewandinspect.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.viewandinspect.entity.NodeProcessingTransaction; +import org.slf4j.MDC; + +/** + * The Class PerformNodeSelfLinkProcessingTask. + */ +public class PerformNodeSelfLinkProcessingTask implements Supplier<NodeProcessingTransaction> { + + private static final Logger logger = + LoggerFactory.getInstance().getLogger(PerformNodeSelfLinkProcessingTask.class); + + private NodeProcessingTransaction txn; + private ActiveInventoryDataProvider aaiProvider; + private Map<String, String> contextMap; + + /** + * Instantiates a new perform node self link processing task. 
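+   *
+   * The constructor snapshots the caller's MDC context
+   * (MDC.getCopyOfContextMap()) and get() restores it on the worker thread, so
+   * transaction ids survive the hop onto the executor. Note that the
+   * requestParameters argument is currently accepted but unused.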
+ * + * @param txn the txn + * @param requestParameters the request parameters + * @param aaiProvider the aai provider + */ + public PerformNodeSelfLinkProcessingTask(NodeProcessingTransaction txn, String requestParameters, + ActiveInventoryDataProvider aaiProvider) { + this.aaiProvider = aaiProvider; + this.txn = txn; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public NodeProcessingTransaction get() { + MDC.setContextMap(contextMap); + String link = txn.getSelfLinkWithModifiers(); + + if (link == null) { + OperationResult opResult = new OperationResult(); + opResult.setResult(500, "Aborting self-link processing because self link is null"); + txn.setOpResult(opResult); + return txn; + } + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Collecting " + link); + } + + OperationResult opResult = null; + try { + opResult = aaiProvider.queryActiveInventoryWithRetries(link, "application/json", + ActiveInventoryConfig.getConfig().getAaiRestConfig().getNumRequestRetries()); + } catch (Exception exc) { + opResult = new OperationResult(); + opResult.setResult(500, "Querying AAI with retry failed due to an exception."); + logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage()); + } + + if (logger.isDebugEnabled()) { + logger.debug(AaiUiMsgs.DEBUG_GENERIC, "Operation result = " + opResult.toString()); + } + + txn.setOpResult(opResult); + return txn; + + } + +} diff --git a/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java b/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java new file mode 100644 index 0000000..948c5cb --- /dev/null +++ b/src/main/java/org/openecomp/sparky/viewandinspect/task/PerformSelfLinkDeterminationTask.java @@ -0,0 +1,96 @@ +/** + * ============LICENSE_START=================================================== + * SPARKY (AAI UI service) + * ============================================================================ + * Copyright © 2017 AT&T Intellectual Property. + * Copyright © 2017 Amdocs + * All rights reserved. + * ============================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END===================================================== + * + * ECOMP and OpenECOMP are trademarks + * and service marks of AT&T Intellectual Property. 
+ */ +package org.openecomp.sparky.viewandinspect.task; + +import java.util.Map; +import java.util.function.Supplier; + +import org.openecomp.sparky.dal.aai.ActiveInventoryDataProvider; +import org.openecomp.sparky.dal.aai.config.ActiveInventoryConfig; +import org.openecomp.sparky.dal.rest.OperationResult; +import org.openecomp.sparky.logging.AaiUiMsgs; +import org.openecomp.sparky.viewandinspect.entity.SelfLinkDeterminationTransaction; +import org.openecomp.cl.api.Logger; +import org.openecomp.cl.eelf.LoggerFactory; +import org.slf4j.MDC; + +public class PerformSelfLinkDeterminationTask implements Supplier<SelfLinkDeterminationTransaction> { + + private static final Logger logger = + LoggerFactory.getInstance().getLogger(PerformSelfLinkDeterminationTask.class); + + private SelfLinkDeterminationTransaction txn; + private ActiveInventoryDataProvider aaiProvider; + private Map<String, String> contextMap; + + + /** + * Instantiates a new perform node self link processing task. + * + * @param txn the txn + * @param requestParameters the request parameters + * @param aaiProvider the aai provider + */ + public PerformSelfLinkDeterminationTask(SelfLinkDeterminationTransaction txn, String requestParameters, + ActiveInventoryDataProvider aaiProvider) { + + this.aaiProvider = aaiProvider; + this.txn = txn; + this.contextMap = MDC.getCopyOfContextMap(); + } + + /* (non-Javadoc) + * @see java.util.function.Supplier#get() + */ + @Override + public SelfLinkDeterminationTransaction get() { + MDC.setContextMap(contextMap); + if (txn.getQueryString() == null) { + OperationResult opResult = new OperationResult(); + opResult.setResult(500, "Aborting self-link determination because self link query is null."); + txn.setOpResult(opResult); + return txn; + } + + OperationResult opResult = null; + try { + opResult = aaiProvider.queryActiveInventoryWithRetries(txn.getQueryString(), "application/json", + ActiveInventoryConfig.getConfig().getAaiRestConfig().getNumRequestRetries()); + } catch (Exception exc) { + opResult = new OperationResult(); + opResult.setResult(500, "Querying AAI with retry failed due to an exception."); + logger.error(AaiUiMsgs.ERROR_AAI_QUERY_WITH_RETRY, exc.getMessage()); + } + + if (logger.isDebugEnabled()) { + logger.debug("Operation result = " + opResult.toString()); + } + + txn.setOpResult(opResult); + return txn; + + } + +}
\ No newline at end of file