Diffstat (limited to 'mod/designtool/designtool-web/src/main/java/org')
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/NiFi.java | 446
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/controller/AbstractPort.java | 675
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEAutoLoader.java | 105
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEClassLoaders.java | 127
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/util/NiFiProperties.java | 1551
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java | 4899
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java | 4354
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/FlowConfigurationDTO.java | 182
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java | 700
-rw-r--r--  mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/server/JettyServer.java | 1226
10 files changed, 14265 insertions, 0 deletions
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/NiFi.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/NiFi.java
new file mode 100644
index 0000000..0b033db
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/NiFi.java
@@ -0,0 +1,446 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi;
+
+import org.apache.nifi.bundle.Bundle;
+import org.apache.nifi.nar.ExtensionMapping;
+import org.apache.nifi.nar.NarClassLoaders;
+import org.apache.nifi.nar.NarClassLoadersHolder;
+import org.apache.nifi.nar.NarUnpacker;
+import org.apache.nifi.nar.SystemBundle;
+import org.apache.nifi.util.FileUtils;
+import org.apache.nifi.util.NiFiProperties;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.bridge.SLF4JBridgeHandler;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.lang.Thread.UncaughtExceptionHandler;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+import java.util.Random;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
+public class NiFi {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(NiFi.class);
+ private static final String KEY_FILE_FLAG = "-K";
+ private final NiFiServer nifiServer;
+ private final BootstrapListener bootstrapListener;
+
+ public static final String BOOTSTRAP_PORT_PROPERTY = "nifi.bootstrap.listen.port";
+ private volatile boolean shutdown = false;
+
+ public NiFi(final NiFiProperties properties)
+ throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
+
+ this(properties, ClassLoader.getSystemClassLoader());
+
+ }
+
+ public NiFi(final NiFiProperties properties, ClassLoader rootClassLoader)
+ throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
+
+ // There can only be one krb5.conf for the overall Java process so set this globally during
+ // start up so that processors and our Kerberos authentication code don't have to set this
+ final File kerberosConfigFile = properties.getKerberosConfigurationFile();
+ if (kerberosConfigFile != null) {
+ final String kerberosConfigFilePath = kerberosConfigFile.getAbsolutePath();
+ LOGGER.info("Setting java.security.krb5.conf to {}", new Object[]{kerberosConfigFilePath});
+ System.setProperty("java.security.krb5.conf", kerberosConfigFilePath);
+ }
+
+ setDefaultUncaughtExceptionHandler();
+
+ // register the shutdown hook
+ addShutdownHook();
+
+ final String bootstrapPort = System.getProperty(BOOTSTRAP_PORT_PROPERTY);
+ if (bootstrapPort != null) {
+ try {
+ final int port = Integer.parseInt(bootstrapPort);
+
+ if (port < 1 || port > 65535) {
+ throw new RuntimeException("Failed to start NiFi because system property '" + BOOTSTRAP_PORT_PROPERTY + "' is not a valid integer in the range 1 - 65535");
+ }
+
+ bootstrapListener = new BootstrapListener(this, port);
+ bootstrapListener.start();
+ } catch (final NumberFormatException nfe) {
+ throw new RuntimeException("Failed to start NiFi because system property '" + BOOTSTRAP_PORT_PROPERTY + "' is not a valid integer in the range 1 - 65535");
+ }
+ } else {
+ LOGGER.info("NiFi started without Bootstrap Port information provided; will not listen for requests from Bootstrap");
+ bootstrapListener = null;
+ }
+
+ // delete the web working dir - if the application does not start successfully
+ // the web app directories might be in an invalid state. when this happens
+ // jetty will not attempt to re-extract the war into the directory. by removing
+ // the working directory, we can be assured that it will attempt to extract the
+ // war every time the application starts.
+ File webWorkingDir = properties.getWebWorkingDirectory();
+ FileUtils.deleteFilesInDirectory(webWorkingDir, null, LOGGER, true, true);
+ FileUtils.deleteFile(webWorkingDir, LOGGER, 3);
+
+ detectTimingIssues();
+
+ // redirect JUL log events
+ initLogging();
+
+ final Bundle systemBundle = SystemBundle.create(properties);
+
+ // expand the nars
+ final ExtensionMapping extensionMapping = NarUnpacker.unpackNars(properties, systemBundle);
+
+ // load the extensions classloaders
+ NarClassLoaders narClassLoaders = NarClassLoadersHolder.getInstance();
+
+ narClassLoaders.init(rootClassLoader,
+ properties.getFrameworkWorkingDirectory(), properties.getExtensionsWorkingDirectory());
+
+ // load the framework classloader
+ final ClassLoader frameworkClassLoader = narClassLoaders.getFrameworkBundle().getClassLoader();
+ if (frameworkClassLoader == null) {
+ throw new IllegalStateException("Unable to find the framework NAR ClassLoader.");
+ }
+
+ final Set<Bundle> narBundles = narClassLoaders.getBundles();
+
+ // load the server from the framework classloader
+ Thread.currentThread().setContextClassLoader(frameworkClassLoader);
+ Class<?> jettyServer = Class.forName("org.apache.nifi.web.server.JettyServer", true, frameworkClassLoader);
+ Constructor<?> jettyConstructor = jettyServer.getConstructor(NiFiProperties.class, Set.class);
+
+ final long startTime = System.nanoTime();
+ nifiServer = (NiFiServer) jettyConstructor.newInstance(properties, narBundles);
+ nifiServer.setExtensionMapping(extensionMapping);
+ nifiServer.setBundles(systemBundle, narBundles);
+
+ if (shutdown) {
+ LOGGER.info("NiFi has been shutdown via NiFi Bootstrap. Will not start Controller");
+ } else {
+ nifiServer.start();
+
+ if (bootstrapListener != null) {
+ bootstrapListener.sendStartedStatus(true);
+ }
+
+ final long duration = System.nanoTime() - startTime;
+ LOGGER.info("Controller initialization took " + duration + " nanoseconds "
+ + "(" + (int) TimeUnit.SECONDS.convert(duration, TimeUnit.NANOSECONDS) + " seconds).");
+ }
+ }
+
+ protected void setDefaultUncaughtExceptionHandler() {
+ Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {
+ @Override
+ public void uncaughtException(final Thread t, final Throwable e) {
+ LOGGER.error("An Unknown Error Occurred in Thread {}: {}", t, e.toString());
+ LOGGER.error("", e);
+ }
+ });
+ }
+
+ protected void addShutdownHook() {
+ Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+ @Override
+ public void run() {
+ // shutdown the jetty server
+ shutdownHook();
+ }
+ }));
+ }
+
+ protected void initLogging() {
+ SLF4JBridgeHandler.removeHandlersForRootLogger();
+ SLF4JBridgeHandler.install();
+ }
+
+ private static ClassLoader createBootstrapClassLoader() {
+ //Get list of files in bootstrap folder
+ final List<URL> urls = new ArrayList<>();
+ try {
+ Files.list(Paths.get("lib/bootstrap")).forEach(p -> {
+ try {
+ urls.add(p.toUri().toURL());
+ } catch (final MalformedURLException mef) {
+ LOGGER.warn("Unable to load " + p.getFileName() + " due to " + mef, mef);
+ }
+ });
+ } catch (IOException ioe) {
+ LOGGER.warn("Unable to access lib/bootstrap to create bootstrap classloader", ioe);
+ }
+ //Create the bootstrap classloader
+ return new URLClassLoader(urls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());
+ }
+
+ protected void shutdownHook() {
+ try {
+ shutdown();
+ } catch (final Throwable t) {
+ LOGGER.warn("Problem occurred ensuring Jetty web server was properly terminated due to " + t);
+ }
+ }
+
+ protected void shutdown() {
+ this.shutdown = true;
+
+ LOGGER.info("Initiating shutdown of Jetty web server...");
+ if (nifiServer != null) {
+ nifiServer.stop();
+ }
+ if (bootstrapListener != null) {
+ bootstrapListener.stop();
+ }
+ LOGGER.info("Jetty web server shutdown completed (nicely or otherwise).");
+ }
+
+ /**
+ * Determine if the machine we're running on has timing issues.
+ */
+ private void detectTimingIssues() {
+ final int minRequiredOccurrences = 25;
+ final int maxOccurrencesOutOfRange = 15;
+ final AtomicLong lastTriggerMillis = new AtomicLong(System.currentTimeMillis());
+
+ final ScheduledExecutorService service = Executors.newScheduledThreadPool(1, new ThreadFactory() {
+ private final ThreadFactory defaultFactory = Executors.defaultThreadFactory();
+
+ @Override
+ public Thread newThread(final Runnable r) {
+ final Thread t = defaultFactory.newThread(r);
+ t.setDaemon(true);
+ t.setName("Detect Timing Issues");
+ return t;
+ }
+ });
+
+ final AtomicInteger occurrencesOutOfRange = new AtomicInteger(0);
+ final AtomicInteger occurrences = new AtomicInteger(0);
+ final Runnable command = new Runnable() {
+ @Override
+ public void run() {
+ final long curMillis = System.currentTimeMillis();
+ final long difference = curMillis - lastTriggerMillis.get();
+ final long millisOff = Math.abs(difference - 2000L);
+ occurrences.incrementAndGet();
+ if (millisOff > 500L) {
+ occurrencesOutOfRange.incrementAndGet();
+ }
+ lastTriggerMillis.set(curMillis);
+ }
+ };
+
+ final ScheduledFuture<?> future = service.scheduleWithFixedDelay(command, 2000L, 2000L, TimeUnit.MILLISECONDS);
+
+ final TimerTask timerTask = new TimerTask() {
+ @Override
+ public void run() {
+ future.cancel(true);
+ service.shutdownNow();
+
+ if (occurrences.get() < minRequiredOccurrences || occurrencesOutOfRange.get() > maxOccurrencesOutOfRange) {
+ LOGGER.warn("NiFi has detected that this box is not responding within the expected timing interval, which may cause "
+ + "Processors to be scheduled erratically. Please see the NiFi documentation for more information.");
+ }
+ }
+ };
+ final Timer timer = new Timer(true);
+ timer.schedule(timerTask, 60000L);
+ }
+
+ /**
+ * Main entry point of the application.
+ *
+ * @param args command line arguments supplied by the bootstrap process; scanned for the -K key file flag
+ */
+ public static void main(String[] args) {
+ LOGGER.info("Launching NiFi...");
+ try {
+ NiFiProperties properties = convertArgumentsToValidatedNiFiProperties(args);
+ new NiFi(properties);
+ } catch (final Throwable t) {
+ LOGGER.error("Failure to launch NiFi due to " + t, t);
+ }
+ }
+
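+ // Loads nifi.properties through the bootstrap classloader (using any -K key passed by the bootstrap process) and validates the result.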
+ protected static NiFiProperties convertArgumentsToValidatedNiFiProperties(String[] args) {
+ final ClassLoader bootstrap = createBootstrapClassLoader();
+ NiFiProperties properties = initializeProperties(args, bootstrap);
+ properties.validate();
+ return properties;
+ }
+
+ private static NiFiProperties initializeProperties(final String[] args, final ClassLoader bootstrapLoader) {
+ // Try to get key
+ // If key doesn't exist, instantiate without
+ // Load properties
+ // If properties are protected and key missing, throw RuntimeException
+
+ final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+ final String key;
+ try {
+ key = loadFormattedKey(args);
+ // The key might be empty or null when it is passed to the loader
+ } catch (IllegalArgumentException e) {
+ final String msg = "The bootstrap process did not provide a valid key";
+ throw new IllegalArgumentException(msg, e);
+ }
+ Thread.currentThread().setContextClassLoader(bootstrapLoader);
+
+ try {
+ final Class<?> propsLoaderClass = Class.forName("org.apache.nifi.properties.NiFiPropertiesLoader", true, bootstrapLoader);
+ final Method withKeyMethod = propsLoaderClass.getMethod("withKey", String.class);
+ final Object loaderInstance = withKeyMethod.invoke(null, key);
+ final Method getMethod = propsLoaderClass.getMethod("get");
+ final NiFiProperties properties = (NiFiProperties) getMethod.invoke(loaderInstance);
+ LOGGER.info("Loaded {} properties", properties.size());
+ return properties;
+ } catch (InvocationTargetException wrappedException) {
+ final String msg = "There was an issue decrypting protected properties";
+ throw new IllegalArgumentException(msg, wrappedException.getCause() == null ? wrappedException : wrappedException.getCause());
+ } catch (final IllegalAccessException | NoSuchMethodException | ClassNotFoundException reex) {
+ final String msg = "Unable to access properties loader in the expected manner - apparent classpath or build issue";
+ throw new IllegalArgumentException(msg, reex);
+ } catch (final RuntimeException e) {
+ final String msg = "There was an issue decrypting protected properties";
+ throw new IllegalArgumentException(msg, e);
+ } finally {
+ Thread.currentThread().setContextClassLoader(contextClassLoader);
+ }
+ }
+
+ private static String loadFormattedKey(String[] args) {
+ String key = null;
+ List<String> parsedArgs = parseArgs(args);
+ // Check if args contain protection key
+ if (parsedArgs.contains(KEY_FILE_FLAG)) {
+ key = getKeyFromKeyFileAndPrune(parsedArgs);
+ // Format the key (check hex validity and remove spaces)
+ key = formatHexKey(key);
+
+ }
+
+ if (null == key) {
+ return "";
+ } else if (!isHexKeyValid(key)) {
+ throw new IllegalArgumentException("The key was not provided in valid hex format and of the correct length");
+ } else {
+ return key;
+ }
+ }
+
+ private static String getKeyFromKeyFileAndPrune(List<String> parsedArgs) {
+ String key = null;
+ LOGGER.debug("The bootstrap process provided the " + KEY_FILE_FLAG + " flag");
+ int i = parsedArgs.indexOf(KEY_FILE_FLAG);
+ if (parsedArgs.size() <= i + 1) {
+ LOGGER.error("The bootstrap process passed the {} flag without a filename", KEY_FILE_FLAG);
+ throw new IllegalArgumentException("The bootstrap process provided the " + KEY_FILE_FLAG + " flag but no key");
+ }
+ try {
+ String passwordFilePath = parsedArgs.get(i + 1);
+ // Slurp in the contents of the file:
+ byte[] encoded = Files.readAllBytes(Paths.get(passwordFilePath));
+ key = new String(encoded, StandardCharsets.UTF_8);
+ if (0 == key.length())
+ throw new IllegalArgumentException("Key in keyfile " + passwordFilePath + " yielded an empty key");
+
+ LOGGER.info("Now overwriting file in " + passwordFilePath);
+
+ // Overwrite the contents of the file (so that key material is not left
+ // behind on the file system once the file is unlinked):
+ File passwordFile = new File(passwordFilePath);
+ FileWriter overwriter = new FileWriter(passwordFile, false);
+
+ // Construct a random pad:
+ Random r = new Random();
+ StringBuffer sb = new StringBuffer();
+ // Note on correctness: the pad may end up slightly longer than the key material, which is still sufficient.
+ while (sb.length() < encoded.length) {
+ sb.append(Integer.toHexString(r.nextInt()));
+ }
+ String pad = sb.toString();
+ LOGGER.info("Overwriting key material with pad: " + pad);
+ overwriter.write(pad);
+ overwriter.close();
+
+ LOGGER.info("Removing/unlinking file: " + passwordFilePath);
+ passwordFile.delete();
+
+ } catch (IOException e) {
+ LOGGER.error("Caught IOException while retrieving the " + KEY_FILE_FLAG + "-passed keyfile; aborting: " + e.toString());
+ System.exit(1);
+ }
+
+ LOGGER.info("Read property protection key from key file provided by bootstrap process");
+ return key;
+ }
+
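+ // If the key-file flag and its path were passed as a single "-K <path>" argument, split them into separate entries so the flag can be found by index.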
+ private static List<String> parseArgs(String[] args) {
+ List<String> parsedArgs = new ArrayList<>(Arrays.asList(args));
+ for (int i = 0; i < parsedArgs.size(); i++) {
+ if (parsedArgs.get(i).startsWith(KEY_FILE_FLAG + " ")) {
+ String[] split = parsedArgs.get(i).split(" ", 2);
+ parsedArgs.set(i, split[0]);
+ parsedArgs.add(i + 1, split[1]);
+ break;
+ }
+ }
+ return parsedArgs;
+ }
+
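+ // Strips any non-hex characters (including whitespace) and lower-cases the rest; returns an empty string when no key was supplied.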
+ private static String formatHexKey(String input) {
+ if (input == null || input.trim().isEmpty()) {
+ return "";
+ }
+ return input.replaceAll("[^0-9a-fA-F]", "").toLowerCase();
+ }
+
+ private static boolean isHexKeyValid(String key) {
+ if (key == null || key.trim().isEmpty()) {
+ return false;
+ }
+ // Key length is in "nibbles" (i.e. one hex char = 4 bits)
+ return Arrays.asList(128, 196, 256).contains(key.length() * 4) && key.matches("^[0-9a-fA-F]*$");
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/controller/AbstractPort.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/controller/AbstractPort.java
new file mode 100644
index 0000000..6023fc2
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/controller/AbstractPort.java
@@ -0,0 +1,675 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.controller;
+
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
+import org.apache.nifi.authorization.Resource;
+import org.apache.nifi.authorization.resource.Authorizable;
+import org.apache.nifi.authorization.resource.ResourceFactory;
+import org.apache.nifi.authorization.resource.ResourceType;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.connectable.Connectable;
+import org.apache.nifi.connectable.ConnectableType;
+import org.apache.nifi.connectable.Connection;
+import org.apache.nifi.connectable.Port;
+import org.apache.nifi.connectable.Position;
+import org.apache.nifi.groups.ProcessGroup;
+import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.ProcessSession;
+import org.apache.nifi.processor.ProcessSessionFactory;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.processor.exception.ProcessException;
+import org.apache.nifi.util.CharacterFilterUtils;
+import org.apache.nifi.util.FormatUtils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import static java.util.Objects.requireNonNull;
+
+public abstract class AbstractPort implements Port {
+
+ public static final Relationship PORT_RELATIONSHIP = new Relationship.Builder()
+ .description("The relationship through which all Flow Files are transferred")
+ .name("")
+ .build();
+
+ public static final long MINIMUM_PENALIZATION_MILLIS = 0L;
+ public static final TimeUnit DEFAULT_TIME_UNIT = TimeUnit.MILLISECONDS;
+
+ public static final long MINIMUM_YIELD_MILLIS = 0L;
+ public static final long DEFAULT_YIELD_PERIOD = 10000L;
+ public static final TimeUnit DEFAULT_YIELD_TIME_UNIT = TimeUnit.MILLISECONDS;
+
+ private final List<Relationship> relationships;
+
+ private final String id;
+ private final ConnectableType type;
+ private final AtomicReference<String> name;
+ private final AtomicReference<Position> position;
+ private final AtomicReference<String> comments;
+ private final AtomicReference<ProcessGroup> processGroup;
+ private final AtomicBoolean lossTolerant;
+ private final AtomicReference<ScheduledState> scheduledState;
+ private final AtomicInteger concurrentTaskCount;
+ private final AtomicReference<String> penalizationPeriod;
+ private final AtomicReference<String> yieldPeriod;
+ private final AtomicReference<String> schedulingPeriod;
+ private final AtomicReference<String> versionedComponentId = new AtomicReference<>();
+ private final AtomicLong schedulingNanos;
+ private final AtomicLong yieldExpiration;
+ private final ProcessScheduler processScheduler;
+
+ private final Set<Connection> outgoingConnections;
+ private final List<Connection> incomingConnections;
+
+ private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
+ private final Lock readLock = rwLock.readLock();
+ private final Lock writeLock = rwLock.writeLock();
+
+ public AbstractPort(final String id, final String name, final ProcessGroup processGroup, final ConnectableType type, final ProcessScheduler scheduler) {
+ this.id = requireNonNull(id);
+ this.name = new AtomicReference<>(requireNonNull(name));
+ position = new AtomicReference<>(new Position(0D, 0D));
+ outgoingConnections = new HashSet<>();
+ incomingConnections = new ArrayList<>();
+ comments = new AtomicReference<>();
+ lossTolerant = new AtomicBoolean(false);
+ concurrentTaskCount = new AtomicInteger(1);
+ processScheduler = scheduler;
+
+ final List<Relationship> relationshipList = new ArrayList<>();
+ relationshipList.add(PORT_RELATIONSHIP);
+ relationships = Collections.unmodifiableList(relationshipList);
+ this.processGroup = new AtomicReference<>(processGroup);
+ this.type = type;
+ penalizationPeriod = new AtomicReference<>("30 sec");
+ yieldPeriod = new AtomicReference<>("1 sec");
+ yieldExpiration = new AtomicLong(0L);
+ schedulingPeriod = new AtomicReference<>("0 millis");
+ schedulingNanos = new AtomicLong(MINIMUM_SCHEDULING_NANOS);
+ scheduledState = new AtomicReference<>(ScheduledState.STOPPED);
+ }
+
+ @Override
+ public String getIdentifier() {
+ return id;
+ }
+
+ @Override
+ public String getProcessGroupIdentifier() {
+ final ProcessGroup procGroup = getProcessGroup();
+ return procGroup == null ? null : procGroup.getIdentifier();
+ }
+
+ @Override
+ public String getName() {
+ return name.get();
+ }
+
+ @Override
+ public void setName(final String name) {
+ if (this.name.get().equals(name)) {
+ return;
+ }
+
+ final ProcessGroup parentGroup = this.processGroup.get();
+ if (getConnectableType() == ConnectableType.INPUT_PORT) {
+ if (parentGroup.getInputPortByName(name) != null) {
+ throw new IllegalStateException("The requested new port name is not available");
+ }
+ } else if (getConnectableType() == ConnectableType.OUTPUT_PORT) {
+ if (parentGroup.getOutputPortByName(name) != null) {
+ throw new IllegalStateException("The requested new port name is not available");
+ }
+ }
+
+ this.name.set(name);
+ }
+
+ @Override
+ public Authorizable getParentAuthorizable() {
+ return getProcessGroup();
+ }
+
+ @Override
+ public Resource getResource() {
+ final ResourceType resourceType = ConnectableType.INPUT_PORT.equals(getConnectableType()) ? ResourceType.InputPort : ResourceType.OutputPort;
+ return ResourceFactory.getComponentResource(resourceType, getIdentifier(), getName());
+ }
+
+ @Override
+ public ProcessGroup getProcessGroup() {
+ return processGroup.get();
+ }
+
+ @Override
+ public void setProcessGroup(final ProcessGroup newGroup) {
+ this.processGroup.set(newGroup);
+ }
+
+ @Override
+ public String getComments() {
+ return comments.get();
+ }
+
+ @Override
+ public void setComments(final String comments) {
+ this.comments.set(CharacterFilterUtils.filterInvalidXmlCharacters(comments));
+ }
+
+ @Override
+ public Collection<Relationship> getRelationships() {
+ return relationships;
+ }
+
+ @Override
+ public Relationship getRelationship(final String relationshipName) {
+ if (PORT_RELATIONSHIP.getName().equals(relationshipName)) {
+ return PORT_RELATIONSHIP;
+ }
+ return null;
+ }
+
+ @Override
+ public void addConnection(final Connection connection) throws IllegalArgumentException {
+ writeLock.lock();
+ try {
+ if (!requireNonNull(connection).getSource().equals(this)) {
+ if (connection.getDestination().equals(this)) {
+ // don't add the connection twice. This may occur if we have a self-loop because we will be told
+ // to add the connection once because we are the source and again because we are the destination.
+ if (!incomingConnections.contains(connection)) {
+ incomingConnections.add(connection);
+ }
+
+ return;
+ } else {
+ throw new IllegalArgumentException("Cannot add a connection to a LocalPort for which the LocalPort is neither the Source nor the Destination");
+ }
+ }
+
+ /* TODO: Will commenting this out have repercussions?
+ Needed to comment this out to allow use of relationships for port to processor case which was previously not supported
+ for (final Relationship relationship : connection.getRelationships()) {
+ if (!relationship.equals(PORT_RELATIONSHIP)) {
+ throw new IllegalArgumentException("No relationship with name " + relationship + " exists for Local Ports");
+ }
+ }
+ */
+
+ // don't add the connection twice. This may occur if we have a self-loop because we will be told
+ // to add the connection once because we are the source and again because we are the destination.
+ if (!outgoingConnections.contains(connection)) {
+ outgoingConnections.add(connection);
+ }
+ } finally {
+ writeLock.unlock();
+ }
+ }
+
+ @Override
+ public boolean hasIncomingConnection() {
+ readLock.lock();
+ try {
+ return !incomingConnections.isEmpty();
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
+ final ProcessSession session = sessionFactory.createSession();
+
+ try {
+ onTrigger(context, session);
+ session.commit();
+ } catch (final ProcessException e) {
+ session.rollback();
+ throw e;
+ } catch (final Throwable t) {
+ session.rollback();
+ throw new RuntimeException(t);
+ }
+ }
+
+ public abstract void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException;
+
+ @Override
+ public void updateConnection(final Connection connection) throws IllegalStateException {
+ if (requireNonNull(connection).getSource().equals(this)) {
+ writeLock.lock();
+ try {
+ if (!outgoingConnections.remove(connection)) {
+ throw new IllegalStateException("No Connection with ID " + connection.getIdentifier() + " is currently registered with this Port");
+ }
+ outgoingConnections.add(connection);
+ } finally {
+ writeLock.unlock();
+ }
+ } else if (connection.getDestination().equals(this)) {
+ writeLock.lock();
+ try {
+ if (!incomingConnections.remove(connection)) {
+ throw new IllegalStateException("No Connection with ID " + connection.getIdentifier() + " is currently registered with this Port");
+ }
+ incomingConnections.add(connection);
+ } finally {
+ writeLock.unlock();
+ }
+ } else {
+ throw new IllegalStateException("The given connection is not currently registered for this Port");
+ }
+ }
+
+ @Override
+ public void removeConnection(final Connection connection) throws IllegalArgumentException, IllegalStateException {
+ writeLock.lock();
+ try {
+ if (!requireNonNull(connection).getSource().equals(this)) {
+ final boolean existed = incomingConnections.remove(connection);
+ if (!existed) {
+ throw new IllegalStateException("The given connection is not currently registered for this Port");
+ }
+ return;
+ }
+
+ if (!canConnectionBeRemoved(connection)) {
+ // TODO: Determine which processors will be broken if connection is removed, rather than just returning a boolean
+ throw new IllegalStateException("Connection " + connection.getIdentifier() + " cannot be removed");
+ }
+
+ final boolean removed = outgoingConnections.remove(connection);
+ if (!removed) {
+ throw new IllegalStateException("Connection " + connection.getIdentifier() + " is not registered with " + this.getIdentifier());
+ }
+ } finally {
+ writeLock.unlock();
+ }
+ }
+
+ /**
+ * Verify that removing this connection will not prevent this Port from
+ * still being connected via each relationship
+ *
+ * @param connection to test for removal
+ * @return true if can be removed
+ */
+ private boolean canConnectionBeRemoved(final Connection connection) {
+ final Connectable source = connection.getSource();
+ if (!source.isRunning()) {
+ // we don't have to verify that this Connectable is still connected because it's okay to make
+ // the source invalid since it is not running.
+ return true;
+ }
+
+ for (final Relationship relationship : source.getRelationships()) {
+ if (source.isAutoTerminated(relationship)) {
+ continue;
+ }
+
+ final Set<Connection> connectionsForRelationship = source.getConnections(relationship);
+ if (connectionsForRelationship == null || connectionsForRelationship.isEmpty()) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
+ @Override
+ public Set<Connection> getConnections() {
+ readLock.lock();
+ try {
+ return Collections.unmodifiableSet(outgoingConnections);
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public Set<Connection> getConnections(final Relationship relationship) {
+ readLock.lock();
+ try {
+ if (relationship.equals(PORT_RELATIONSHIP)) {
+ return Collections.unmodifiableSet(outgoingConnections);
+ }
+
+ throw new IllegalArgumentException("No relationship with name " + relationship.getName() + " exists for Local Ports");
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public Position getPosition() {
+ return position.get();
+ }
+
+ @Override
+ public void setPosition(final Position position) {
+ this.position.set(position);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("id", getIdentifier()).toString();
+ }
+
+ @Override
+ public List<Connection> getIncomingConnections() {
+ readLock.lock();
+ try {
+ return Collections.unmodifiableList(incomingConnections);
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public abstract boolean isValid();
+
+ @Override
+ public boolean isAutoTerminated(final Relationship relationship) {
+ return false;
+ }
+
+ @Override
+ public boolean isLossTolerant() {
+ return lossTolerant.get();
+ }
+
+ @Override
+ public void setLossTolerant(boolean lossTolerant) {
+ this.lossTolerant.set(lossTolerant);
+ }
+
+ @Override
+ public void setMaxConcurrentTasks(final int taskCount) {
+ if (taskCount < 1) {
+ throw new IllegalArgumentException();
+ }
+ concurrentTaskCount.set(taskCount);
+ }
+
+ @Override
+ public int getMaxConcurrentTasks() {
+ return concurrentTaskCount.get();
+ }
+
+ @Override
+ public void shutdown() {
+ scheduledState.set(ScheduledState.STOPPED);
+ }
+
+ @Override
+ public void onSchedulingStart() {
+ scheduledState.set(ScheduledState.RUNNING);
+ }
+
+ public void disable() {
+ final boolean updated = scheduledState.compareAndSet(ScheduledState.STOPPED, ScheduledState.DISABLED);
+ if (!updated) {
+ throw new IllegalStateException("Port cannot be disabled because it is not stopped");
+ }
+ }
+
+ public void enable() {
+ final boolean updated = scheduledState.compareAndSet(ScheduledState.DISABLED, ScheduledState.STOPPED);
+ if (!updated) {
+ throw new IllegalStateException("Port cannot be enabled because it is not disabled");
+ }
+ }
+
+ @Override
+ public boolean isRunning() {
+ return getScheduledState().equals(ScheduledState.RUNNING) || processScheduler.getActiveThreadCount(this) > 0;
+ }
+
+ @Override
+ public ScheduledState getScheduledState() {
+ return scheduledState.get();
+ }
+
+ @Override
+ public ConnectableType getConnectableType() {
+ return type;
+ }
+
+ @Override
+ public void setYieldPeriod(final String yieldPeriod) {
+ final long yieldMillis = FormatUtils.getTimeDuration(requireNonNull(yieldPeriod), TimeUnit.MILLISECONDS);
+ if (yieldMillis < 0) {
+ throw new IllegalArgumentException("Yield duration must be positive");
+ }
+ this.yieldPeriod.set(yieldPeriod);
+ }
+
+ @Override
+ public void setScheduldingPeriod(final String schedulingPeriod) {
+ final long schedulingNanos = FormatUtils.getTimeDuration(requireNonNull(schedulingPeriod), TimeUnit.NANOSECONDS);
+ if (schedulingNanos < 0) {
+ throw new IllegalArgumentException("Scheduling Period must be positive");
+ }
+
+ this.schedulingPeriod.set(schedulingPeriod);
+ this.schedulingNanos.set(Math.max(MINIMUM_SCHEDULING_NANOS, schedulingNanos));
+ }
+
+ @Override
+ public long getPenalizationPeriod(final TimeUnit timeUnit) {
+ return FormatUtils.getTimeDuration(getPenalizationPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+ }
+
+ @Override
+ public String getPenalizationPeriod() {
+ return penalizationPeriod.get();
+ }
+
+ @Override
+ public void yield() {
+ final long yieldMillis = getYieldPeriod(TimeUnit.MILLISECONDS);
+ yield(yieldMillis, TimeUnit.MILLISECONDS);
+ }
+
+ @Override
+ public void yield(final long yieldDuration, final TimeUnit timeUnit) {
+ final long yieldMillis = timeUnit.toMillis(yieldDuration);
+ yieldExpiration.set(Math.max(yieldExpiration.get(), System.currentTimeMillis() + yieldMillis));
+ }
+
+ @Override
+ public long getYieldExpiration() {
+ return yieldExpiration.get();
+ }
+
+ @Override
+ public long getSchedulingPeriod(final TimeUnit timeUnit) {
+ return timeUnit.convert(schedulingNanos.get(), TimeUnit.NANOSECONDS);
+ }
+
+ @Override
+ public String getSchedulingPeriod() {
+ return schedulingPeriod.get();
+ }
+
+ @Override
+ public void setPenalizationPeriod(final String penalizationPeriod) {
+ this.penalizationPeriod.set(penalizationPeriod);
+ }
+
+ @Override
+ public String getYieldPeriod() {
+ return yieldPeriod.get();
+ }
+
+ @Override
+ public long getYieldPeriod(final TimeUnit timeUnit) {
+ return FormatUtils.getTimeDuration(getYieldPeriod(), timeUnit == null ? DEFAULT_TIME_UNIT : timeUnit);
+ }
+
+ @Override
+ public void verifyCanDelete() throws IllegalStateException {
+ verifyCanDelete(false);
+ }
+
+ @Override
+ public void verifyCanDelete(final boolean ignoreConnections) {
+ readLock.lock();
+ try {
+ if (isRunning()) {
+ throw new IllegalStateException(this.getIdentifier() + " is running");
+ }
+
+ if (!ignoreConnections) {
+ for (final Connection connection : outgoingConnections) {
+ connection.verifyCanDelete();
+ }
+
+ for (final Connection connection : incomingConnections) {
+ if (connection.getSource().equals(this)) {
+ connection.verifyCanDelete();
+ } else {
+ throw new IllegalStateException(this.getIdentifier() + " is the destination of another component");
+ }
+ }
+ }
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public void verifyCanStart() {
+ readLock.lock();
+ try {
+ switch (scheduledState.get()) {
+ case DISABLED:
+ throw new IllegalStateException(this.getIdentifier() + " cannot be started because it is disabled");
+ case RUNNING:
+ throw new IllegalStateException(this.getIdentifier() + " cannot be started because it is already running");
+ case STOPPED:
+ break;
+ }
+ verifyNoActiveThreads();
+
+ final Collection<ValidationResult> validationResults = getValidationErrors();
+ if (!validationResults.isEmpty()) {
+ throw new IllegalStateException(this.getIdentifier() + " is not in a valid state: " + validationResults.iterator().next().getExplanation());
+ }
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public void verifyCanStop() {
+ if (getScheduledState() != ScheduledState.RUNNING) {
+ throw new IllegalStateException(this.getIdentifier() + " is not scheduled to run");
+ }
+ }
+
+ @Override
+ public void verifyCanUpdate() {
+ readLock.lock();
+ try {
+ if (isRunning()) {
+ throw new IllegalStateException(this.getIdentifier() + " is not stopped");
+ }
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public void verifyCanEnable() {
+ readLock.lock();
+ try {
+ if (getScheduledState() != ScheduledState.DISABLED) {
+ throw new IllegalStateException(this.getIdentifier() + " is not disabled");
+ }
+
+ verifyNoActiveThreads();
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ @Override
+ public void verifyCanDisable() {
+ readLock.lock();
+ try {
+ if (getScheduledState() != ScheduledState.STOPPED) {
+ throw new IllegalStateException(this.getIdentifier() + " is not stopped");
+ }
+ verifyNoActiveThreads();
+ } finally {
+ readLock.unlock();
+ }
+ }
+
+ private void verifyNoActiveThreads() throws IllegalStateException {
+ final int threadCount = processScheduler.getActiveThreadCount(this);
+ if (threadCount > 0) {
+ throw new IllegalStateException(this.getIdentifier() + " has " + threadCount + " threads still active");
+ }
+ }
+
+ @Override
+ public void verifyCanClearState() {
+ }
+
+ @Override
+ public Optional<String> getVersionedComponentId() {
+ return Optional.ofNullable(versionedComponentId.get());
+ }
+
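+ // Atomically sets the versioned component id: it may be set when unset, cleared, or re-set to the same value; switching to a different id is rejected.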
+ @Override
+ public void setVersionedComponentId(final String versionedComponentId) {
+ boolean updated = false;
+ while (!updated) {
+ final String currentId = this.versionedComponentId.get();
+
+ if (currentId == null) {
+ updated = this.versionedComponentId.compareAndSet(null, versionedComponentId);
+ } else if (currentId.equals(versionedComponentId)) {
+ return;
+ } else if (versionedComponentId == null) {
+ updated = this.versionedComponentId.compareAndSet(currentId, null);
+ } else {
+ throw new IllegalStateException(this + " is already under version control");
+ }
+ }
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEAutoLoader.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEAutoLoader.java
new file mode 100644
index 0000000..ec15ba6
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEAutoLoader.java
@@ -0,0 +1,105 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.apache.nifi.nar;
+
+import org.apache.nifi.bundle.Bundle;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.Executors;
+
+/**
+ * Uses the Java executor service scheduler to continuously load new DCAE jars
+ */
+public class DCAEAutoLoader {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DCAEAutoLoader.class);
+
+ private static final long POLL_INTERVAL_MS = 5000;
+
+ /**
+ * Runnable task that grabs the list of remotely stored jars, identifies the ones that
+ * haven't been processed yet, builds NiFi bundles for them, and loads those bundles
+ * into the global extension manager.
+ */
+ private static class LoaderTask implements Runnable {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LoaderTask.class);
+
+ private final URI indexJsonDcaeJars;
+ private final ExtensionDiscoveringManager extensionManager;
+ private final Set<URL> processed = new LinkedHashSet<>();
+
+ private LoaderTask(URI indexJsonDcaeJars, ExtensionDiscoveringManager extensionManager) {
+ this.indexJsonDcaeJars = indexJsonDcaeJars;
+ this.extensionManager = extensionManager;
+ }
+
+ @Override
+ public void run() {
+ try {
+ List<URL> toProcess = DCAEClassLoaders.getDCAEJarsURLs(this.indexJsonDcaeJars);
+ toProcess.removeAll(processed);
+
+ if (!toProcess.isEmpty()) {
+ Set<Bundle> bundles = DCAEClassLoaders.createDCAEBundles(toProcess);
+ this.extensionManager.discoverExtensions(bundles);
+ processed.addAll(toProcess);
+
+ LOGGER.info(String.format("#Added DCAE bundles: %d, #Total DCAE bundles: %d ",
+ bundles.size(), processed.size()));
+ }
+ } catch (final Exception e) {
+ LOGGER.error("Error loading DCAE jars due to: " + e.getMessage(), e);
+ }
+ }
+ }
+
+ private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
+ private ScheduledFuture<?> taskFuture;
+
+ public synchronized void start(URI indexJsonDcaeJars, final ExtensionDiscoveringManager extensionManager) {
+ // Restricting to a single thread
+ if (taskFuture != null && !taskFuture.isCancelled()) {
+ return;
+ }
+
+ LOGGER.info("Starting DCAE Auto-Loader: {}", new Object[]{indexJsonDcaeJars});
+
+ LoaderTask task = new LoaderTask(indexJsonDcaeJars, extensionManager);
+ this.taskFuture = executor.scheduleAtFixedRate(task, 0, POLL_INTERVAL_MS, TimeUnit.MILLISECONDS);
+ LOGGER.info("DCAE Auto-Loader started");
+ }
+
+ public synchronized void stop() {
+ if (this.taskFuture != null) {
+ this.taskFuture.cancel(true);
+ LOGGER.info("DCAE Auto-Loader stopped");
+ }
+ }
+
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEClassLoaders.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEClassLoaders.java
new file mode 100644
index 0000000..a4dbe77
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/nar/DCAEClassLoaders.java
@@ -0,0 +1,127 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.apache.nifi.nar;
+
+import org.apache.nifi.bundle.Bundle;
+import org.apache.nifi.bundle.BundleCoordinate;
+import org.apache.nifi.bundle.BundleDetails;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.Manifest;
+import java.util.stream.Collectors;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import java.util.jar.Attributes;
+
+
+/**
+ * Class responsible for loading JARs for DCAEProcessors into Nifi
+ */
+public class DCAEClassLoaders {
+
+ public static class DCAEClassLoadersError extends RuntimeException {
+ public DCAEClassLoadersError(Throwable e) {
+ super("Error while using DCAEClassLoaders", e);
+ }
+ }
+
+ /**
+ * Given a URI to an index.json file, fetches the file and generates a list of
+ * URLs for the DCAE jars that have Processors packaged.
+ *
+ * @param indexDCAEJars URI of the index.json file listing the available DCAE jars
+ * @return list of URLs to the DCAE jars
+ */
+ public static List<URL> getDCAEJarsURLs(URI indexDCAEJars) {
+ JsonFactory jf = new JsonFactory();
+ ObjectMapper om = new ObjectMapper();
+
+ try {
+ List<Object> urls = om.readValue(jf.createParser(indexDCAEJars.toURL()), List.class);
+
+ return urls.stream().map(u -> {
+ try {
+ Map<String, Object> entry = (Map<String, Object>) u;
+ String name = (String) entry.get("name");
+ String url = String.format("%s/%s", indexDCAEJars.toString(), name);
+ return new URL(url);
+ } catch (MalformedURLException e) {
+ // Should not happen for well-formed index entries; yields a null entry in the result
+ return null;
+ }
+ }).collect(Collectors.toList());
+ } catch (Exception e) {
+ throw new RuntimeException("Error while getting jar URIs", e);
+ }
+ }
+
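+ // Builds BundleDetails from the jar's META-INF/MANIFEST.MF, using its Group/Id/Version attributes as the bundle coordinate.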
+ private static BundleDetails createBundleDetails(URLClassLoader classLoader) {
+ try {
+ URL url = classLoader.findResource("META-INF/MANIFEST.MF");
+ Manifest manifest = new Manifest(url.openStream());
+
+ final Attributes attributes = manifest.getMainAttributes();
+
+ final BundleDetails.Builder builder = new BundleDetails.Builder();
+ // NOTE: Working directory cannot be null so set it to some bogus dir
+ // because we aren't really using this. Or maybe should create our own
+ // working directory
+ builder.workingDir(new File("/tmp"));
+
+ final String group = attributes.getValue("Group");
+ final String id = attributes.getValue("Id");
+ final String version = attributes.getValue("Version");
+ builder.coordinate(new BundleCoordinate(group, id, version));
+
+ return builder.build();
+ } catch (IOException e) {
+ throw new DCAEClassLoadersError(e);
+ }
+ }
+
+ /**
+ * From a list of URLs to remote JARs containing DCAEProcessor classes, creates a
+ * bundle for each JAR. If any JAR cannot be processed an error is thrown, so a
+ * partial list of bundles is never returned.
+ *
+ * @param jarURLs URLs of the remote JARs to wrap in bundles
+ * @return set of bundles, one per JAR
+ */
+ public static Set<Bundle> createDCAEBundles(List<URL> jarURLs) {
+ Set<Bundle> bundles = new HashSet<>();
+
+ for (URL jarURL : jarURLs) {
+ URLClassLoader classLoader = new URLClassLoader(new URL[] {jarURL});
+ Bundle bundle = new Bundle(createBundleDetails(classLoader), classLoader);
+ bundles.add(bundle);
+ }
+
+ return bundles;
+ }
+
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/util/NiFiProperties.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/util/NiFiProperties.java
new file mode 100644
index 0000000..3b341ec
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/util/NiFiProperties.java
@@ -0,0 +1,1551 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.util;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.InvalidPathException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * The NiFiProperties class holds all properties which are needed for various
+ * values to be available at runtime. It is strongly tied to the startup
+ * properties needed and is often referred to as the 'nifi.properties' file. The
+ * properties contain keys and values. Great care should be taken in leveraging
+ * this class or passing it along. Its use should be refactored and minimized
+ * over time.
+ */
+public abstract class NiFiProperties {
+
+ // core properties
+ public static final String PROPERTIES_FILE_PATH = "nifi.properties.file.path";
+ public static final String FLOW_CONFIGURATION_FILE = "nifi.flow.configuration.file";
+ public static final String FLOW_CONFIGURATION_ARCHIVE_ENABLED = "nifi.flow.configuration.archive.enabled";
+ public static final String FLOW_CONFIGURATION_ARCHIVE_DIR = "nifi.flow.configuration.archive.dir";
+ public static final String FLOW_CONFIGURATION_ARCHIVE_MAX_TIME = "nifi.flow.configuration.archive.max.time";
+ public static final String FLOW_CONFIGURATION_ARCHIVE_MAX_STORAGE = "nifi.flow.configuration.archive.max.storage";
+ public static final String FLOW_CONFIGURATION_ARCHIVE_MAX_COUNT = "nifi.flow.configuration.archive.max.count";
+ public static final String AUTHORIZER_CONFIGURATION_FILE = "nifi.authorizer.configuration.file";
+ public static final String LOGIN_IDENTITY_PROVIDER_CONFIGURATION_FILE = "nifi.login.identity.provider.configuration.file";
+ public static final String REPOSITORY_DATABASE_DIRECTORY = "nifi.database.directory";
+ public static final String RESTORE_DIRECTORY = "nifi.restore.directory";
+ public static final String WRITE_DELAY_INTERVAL = "nifi.flowservice.writedelay.interval";
+ public static final String AUTO_RESUME_STATE = "nifi.flowcontroller.autoResumeState";
+ public static final String FLOW_CONTROLLER_GRACEFUL_SHUTDOWN_PERIOD = "nifi.flowcontroller.graceful.shutdown.period";
+ public static final String NAR_LIBRARY_DIRECTORY = "nifi.nar.library.directory";
+ public static final String NAR_LIBRARY_DIRECTORY_PREFIX = "nifi.nar.library.directory.";
+ public static final String NAR_LIBRARY_AUTOLOAD_DIRECTORY = "nifi.nar.library.autoload.directory";
+ public static final String NAR_WORKING_DIRECTORY = "nifi.nar.working.directory";
+ public static final String COMPONENT_DOCS_DIRECTORY = "nifi.documentation.working.directory";
+ public static final String SENSITIVE_PROPS_KEY = "nifi.sensitive.props.key";
+ public static final String SENSITIVE_PROPS_ALGORITHM = "nifi.sensitive.props.algorithm";
+ public static final String SENSITIVE_PROPS_PROVIDER = "nifi.sensitive.props.provider";
+ public static final String H2_URL_APPEND = "nifi.h2.url.append";
+ public static final String REMOTE_INPUT_HOST = "nifi.remote.input.host";
+ public static final String REMOTE_INPUT_PORT = "nifi.remote.input.socket.port";
+ public static final String SITE_TO_SITE_SECURE = "nifi.remote.input.secure";
+ public static final String SITE_TO_SITE_HTTP_ENABLED = "nifi.remote.input.http.enabled";
+ public static final String SITE_TO_SITE_HTTP_TRANSACTION_TTL = "nifi.remote.input.http.transaction.ttl";
+ public static final String REMOTE_CONTENTS_CACHE_EXPIRATION = "nifi.remote.contents.cache.expiration";
+ public static final String TEMPLATE_DIRECTORY = "nifi.templates.directory";
+ public static final String ADMINISTRATIVE_YIELD_DURATION = "nifi.administrative.yield.duration";
+ public static final String PERSISTENT_STATE_DIRECTORY = "nifi.persistent.state.directory";
+ public static final String BORED_YIELD_DURATION = "nifi.bored.yield.duration";
+ public static final String PROCESSOR_SCHEDULING_TIMEOUT = "nifi.processor.scheduling.timeout";
+ public static final String BACKPRESSURE_COUNT = "nifi.queue.backpressure.count";
+ public static final String BACKPRESSURE_SIZE = "nifi.queue.backpressure.size";
+
+ // DCAE related config
+ public static final String DCAE_JARS_INDEX_URL = "nifi.dcae.jars.index.url";
+
+ // content repository properties
+ public static final String REPOSITORY_CONTENT_PREFIX = "nifi.content.repository.directory.";
+ public static final String CONTENT_REPOSITORY_IMPLEMENTATION = "nifi.content.repository.implementation";
+ public static final String MAX_APPENDABLE_CLAIM_SIZE = "nifi.content.claim.max.appendable.size";
+ public static final String MAX_FLOWFILES_PER_CLAIM = "nifi.content.claim.max.flow.files";
+ public static final String CONTENT_ARCHIVE_MAX_RETENTION_PERIOD = "nifi.content.repository.archive.max.retention.period";
+ public static final String CONTENT_ARCHIVE_MAX_USAGE_PERCENTAGE = "nifi.content.repository.archive.max.usage.percentage";
+ public static final String CONTENT_ARCHIVE_BACK_PRESSURE_PERCENTAGE = "nifi.content.repository.archive.backpressure.percentage";
+ public static final String CONTENT_ARCHIVE_ENABLED = "nifi.content.repository.archive.enabled";
+ public static final String CONTENT_ARCHIVE_CLEANUP_FREQUENCY = "nifi.content.repository.archive.cleanup.frequency";
+ public static final String CONTENT_VIEWER_URL = "nifi.content.viewer.url";
+
+ // flowfile repository properties
+ public static final String FLOWFILE_REPOSITORY_IMPLEMENTATION = "nifi.flowfile.repository.implementation";
+ public static final String FLOWFILE_REPOSITORY_ALWAYS_SYNC = "nifi.flowfile.repository.always.sync";
+ public static final String FLOWFILE_REPOSITORY_DIRECTORY = "nifi.flowfile.repository.directory";
+ public static final String FLOWFILE_REPOSITORY_PARTITIONS = "nifi.flowfile.repository.partitions";
+ public static final String FLOWFILE_REPOSITORY_CHECKPOINT_INTERVAL = "nifi.flowfile.repository.checkpoint.interval";
+ public static final String FLOWFILE_SWAP_MANAGER_IMPLEMENTATION = "nifi.swap.manager.implementation";
+ public static final String QUEUE_SWAP_THRESHOLD = "nifi.queue.swap.threshold";
+ public static final String SWAP_IN_THREADS = "nifi.swap.in.threads";
+ public static final String SWAP_IN_PERIOD = "nifi.swap.in.period";
+ public static final String SWAP_OUT_THREADS = "nifi.swap.out.threads";
+ public static final String SWAP_OUT_PERIOD = "nifi.swap.out.period";
+
+ // provenance properties
+ public static final String PROVENANCE_REPO_IMPLEMENTATION_CLASS = "nifi.provenance.repository.implementation";
+ public static final String PROVENANCE_REPO_DIRECTORY_PREFIX = "nifi.provenance.repository.directory.";
+ public static final String PROVENANCE_MAX_STORAGE_TIME = "nifi.provenance.repository.max.storage.time";
+ public static final String PROVENANCE_MAX_STORAGE_SIZE = "nifi.provenance.repository.max.storage.size";
+ public static final String PROVENANCE_ROLLOVER_TIME = "nifi.provenance.repository.rollover.time";
+ public static final String PROVENANCE_ROLLOVER_SIZE = "nifi.provenance.repository.rollover.size";
+ public static final String PROVENANCE_QUERY_THREAD_POOL_SIZE = "nifi.provenance.repository.query.threads";
+ public static final String PROVENANCE_INDEX_THREAD_POOL_SIZE = "nifi.provenance.repository.index.threads";
+ public static final String PROVENANCE_COMPRESS_ON_ROLLOVER = "nifi.provenance.repository.compress.on.rollover";
+ public static final String PROVENANCE_INDEXED_FIELDS = "nifi.provenance.repository.indexed.fields";
+ public static final String PROVENANCE_INDEXED_ATTRIBUTES = "nifi.provenance.repository.indexed.attributes";
+ public static final String PROVENANCE_INDEX_SHARD_SIZE = "nifi.provenance.repository.index.shard.size";
+ public static final String PROVENANCE_JOURNAL_COUNT = "nifi.provenance.repository.journal.count";
+ public static final String PROVENANCE_REPO_ENCRYPTION_KEY = "nifi.provenance.repository.encryption.key";
+ public static final String PROVENANCE_REPO_ENCRYPTION_KEY_ID = "nifi.provenance.repository.encryption.key.id";
+ public static final String PROVENANCE_REPO_ENCRYPTION_KEY_PROVIDER_IMPLEMENTATION_CLASS = "nifi.provenance.repository.encryption.key.provider.implementation";
+ public static final String PROVENANCE_REPO_ENCRYPTION_KEY_PROVIDER_LOCATION = "nifi.provenance.repository.encryption.key.provider.location";
+ public static final String PROVENANCE_REPO_DEBUG_FREQUENCY = "nifi.provenance.repository.debug.frequency";
+
+ // component status repository properties
+ public static final String COMPONENT_STATUS_REPOSITORY_IMPLEMENTATION = "nifi.components.status.repository.implementation";
+ public static final String COMPONENT_STATUS_SNAPSHOT_FREQUENCY = "nifi.components.status.snapshot.frequency";
+
+ // security properties
+ public static final String SECURITY_KEYSTORE = "nifi.security.keystore";
+ public static final String SECURITY_KEYSTORE_TYPE = "nifi.security.keystoreType";
+ public static final String SECURITY_KEYSTORE_PASSWD = "nifi.security.keystorePasswd";
+ public static final String SECURITY_KEY_PASSWD = "nifi.security.keyPasswd";
+ public static final String SECURITY_TRUSTSTORE = "nifi.security.truststore";
+ public static final String SECURITY_TRUSTSTORE_TYPE = "nifi.security.truststoreType";
+ public static final String SECURITY_TRUSTSTORE_PASSWD = "nifi.security.truststorePasswd";
+ public static final String SECURITY_USER_AUTHORIZER = "nifi.security.user.authorizer";
+ public static final String SECURITY_USER_LOGIN_IDENTITY_PROVIDER = "nifi.security.user.login.identity.provider";
+ public static final String SECURITY_OCSP_RESPONDER_URL = "nifi.security.ocsp.responder.url";
+ public static final String SECURITY_OCSP_RESPONDER_CERTIFICATE = "nifi.security.ocsp.responder.certificate";
+ public static final String SECURITY_IDENTITY_MAPPING_PATTERN_PREFIX = "nifi.security.identity.mapping.pattern.";
+ public static final String SECURITY_IDENTITY_MAPPING_VALUE_PREFIX = "nifi.security.identity.mapping.value.";
+ public static final String SECURITY_IDENTITY_MAPPING_TRANSFORM_PREFIX = "nifi.security.identity.mapping.transform.";
+ public static final String SECURITY_GROUP_MAPPING_PATTERN_PREFIX = "nifi.security.group.mapping.pattern.";
+ public static final String SECURITY_GROUP_MAPPING_VALUE_PREFIX = "nifi.security.group.mapping.value.";
+ public static final String SECURITY_GROUP_MAPPING_TRANSFORM_PREFIX = "nifi.security.group.mapping.transform.";
+
+ // oidc
+ public static final String SECURITY_USER_OIDC_DISCOVERY_URL = "nifi.security.user.oidc.discovery.url";
+ public static final String SECURITY_USER_OIDC_CONNECT_TIMEOUT = "nifi.security.user.oidc.connect.timeout";
+ public static final String SECURITY_USER_OIDC_READ_TIMEOUT = "nifi.security.user.oidc.read.timeout";
+ public static final String SECURITY_USER_OIDC_CLIENT_ID = "nifi.security.user.oidc.client.id";
+ public static final String SECURITY_USER_OIDC_CLIENT_SECRET = "nifi.security.user.oidc.client.secret";
+ public static final String SECURITY_USER_OIDC_PREFERRED_JWSALGORITHM = "nifi.security.user.oidc.preferred.jwsalgorithm";
+
+ // apache knox
+ public static final String SECURITY_USER_KNOX_URL = "nifi.security.user.knox.url";
+ public static final String SECURITY_USER_KNOX_PUBLIC_KEY = "nifi.security.user.knox.publicKey";
+ public static final String SECURITY_USER_KNOX_COOKIE_NAME = "nifi.security.user.knox.cookieName";
+ public static final String SECURITY_USER_KNOX_AUDIENCES = "nifi.security.user.knox.audiences";
+
+ // web properties
+ public static final String WEB_WAR_DIR = "nifi.web.war.directory";
+ public static final String WEB_HTTP_PORT = "nifi.web.http.port";
+ public static final String WEB_HTTP_PORT_FORWARDING = "nifi.web.http.port.forwarding";
+ public static final String WEB_HTTP_HOST = "nifi.web.http.host";
+ public static final String WEB_HTTP_NETWORK_INTERFACE_PREFIX = "nifi.web.http.network.interface.";
+ public static final String WEB_HTTPS_PORT = "nifi.web.https.port";
+ public static final String WEB_HTTPS_PORT_FORWARDING = "nifi.web.https.port.forwarding";
+ public static final String WEB_HTTPS_HOST = "nifi.web.https.host";
+ public static final String WEB_HTTPS_NETWORK_INTERFACE_PREFIX = "nifi.web.https.network.interface.";
+ public static final String WEB_WORKING_DIR = "nifi.web.jetty.working.directory";
+ public static final String WEB_THREADS = "nifi.web.jetty.threads";
+ public static final String WEB_MAX_HEADER_SIZE = "nifi.web.max.header.size";
+ public static final String WEB_PROXY_CONTEXT_PATH = "nifi.web.proxy.context.path";
+ public static final String WEB_PROXY_HOST = "nifi.web.proxy.host";
+
+ // ui properties
+ public static final String UI_BANNER_TEXT = "nifi.ui.banner.text";
+ public static final String UI_AUTO_REFRESH_INTERVAL = "nifi.ui.autorefresh.interval";
+ public static final String UI_DCAE_DISTRIBUTOR_API_URL = "nifi.ui.dcae.distibutor.api.url";
+
+ // cluster common properties
+ public static final String CLUSTER_PROTOCOL_HEARTBEAT_INTERVAL = "nifi.cluster.protocol.heartbeat.interval";
+ public static final String CLUSTER_PROTOCOL_IS_SECURE = "nifi.cluster.protocol.is.secure";
+
+ // cluster node properties
+ public static final String CLUSTER_IS_NODE = "nifi.cluster.is.node";
+ public static final String CLUSTER_NODE_ADDRESS = "nifi.cluster.node.address";
+ public static final String CLUSTER_NODE_PROTOCOL_PORT = "nifi.cluster.node.protocol.port";
+ public static final String CLUSTER_NODE_PROTOCOL_THREADS = "nifi.cluster.node.protocol.threads";
+ public static final String CLUSTER_NODE_PROTOCOL_MAX_THREADS = "nifi.cluster.node.protocol.max.threads";
+ public static final String CLUSTER_NODE_CONNECTION_TIMEOUT = "nifi.cluster.node.connection.timeout";
+ public static final String CLUSTER_NODE_READ_TIMEOUT = "nifi.cluster.node.read.timeout";
+ public static final String CLUSTER_NODE_MAX_CONCURRENT_REQUESTS = "nifi.cluster.node.max.concurrent.requests";
+ public static final String CLUSTER_FIREWALL_FILE = "nifi.cluster.firewall.file";
+ public static final String FLOW_ELECTION_MAX_WAIT_TIME = "nifi.cluster.flow.election.max.wait.time";
+ public static final String FLOW_ELECTION_MAX_CANDIDATES = "nifi.cluster.flow.election.max.candidates";
+
+ // cluster load balance properties
+ public static final String LOAD_BALANCE_ADDRESS = "nifi.cluster.load.balance.address";
+ public static final String LOAD_BALANCE_PORT = "nifi.cluster.load.balance.port";
+ public static final String LOAD_BALANCE_CONNECTIONS_PER_NODE = "nifi.cluster.load.balance.connections.per.node";
+ public static final String LOAD_BALANCE_MAX_THREAD_COUNT = "nifi.cluster.load.balance.max.thread.count";
+ public static final String LOAD_BALANCE_COMMS_TIMEOUT = "nifi.cluster.load.balance.comms.timeout";
+
+ // zookeeper properties
+ public static final String ZOOKEEPER_CONNECT_STRING = "nifi.zookeeper.connect.string";
+ public static final String ZOOKEEPER_CONNECT_TIMEOUT = "nifi.zookeeper.connect.timeout";
+ public static final String ZOOKEEPER_SESSION_TIMEOUT = "nifi.zookeeper.session.timeout";
+ public static final String ZOOKEEPER_ROOT_NODE = "nifi.zookeeper.root.node";
+ public static final String ZOOKEEPER_AUTH_TYPE = "nifi.zookeeper.auth.type";
+ public static final String ZOOKEEPER_KERBEROS_REMOVE_HOST_FROM_PRINCIPAL = "nifi.zookeeper.kerberos.removeHostFromPrincipal";
+ public static final String ZOOKEEPER_KERBEROS_REMOVE_REALM_FROM_PRINCIPAL = "nifi.zookeeper.kerberos.removeRealmFromPrincipal";
+
+ // kerberos properties
+ public static final String KERBEROS_KRB5_FILE = "nifi.kerberos.krb5.file";
+ public static final String KERBEROS_SERVICE_PRINCIPAL = "nifi.kerberos.service.principal";
+ public static final String KERBEROS_SERVICE_KEYTAB_LOCATION = "nifi.kerberos.service.keytab.location";
+ public static final String KERBEROS_SPNEGO_PRINCIPAL = "nifi.kerberos.spnego.principal";
+ public static final String KERBEROS_SPNEGO_KEYTAB_LOCATION = "nifi.kerberos.spnego.keytab.location";
+ public static final String KERBEROS_AUTHENTICATION_EXPIRATION = "nifi.kerberos.spnego.authentication.expiration";
+
+ // state management
+ public static final String STATE_MANAGEMENT_CONFIG_FILE = "nifi.state.management.configuration.file";
+ public static final String STATE_MANAGEMENT_LOCAL_PROVIDER_ID = "nifi.state.management.provider.local";
+ public static final String STATE_MANAGEMENT_CLUSTER_PROVIDER_ID = "nifi.state.management.provider.cluster";
+ public static final String STATE_MANAGEMENT_START_EMBEDDED_ZOOKEEPER = "nifi.state.management.embedded.zookeeper.start";
+ public static final String STATE_MANAGEMENT_ZOOKEEPER_PROPERTIES = "nifi.state.management.embedded.zookeeper.properties";
+
+ // expression language properties
+ public static final String VARIABLE_REGISTRY_PROPERTIES = "nifi.variable.registry.properties";
+
+ // defaults
+ public static final Boolean DEFAULT_AUTO_RESUME_STATE = true;
+ public static final String DEFAULT_AUTHORIZER_CONFIGURATION_FILE = "conf/authorizers.xml";
+ public static final String DEFAULT_LOGIN_IDENTITY_PROVIDER_CONFIGURATION_FILE = "conf/login-identity-providers.xml";
+ public static final Integer DEFAULT_REMOTE_INPUT_PORT = null;
+ public static final Path DEFAULT_TEMPLATE_DIRECTORY = Paths.get("conf", "templates");
+ public static final int DEFAULT_WEB_THREADS = 200;
+ public static final String DEFAULT_WEB_MAX_HEADER_SIZE = "16 KB";
+ public static final String DEFAULT_WEB_WORKING_DIR = "./work/jetty";
+ public static final String DEFAULT_NAR_WORKING_DIR = "./work/nar";
+ public static final String DEFAULT_COMPONENT_DOCS_DIRECTORY = "./work/docs/components";
+ public static final String DEFAULT_NAR_LIBRARY_DIR = "./lib";
+ public static final String DEFAULT_NAR_LIBRARY_AUTOLOAD_DIR = "./extensions";
+ public static final String DEFAULT_FLOWFILE_REPO_PARTITIONS = "256";
+ public static final String DEFAULT_FLOWFILE_CHECKPOINT_INTERVAL = "2 min";
+ public static final int DEFAULT_MAX_FLOWFILES_PER_CLAIM = 100;
+ public static final String DEFAULT_MAX_APPENDABLE_CLAIM_SIZE = "1 MB";
+ public static final int DEFAULT_QUEUE_SWAP_THRESHOLD = 20000;
+ public static final String DEFAULT_SWAP_STORAGE_LOCATION = "./flowfile_repository/swap";
+ public static final String DEFAULT_SWAP_IN_PERIOD = "1 sec";
+ public static final String DEFAULT_SWAP_OUT_PERIOD = "5 sec";
+ public static final int DEFAULT_SWAP_IN_THREADS = 4;
+ public static final int DEFAULT_SWAP_OUT_THREADS = 4;
+ public static final long DEFAULT_BACKPRESSURE_COUNT = 10_000L;
+ public static final String DEFAULT_BACKPRESSURE_SIZE = "1 GB";
+ public static final String DEFAULT_ADMINISTRATIVE_YIELD_DURATION = "30 sec";
+ public static final String DEFAULT_PERSISTENT_STATE_DIRECTORY = "./conf/state";
+ public static final String DEFAULT_COMPONENT_STATUS_SNAPSHOT_FREQUENCY = "5 mins";
+ public static final String DEFAULT_BORED_YIELD_DURATION = "10 millis";
+ public static final String DEFAULT_ZOOKEEPER_CONNECT_TIMEOUT = "3 secs";
+ public static final String DEFAULT_ZOOKEEPER_SESSION_TIMEOUT = "3 secs";
+ public static final String DEFAULT_ZOOKEEPER_ROOT_NODE = "/nifi";
+ public static final String DEFAULT_ZOOKEEPER_AUTH_TYPE = "default";
+ public static final String DEFAULT_ZOOKEEPER_KERBEROS_REMOVE_HOST_FROM_PRINCIPAL = "true";
+ public static final String DEFAULT_ZOOKEEPER_KERBEROS_REMOVE_REALM_FROM_PRINCIPAL = "true";
+ public static final String DEFAULT_SITE_TO_SITE_HTTP_TRANSACTION_TTL = "30 secs";
+ public static final String DEFAULT_FLOW_CONFIGURATION_ARCHIVE_ENABLED = "true";
+ public static final String DEFAULT_FLOW_CONFIGURATION_ARCHIVE_MAX_TIME = "30 days";
+ public static final String DEFAULT_FLOW_CONFIGURATION_ARCHIVE_MAX_STORAGE = "500 MB";
+ public static final String DEFAULT_SECURITY_USER_OIDC_CONNECT_TIMEOUT = "5 secs";
+ public static final String DEFAULT_SECURITY_USER_OIDC_READ_TIMEOUT = "5 secs";
+
+ // DCAE related config
+ // REVIEW: Default is to turn off the dcae jar loading until the platform becomes more accessible/stable
+ public static final String DEFAULT_DCAE_JARS_INDEX_URL = "";
+
+ // cluster common defaults
+ public static final String DEFAULT_CLUSTER_PROTOCOL_HEARTBEAT_INTERVAL = "5 sec";
+ public static final String DEFAULT_CLUSTER_PROTOCOL_MULTICAST_SERVICE_BROADCAST_DELAY = "500 ms";
+ public static final int DEFAULT_CLUSTER_PROTOCOL_MULTICAST_SERVICE_LOCATOR_ATTEMPTS = 3;
+ public static final String DEFAULT_CLUSTER_PROTOCOL_MULTICAST_SERVICE_LOCATOR_ATTEMPTS_DELAY = "1 sec";
+ public static final String DEFAULT_CLUSTER_NODE_READ_TIMEOUT = "5 sec";
+ public static final String DEFAULT_CLUSTER_NODE_CONNECTION_TIMEOUT = "5 sec";
+ public static final int DEFAULT_CLUSTER_NODE_MAX_CONCURRENT_REQUESTS = 100;
+
+ // cluster node defaults
+ public static final int DEFAULT_CLUSTER_NODE_PROTOCOL_THREADS = 10;
+ public static final int DEFAULT_CLUSTER_NODE_PROTOCOL_MAX_THREADS = 50;
+ public static final String DEFAULT_REQUEST_REPLICATION_CLAIM_TIMEOUT = "15 secs";
+ public static final String DEFAULT_FLOW_ELECTION_MAX_WAIT_TIME = "5 mins";
+
+ // cluster load balance defaults
+ public static final int DEFAULT_LOAD_BALANCE_PORT = 6342;
+ public static final int DEFAULT_LOAD_BALANCE_CONNECTIONS_PER_NODE = 4;
+ public static final int DEFAULT_LOAD_BALANCE_MAX_THREAD_COUNT = 8;
+ public static final String DEFAULT_LOAD_BALANCE_COMMS_TIMEOUT = "30 sec";
+
+
+ // state management defaults
+ public static final String DEFAULT_STATE_MANAGEMENT_CONFIG_FILE = "conf/state-management.xml";
+
+ // Kerberos defaults
+ public static final String DEFAULT_KERBEROS_AUTHENTICATION_EXPIRATION = "12 hours";
+
+
+ /**
+ * Retrieves the property value for the given property key.
+ *
+ * @param key the key of the property value to look up
+ * @return value of property at given key or null if not found
+ */
+ public abstract String getProperty(String key);
+
+ /**
+ * Retrieves all known property keys.
+ *
+ * @return all known property keys
+ */
+ public abstract Set<String> getPropertyKeys();
+
+ // getters for core properties //
+ public File getFlowConfigurationFile() {
+ try {
+ return new File(getProperty(FLOW_CONFIGURATION_FILE));
+ } catch (Exception ex) {
+ return null;
+ }
+ }
+
+ public File getFlowConfigurationFileDir() {
+ try {
+ return getFlowConfigurationFile().getParentFile();
+ } catch (Exception ex) {
+ return null;
+ }
+ }
+
+ private Integer getPropertyAsPort(final String propertyName, final Integer defaultValue) {
+ final String port = getProperty(propertyName);
+ if (StringUtils.isEmpty(port)) {
+ return defaultValue;
+ }
+ try {
+ final int val = Integer.parseInt(port);
+ if (val <= 0 || val > 65535) {
+ throw new RuntimeException("Valid port range is 0 - 65535 but got " + val);
+ }
+ return val;
+ } catch (final NumberFormatException e) {
+ return defaultValue;
+ }
+ }
+
+ public int getQueueSwapThreshold() {
+ final String thresholdValue = getProperty(QUEUE_SWAP_THRESHOLD);
+ if (thresholdValue == null) {
+ return DEFAULT_QUEUE_SWAP_THRESHOLD;
+ }
+
+ try {
+ return Integer.parseInt(thresholdValue);
+ } catch (final NumberFormatException e) {
+ return DEFAULT_QUEUE_SWAP_THRESHOLD;
+ }
+ }
+
+ public Integer getIntegerProperty(final String propertyName, final Integer defaultValue) {
+ final String value = getProperty(propertyName);
+ if (value == null || value.trim().isEmpty()) {
+ return defaultValue;
+ }
+
+ try {
+ return Integer.parseInt(value.trim());
+ } catch (final Exception e) {
+ return defaultValue;
+ }
+ }
+
+ public int getSwapInThreads() {
+ return getIntegerProperty(SWAP_IN_THREADS, DEFAULT_SWAP_IN_THREADS);
+ }
+
+ public int getSwapOutThreads() {
+ return getIntegerProperty(SWAP_OUT_THREADS, DEFAULT_SWAP_OUT_THREADS);
+ }
+
+ public String getSwapInPeriod() {
+ return getProperty(SWAP_IN_PERIOD, DEFAULT_SWAP_IN_PERIOD);
+ }
+
+ public String getSwapOutPeriod() {
+ return getProperty(SWAP_OUT_PERIOD, DEFAULT_SWAP_OUT_PERIOD);
+ }
+
+ public String getAdministrativeYieldDuration() {
+ return getProperty(ADMINISTRATIVE_YIELD_DURATION, DEFAULT_ADMINISTRATIVE_YIELD_DURATION);
+ }
+
+ /**
+ * The host name that will be given out to clients to connect to the Remote
+ * Input Port.
+ *
+ * @return the remote input host name or null if not configured
+ */
+ public String getRemoteInputHost() {
+ final String value = getProperty(REMOTE_INPUT_HOST);
+ return StringUtils.isBlank(value) ? null : value;
+ }
+
+ /**
+ * The socket port to listen on for a Remote Input Port.
+ *
+ * @return the remote input port for RAW socket communication
+ */
+ public Integer getRemoteInputPort() {
+ return getPropertyAsPort(REMOTE_INPUT_PORT, DEFAULT_REMOTE_INPUT_PORT);
+ }
+
+ /**
+ * @return False if property value is 'false'; True otherwise.
+ */
+ public Boolean isSiteToSiteSecure() {
+ final String secureVal = getProperty(SITE_TO_SITE_SECURE, "true");
+
+ return !"false".equalsIgnoreCase(secureVal);
+
+ }
+
+ /**
+ * @return True if property value is 'true'; False otherwise.
+ */
+ public Boolean isSiteToSiteHttpEnabled() {
+ final String remoteInputHttpEnabled = getProperty(SITE_TO_SITE_HTTP_ENABLED, "false");
+
+ return "true".equalsIgnoreCase(remoteInputHttpEnabled);
+
+ }
+
+ /**
+ * The HTTP or HTTPS Web API port for a Remote Input Port.
+ *
+ * @return the remote input port for HTTP(S) communication, or null if
+ * HTTP(S) Site-to-Site is not enabled
+ */
+ public Integer getRemoteInputHttpPort() {
+ if (!isSiteToSiteHttpEnabled()) {
+ return null;
+ }
+
+ final String propertyKey;
+ if (isSiteToSiteSecure()) {
+ if (StringUtils.isBlank(getProperty(NiFiProperties.WEB_HTTPS_PORT_FORWARDING))) {
+ propertyKey = WEB_HTTPS_PORT;
+ } else {
+ propertyKey = WEB_HTTPS_PORT_FORWARDING;
+ }
+ } else {
+ if (StringUtils.isBlank(getProperty(NiFiProperties.WEB_HTTP_PORT_FORWARDING))) {
+ propertyKey = WEB_HTTP_PORT;
+ } else {
+ propertyKey = WEB_HTTP_PORT_FORWARDING;
+ }
+ }
+
+ final Integer port = getIntegerProperty(propertyKey, null);
+ if (port == null) {
+ throw new RuntimeException("Remote input HTTP" + (isSiteToSiteSecure() ? "S" : "")
+ + " is enabled but " + propertyKey + " is not specified.");
+ }
+ return port;
+ }
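+
+ // Illustrative sketch, not part of the original code: assuming SITE_TO_SITE_HTTP_ENABLED is true,
+ // SITE_TO_SITE_SECURE is false, and nifi.web.http.port=8080, this method returns 8080; if
+ // nifi.web.http.port.forwarding=80 were also set, the forwarded port 80 would be returned instead.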
+
+ /**
+ * Returns the directory to which Templates are to be persisted
+ *
+ * @return the template directory
+ */
+ public Path getTemplateDirectory() {
+ final String strVal = getProperty(TEMPLATE_DIRECTORY);
+ return (strVal == null) ? DEFAULT_TEMPLATE_DIRECTORY : Paths.get(strVal);
+ }
+
+ /**
+ * Get the flow service write delay.
+ *
+ * @return The write delay
+ */
+ public String getFlowServiceWriteDelay() {
+ return getProperty(WRITE_DELAY_INTERVAL);
+ }
+
+ /**
+ * Returns whether the processors should be started automatically when the
+ * application loads.
+ *
+ * @return Whether to auto start the processors or not
+ */
+ public boolean getAutoResumeState() {
+ final String rawAutoResumeState = getProperty(AUTO_RESUME_STATE,
+ DEFAULT_AUTO_RESUME_STATE.toString());
+ return Boolean.parseBoolean(rawAutoResumeState);
+ }
+
+ /**
+ * Returns the number of partitions that should be used for the FlowFile
+ * Repository
+ *
+ * @return the number of partitions
+ */
+ public int getFlowFileRepositoryPartitions() {
+ final String rawProperty = getProperty(FLOWFILE_REPOSITORY_PARTITIONS,
+ DEFAULT_FLOWFILE_REPO_PARTITIONS);
+ return Integer.parseInt(rawProperty);
+ }
+
+ /**
+ * Returns the interval between FlowFileRepository checkpoint events as a
+ * time period string (for example, "2 min")
+ *
+ * @return the checkpoint interval
+ */
+ public String getFlowFileRepositoryCheckpointInterval() {
+ return getProperty(FLOWFILE_REPOSITORY_CHECKPOINT_INTERVAL,
+ DEFAULT_FLOWFILE_CHECKPOINT_INTERVAL);
+ }
+
+ /**
+ * @return the restore directory or null if not configured
+ */
+ public File getRestoreDirectory() {
+ final String value = getProperty(RESTORE_DIRECTORY);
+ if (StringUtils.isBlank(value)) {
+ return null;
+ } else {
+ return new File(value);
+ }
+ }
+
+ /**
+ * @return the user authorizers file
+ */
+ public File getAuthorizerConfigurationFile() {
+ final String value = getProperty(AUTHORIZER_CONFIGURATION_FILE);
+ if (StringUtils.isBlank(value)) {
+ return new File(DEFAULT_AUTHORIZER_CONFIGURATION_FILE);
+ } else {
+ return new File(value);
+ }
+ }
+
+ /**
+ * @return the user login identity provider file
+ */
+ public File getLoginIdentityProviderConfigurationFile() {
+ final String value = getProperty(LOGIN_IDENTITY_PROVIDER_CONFIGURATION_FILE);
+ if (StringUtils.isBlank(value)) {
+ return new File(DEFAULT_LOGIN_IDENTITY_PROVIDER_CONFIGURATION_FILE);
+ } else {
+ return new File(value);
+ }
+ }
+
+ // getters for web properties //
+ public Integer getPort() {
+ Integer port = null;
+ try {
+ port = Integer.parseInt(getProperty(WEB_HTTP_PORT));
+ } catch (NumberFormatException nfe) {
+ // property missing or not numeric; leave port as null so callers can detect "not configured"
+ }
+ return port;
+ }
+
+ public Integer getSslPort() {
+ Integer sslPort = null;
+ try {
+ sslPort = Integer.parseInt(getProperty(WEB_HTTPS_PORT));
+ } catch (NumberFormatException nfe) {
+ // property missing or not numeric; leave sslPort as null so callers can detect "not configured"
+ }
+ return sslPort;
+ }
+
+ public boolean isHTTPSConfigured() {
+ return getSslPort() != null;
+ }
+
+ /**
+ * Determines the HTTP/HTTPS port NiFi is configured to bind to. Prefers the HTTPS port. Throws an exception if neither is configured.
+ *
+ * @return the configured port number
+ */
+ public Integer getConfiguredHttpOrHttpsPort() throws RuntimeException {
+ if (getSslPort() != null) {
+ return getSslPort();
+ } else if (getPort() != null) {
+ return getPort();
+ } else {
+ throw new RuntimeException("The HTTP or HTTPS port must be configured");
+ }
+ }
+
+ public String getWebMaxHeaderSize() {
+ return getProperty(WEB_MAX_HEADER_SIZE, DEFAULT_WEB_MAX_HEADER_SIZE);
+ }
+
+ public int getWebThreads() {
+ return getIntegerProperty(WEB_THREADS, DEFAULT_WEB_THREADS);
+ }
+
+ public int getClusterNodeMaxConcurrentRequests() {
+ return getIntegerProperty(CLUSTER_NODE_MAX_CONCURRENT_REQUESTS, DEFAULT_CLUSTER_NODE_MAX_CONCURRENT_REQUESTS);
+ }
+
+ public File getWebWorkingDirectory() {
+ return new File(getProperty(WEB_WORKING_DIR, DEFAULT_WEB_WORKING_DIR));
+ }
+
+ public File getComponentDocumentationWorkingDirectory() {
+ return new File(getProperty(COMPONENT_DOCS_DIRECTORY, DEFAULT_COMPONENT_DOCS_DIRECTORY));
+ }
+
+ public File getNarWorkingDirectory() {
+ return new File(getProperty(NAR_WORKING_DIRECTORY, DEFAULT_NAR_WORKING_DIR));
+ }
+
+ public File getFrameworkWorkingDirectory() {
+ return new File(getNarWorkingDirectory(), "framework");
+ }
+
+ public File getExtensionsWorkingDirectory() {
+ return new File(getNarWorkingDirectory(), "extensions");
+ }
+
+ public List<Path> getNarLibraryDirectories() {
+
+ List<Path> narLibraryPaths = new ArrayList<>();
+
+ // go through each property
+ for (String propertyName : getPropertyKeys()) {
+ // determine if the property is a nar library path
+ if (StringUtils.startsWith(propertyName, NAR_LIBRARY_DIRECTORY_PREFIX)
+ || NAR_LIBRARY_DIRECTORY.equals(propertyName)
+ || NAR_LIBRARY_AUTOLOAD_DIRECTORY.equals(propertyName)) {
+ // attempt to resolve the path specified
+ String narLib = getProperty(propertyName);
+ if (!StringUtils.isBlank(narLib)) {
+ narLibraryPaths.add(Paths.get(narLib));
+ }
+ }
+ }
+
+ if (narLibraryPaths.isEmpty()) {
+ narLibraryPaths.add(Paths.get(DEFAULT_NAR_LIBRARY_DIR));
+ }
+
+ return narLibraryPaths;
+ }
+
+ public File getNarAutoLoadDirectory() {
+ return new File(getProperty(NAR_LIBRARY_AUTOLOAD_DIRECTORY, DEFAULT_NAR_LIBRARY_AUTOLOAD_DIR));
+ }
+
+ /**
+ * Retrieves a URI to the index that contains URLs of all the DCAE jars to be loaded into NiFi.
+ * Refer to the genprocessor project for more info.
+ *
+ * Not setting the underlying configuration parameter "nifi.dcae.jars.index.url" is not
+ * fatal. NiFi will just skip over trying to load DCAE jars.
+ *
+ * @return the DCAE jar index URI, or null if the property is not set
+ * @throws URISyntaxException if the configured value is not a valid URI
+ */
+ public URI getDCAEJarIndexURI() throws URISyntaxException {
+ String strUrl = getProperty(DCAE_JARS_INDEX_URL, DEFAULT_DCAE_JARS_INDEX_URL);
+
+ if (strUrl == null || strUrl.isEmpty()) {
+ return null;
+ } else {
+ return new URI(strUrl);
+ }
+ }
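+
+ // Hedged example (the URL below is a placeholder, not a shipped default): setting
+ //   nifi.dcae.jars.index.url=https://dcae.example.org/nifi-jars/index.json
+ // makes this method return that URI; leaving the property empty (the default) returns null
+ // and DCAE jar loading is skipped.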
+
+ // getters for ui properties //
+
+ /**
+ * Get the banner text.
+ *
+ * @return The banner text
+ */
+ public String getBannerText() {
+ return this.getProperty(UI_BANNER_TEXT, StringUtils.EMPTY);
+ }
+
+
+ /**
+ * @author Renu
+ * @return the DCAE distributor API URL used by the UI, or null if not configured
+ */
+ public String getDcaeDistributorApiHostname() {
+ return getProperty(UI_DCAE_DISTRIBUTOR_API_URL);
+ }
+
+ /**
+ * Returns the auto refresh interval in seconds.
+ *
+ * @return the interval over which the properties should auto refresh
+ */
+ public String getAutoRefreshInterval() {
+ return getProperty(UI_AUTO_REFRESH_INTERVAL);
+ }
+
+ // getters for cluster protocol properties //
+ public String getClusterProtocolHeartbeatInterval() {
+ return getProperty(CLUSTER_PROTOCOL_HEARTBEAT_INTERVAL,
+ DEFAULT_CLUSTER_PROTOCOL_HEARTBEAT_INTERVAL);
+ }
+
+ public String getNodeHeartbeatInterval() {
+ return getClusterProtocolHeartbeatInterval();
+ }
+
+ public String getClusterNodeReadTimeout() {
+ return getProperty(CLUSTER_NODE_READ_TIMEOUT, DEFAULT_CLUSTER_NODE_READ_TIMEOUT);
+ }
+
+ public String getClusterNodeConnectionTimeout() {
+ return getProperty(CLUSTER_NODE_CONNECTION_TIMEOUT,
+ DEFAULT_CLUSTER_NODE_CONNECTION_TIMEOUT);
+ }
+
+ public File getPersistentStateDirectory() {
+ final String dirName = getProperty(PERSISTENT_STATE_DIRECTORY,
+ DEFAULT_PERSISTENT_STATE_DIRECTORY);
+ final File file = new File(dirName);
+ if (!file.exists()) {
+ file.mkdirs();
+ }
+ return file;
+ }
+
+ // getters for cluster node properties //
+ public boolean isNode() {
+ return Boolean.parseBoolean(getProperty(CLUSTER_IS_NODE));
+ }
+
+ public InetSocketAddress getClusterNodeProtocolAddress() {
+ try {
+ String socketAddress = getProperty(CLUSTER_NODE_ADDRESS);
+ if (StringUtils.isBlank(socketAddress)) {
+ socketAddress = "localhost";
+ }
+ int socketPort = getClusterNodeProtocolPort();
+ return InetSocketAddress.createUnresolved(socketAddress, socketPort);
+ } catch (Exception ex) {
+ throw new RuntimeException("Invalid node protocol address/port due to: " + ex, ex);
+ }
+ }
+
+ public InetSocketAddress getClusterLoadBalanceAddress() {
+ try {
+ String address = getProperty(LOAD_BALANCE_ADDRESS);
+ if (StringUtils.isBlank(address)) {
+ address = getProperty(CLUSTER_NODE_ADDRESS);
+ }
+ if (StringUtils.isBlank(address)) {
+ address = "localhost";
+ }
+
+ final int port = getIntegerProperty(LOAD_BALANCE_PORT, DEFAULT_LOAD_BALANCE_PORT);
+ return InetSocketAddress.createUnresolved(address, port);
+ } catch (final Exception e) {
+ throw new RuntimeException("Invalid load balance address/port due to: " + e, e);
+ }
+ }
+
+ public Integer getClusterNodeProtocolPort() {
+ try {
+ return Integer.parseInt(getProperty(CLUSTER_NODE_PROTOCOL_PORT));
+ } catch (NumberFormatException nfe) {
+ return null;
+ }
+ }
+
+ /**
+ * @deprecated Use getClusterNodeProtocolCorePoolSize() and getClusterNodeProtocolMaxPoolSize() instead
+ */
+ @Deprecated
+ public int getClusterNodeProtocolThreads() {
+ return getClusterNodeProtocolCorePoolSize();
+ }
+
+ public int getClusterNodeProtocolCorePoolSize() {
+ try {
+ return Integer.parseInt(getProperty(CLUSTER_NODE_PROTOCOL_THREADS));
+ } catch (NumberFormatException nfe) {
+ return DEFAULT_CLUSTER_NODE_PROTOCOL_THREADS;
+ }
+ }
+
+ public int getClusterNodeProtocolMaxPoolSize() {
+ try {
+ return Integer.parseInt(getProperty(CLUSTER_NODE_PROTOCOL_MAX_THREADS));
+ } catch (NumberFormatException nfe) {
+ return DEFAULT_CLUSTER_NODE_PROTOCOL_MAX_THREADS;
+ }
+ }
+
+ public boolean isClustered() {
+ return Boolean.parseBoolean(getProperty(CLUSTER_IS_NODE));
+ }
+
+ public File getClusterNodeFirewallFile() {
+ final String firewallFile = getProperty(CLUSTER_FIREWALL_FILE);
+ if (StringUtils.isBlank(firewallFile)) {
+ return null;
+ } else {
+ return new File(firewallFile);
+ }
+ }
+
+ public String getClusterProtocolManagerToNodeApiScheme() {
+ final String isSecureProperty = getProperty(CLUSTER_PROTOCOL_IS_SECURE);
+ if (Boolean.valueOf(isSecureProperty)) {
+ return "https";
+ } else {
+ return "http";
+ }
+ }
+
+ public File getKerberosConfigurationFile() {
+ final String krb5File = getProperty(KERBEROS_KRB5_FILE);
+ if (krb5File != null && krb5File.trim().length() > 0) {
+ return new File(krb5File.trim());
+ } else {
+ return null;
+ }
+ }
+
+ public String getKerberosServicePrincipal() {
+ final String servicePrincipal = getProperty(KERBEROS_SERVICE_PRINCIPAL);
+ if (!StringUtils.isBlank(servicePrincipal)) {
+ return servicePrincipal.trim();
+ } else {
+ return null;
+ }
+ }
+
+ public String getKerberosServiceKeytabLocation() {
+ final String keytabLocation = getProperty(KERBEROS_SERVICE_KEYTAB_LOCATION);
+ if (!StringUtils.isBlank(keytabLocation)) {
+ return keytabLocation.trim();
+ } else {
+ return null;
+ }
+ }
+
+ public String getKerberosSpnegoPrincipal() {
+ final String spnegoPrincipal = getProperty(KERBEROS_SPNEGO_PRINCIPAL);
+ if (!StringUtils.isBlank(spnegoPrincipal)) {
+ return spnegoPrincipal.trim();
+ } else {
+ return null;
+ }
+ }
+
+ public String getKerberosSpnegoKeytabLocation() {
+ final String keytabLocation = getProperty(KERBEROS_SPNEGO_KEYTAB_LOCATION);
+ if (!StringUtils.isBlank(keytabLocation)) {
+ return keytabLocation.trim();
+ } else {
+ return null;
+ }
+ }
+
+ public String getKerberosAuthenticationExpiration() {
+ final String authenticationExpirationString = getProperty(KERBEROS_AUTHENTICATION_EXPIRATION, DEFAULT_KERBEROS_AUTHENTICATION_EXPIRATION);
+ if (!StringUtils.isBlank(authenticationExpirationString)) {
+ return authenticationExpirationString.trim();
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Returns true if the Kerberos SPNEGO principal and keytab location
+ * properties are populated.
+ *
+ * @return true if Kerberos SPNEGO support is enabled
+ */
+ public boolean isKerberosSpnegoSupportEnabled() {
+ return !StringUtils.isBlank(getKerberosSpnegoPrincipal()) && !StringUtils.isBlank(getKerberosSpnegoKeytabLocation());
+ }
+
+ /**
+ * Returns true if the login identity provider has been configured.
+ *
+ * @return true if the login identity provider has been configured
+ */
+ public boolean isLoginIdentityProviderEnabled() {
+ return !StringUtils.isBlank(getProperty(NiFiProperties.SECURITY_USER_LOGIN_IDENTITY_PROVIDER));
+ }
+
+ /**
+ * Returns whether an OpenId Connect (OIDC) URL is set.
+ *
+ * @return whether an OpenId Connection URL is set
+ */
+ public boolean isOidcEnabled() {
+ return !StringUtils.isBlank(getOidcDiscoveryUrl());
+ }
+
+ /**
+ * Returns the OpenId Connect (OIDC) discovery URL, or null if not configured.
+ *
+ * @return OIDC discovery url
+ */
+ public String getOidcDiscoveryUrl() {
+ return getProperty(SECURITY_USER_OIDC_DISCOVERY_URL);
+ }
+
+ /**
+ * Returns the OpenId Connect connect timeout; never null because a default is applied.
+ *
+ * @return OIDC connect timeout
+ */
+ public String getOidcConnectTimeout() {
+ return getProperty(SECURITY_USER_OIDC_CONNECT_TIMEOUT, DEFAULT_SECURITY_USER_OIDC_CONNECT_TIMEOUT);
+ }
+
+ /**
+ * Returns the OpenId Connect read timeout; never null because a default is applied.
+ *
+ * @return OIDC read timeout
+ */
+ public String getOidcReadTimeout() {
+ return getProperty(SECURITY_USER_OIDC_READ_TIMEOUT, DEFAULT_SECURITY_USER_OIDC_READ_TIMEOUT);
+ }
+
+ /**
+ * Returns the OpenId Connect client id.
+ *
+ * @return OIDC client id
+ */
+ public String getOidcClientId() {
+ return getProperty(SECURITY_USER_OIDC_CLIENT_ID);
+ }
+
+ /**
+ * Returns the OpenId Connect client secret.
+ *
+ * @return OIDC client secret
+ */
+ public String getOidcClientSecret() {
+ return getProperty(SECURITY_USER_OIDC_CLIENT_SECRET);
+ }
+
+ /**
+ * Returns the preferred json web signature algorithm. May be null/blank.
+ *
+ * @return OIDC preferred json web signature algorithm
+ */
+ public String getOidcPreferredJwsAlgorithm() {
+ return getProperty(SECURITY_USER_OIDC_PREFERRED_JWSALGORITHM);
+ }
+
+ /**
+ * Returns whether Knox SSO is enabled.
+ *
+ * @return whether Knox SSO is enabled
+ */
+ public boolean isKnoxSsoEnabled() {
+ return !StringUtils.isBlank(getKnoxUrl());
+ }
+
+ /**
+ * Returns the Knox URL.
+ *
+ * @return Knox URL
+ */
+ public String getKnoxUrl() {
+ return getProperty(SECURITY_USER_KNOX_URL);
+ }
+
+ /**
+ * Gets the configured Knox Audiences.
+ *
+ * @return Knox audiences
+ */
+ public Set<String> getKnoxAudiences() {
+ final String rawAudiences = getProperty(SECURITY_USER_KNOX_AUDIENCES);
+ if (StringUtils.isBlank(rawAudiences)) {
+ return null;
+ } else {
+ final String[] audienceTokens = rawAudiences.split(",");
+ return Stream.of(audienceTokens).map(String::trim).filter(aud -> !StringUtils.isEmpty(aud)).collect(Collectors.toSet());
+ }
+ }
+
+ /**
+ * Returns the path to the Knox public key.
+ *
+ * @return path to the Knox public key
+ */
+ public Path getKnoxPublicKeyPath() {
+ return Paths.get(getProperty(SECURITY_USER_KNOX_PUBLIC_KEY));
+ }
+
+ /**
+ * Returns the name of the Knox cookie.
+ *
+ * @return name of the Knox cookie
+ */
+ public String getKnoxCookieName() {
+ return getProperty(SECURITY_USER_KNOX_COOKIE_NAME);
+ }
+
+ /**
+ * Returns true if client certificates are required for the REST API. This is
+ * the case when all of the following conditions are true:
+ * <p>
+ * - login identity provider is not populated
+ * - Kerberos service support is not enabled
+ * - openid connect is not enabled
+ * - knox sso is not enabled
+ * </p>
+ *
+ * @return true if client certificates are required for access to the REST API
+ */
+ public boolean isClientAuthRequiredForRestApi() {
+ return !isLoginIdentityProviderEnabled() && !isKerberosSpnegoSupportEnabled() && !isOidcEnabled() && !isKnoxSsoEnabled();
+ }
+
+ public InetSocketAddress getNodeApiAddress() {
+
+ final String rawScheme = getClusterProtocolManagerToNodeApiScheme();
+ final String scheme = (rawScheme == null) ? "http" : rawScheme;
+
+ final String host;
+ final Integer port;
+ if ("http".equalsIgnoreCase(scheme)) {
+ // get host
+ if (StringUtils.isBlank(getProperty(WEB_HTTP_HOST))) {
+ host = "localhost";
+ } else {
+ host = getProperty(WEB_HTTP_HOST);
+ }
+ // get port
+ port = getPort();
+
+ if (port == null) {
+ throw new RuntimeException(String.format("The %s must be specified if running in a cluster with %s set to false.", WEB_HTTP_PORT, CLUSTER_PROTOCOL_IS_SECURE));
+ }
+ } else {
+ // get host
+ if (StringUtils.isBlank(getProperty(WEB_HTTPS_HOST))) {
+ host = "localhost";
+ } else {
+ host = getProperty(WEB_HTTPS_HOST);
+ }
+ // get port
+ port = getSslPort();
+
+ if (port == null) {
+ throw new RuntimeException(String.format("The %s must be specified if running in a cluster with %s set to true.", WEB_HTTPS_PORT, CLUSTER_PROTOCOL_IS_SECURE));
+ }
+ }
+
+ return InetSocketAddress.createUnresolved(host, port);
+
+ }
+
+ /**
+ * Returns the database repository path. It simply returns the value
+ * configured. No directories will be created as a result of this operation.
+ *
+ * @return database repository path
+ * @throws InvalidPathException If the configured path is invalid
+ */
+ public Path getDatabaseRepositoryPath() {
+ return Paths.get(getProperty(REPOSITORY_DATABASE_DIRECTORY));
+ }
+
+ /**
+ * Returns the flow file repository path. It simply returns the value
+ * configured. No directories will be created as a result of this operation.
+ *
+ * @return flow file repository path
+ * @throws InvalidPathException If the configured path is invalid
+ */
+ public Path getFlowFileRepositoryPath() {
+ return Paths.get(getProperty(FLOWFILE_REPOSITORY_DIRECTORY));
+ }
+
+ /**
+ * Returns the content repository paths. This method returns a mapping of
+ * content repository name to content repository path. It simply returns the
+ * values configured. No directories will be created as a result of this
+ * operation.
+ *
+ * @return content repository paths
+ * @throws InvalidPathException If any of the configured paths are invalid
+ */
+ public Map<String, Path> getContentRepositoryPaths() {
+ final Map<String, Path> contentRepositoryPaths = new HashMap<>();
+
+ // go through each property
+ for (String propertyName : getPropertyKeys()) {
+ // determine if the property is a file repository path
+ if (StringUtils.startsWith(propertyName, REPOSITORY_CONTENT_PREFIX)) {
+ // get the repository key
+ final String key = StringUtils.substringAfter(propertyName,
+ REPOSITORY_CONTENT_PREFIX);
+
+ // attempt to resolve the path specified
+ contentRepositoryPaths.put(key, Paths.get(getProperty(propertyName)));
+ }
+ }
+ return contentRepositoryPaths;
+ }
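+
+ // Sketch of the expected property layout (repository names and paths are examples):
+ //   nifi.content.repository.directory.default=./content_repository
+ //   nifi.content.repository.directory.second=/mnt/content_repository2
+ // yields the map {"default" -> ./content_repository, "second" -> /mnt/content_repository2}.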
+
+ /**
+ * Returns the provenance repository paths. This method returns a mapping of
+ * provenance repository name to provenance repository path. It simply returns the
+ * values configured. No directories will be created as a result of this
+ * operation.
+ *
+ * @return the name and paths of all provenance repository locations
+ */
+ public Map<String, Path> getProvenanceRepositoryPaths() {
+ final Map<String, Path> provenanceRepositoryPaths = new HashMap<>();
+
+ // go through each property
+ for (String propertyName : getPropertyKeys()) {
+ // determine if the property is a file repository path
+ if (StringUtils.startsWith(propertyName, PROVENANCE_REPO_DIRECTORY_PREFIX)) {
+ // get the repository key
+ final String key = StringUtils.substringAfter(propertyName,
+ PROVENANCE_REPO_DIRECTORY_PREFIX);
+
+ // attempt to resolve the path specified
+ provenanceRepositoryPaths.put(key, Paths.get(getProperty(propertyName)));
+ }
+ }
+ return provenanceRepositoryPaths;
+ }
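+
+ // Analogous sketch for provenance (example values): nifi.provenance.repository.directory.default=./provenance_repository
+ // yields the map {"default" -> ./provenance_repository}.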
+
+ /**
+ * Returns the number of claims to keep open for writing. Ideally, this will be at
+ * least as large as the number of threads that update the repository simultaneously,
+ * but it should not be too large because up to this many FileOutputStreams are held open.
+ * <p>
+ * Default is {@link #DEFAULT_MAX_FLOWFILES_PER_CLAIM}
+ *
+ * @return the maximum number of flow files per claim
+ */
+ public int getMaxFlowFilesPerClaim() {
+ try {
+ return Integer.parseInt(getProperty(MAX_FLOWFILES_PER_CLAIM));
+ } catch (NumberFormatException nfe) {
+ return DEFAULT_MAX_FLOWFILES_PER_CLAIM;
+ }
+ }
+
+ /**
+ * Returns the maximum size, in bytes, that claims should grow before a new file is written.
+ * This avoids continually writing to one ever-growing file while still allowing many small files to be bunched together.
+ * <p>
+ * Default is {@link #DEFAULT_MAX_APPENDABLE_CLAIM_SIZE}
+ *
+ * @return the maximum appendable claim size
+ */
+ public String getMaxAppendableClaimSize() {
+ return getProperty(MAX_APPENDABLE_CLAIM_SIZE, DEFAULT_MAX_APPENDABLE_CLAIM_SIZE);
+ }
+
+ public String getProperty(final String key, final String defaultValue) {
+ final String value = getProperty(key);
+ return (value == null || value.trim().isEmpty()) ? defaultValue : value;
+ }
+
+ public String getBoredYieldDuration() {
+ return getProperty(BORED_YIELD_DURATION, DEFAULT_BORED_YIELD_DURATION);
+ }
+
+ public File getStateManagementConfigFile() {
+ return new File(getProperty(STATE_MANAGEMENT_CONFIG_FILE, DEFAULT_STATE_MANAGEMENT_CONFIG_FILE));
+ }
+
+ public String getLocalStateProviderId() {
+ return getProperty(STATE_MANAGEMENT_LOCAL_PROVIDER_ID);
+ }
+
+ public String getClusterStateProviderId() {
+ return getProperty(STATE_MANAGEMENT_CLUSTER_PROVIDER_ID);
+ }
+
+ public File getEmbeddedZooKeeperPropertiesFile() {
+ final String filename = getProperty(STATE_MANAGEMENT_ZOOKEEPER_PROPERTIES);
+ return filename == null ? null : new File(filename);
+ }
+
+ public boolean isStartEmbeddedZooKeeper() {
+ return Boolean.parseBoolean(getProperty(STATE_MANAGEMENT_START_EMBEDDED_ZOOKEEPER));
+ }
+
+ public boolean isFlowConfigurationArchiveEnabled() {
+ return Boolean.parseBoolean(getProperty(FLOW_CONFIGURATION_ARCHIVE_ENABLED, DEFAULT_FLOW_CONFIGURATION_ARCHIVE_ENABLED));
+ }
+
+ public String getFlowConfigurationArchiveDir() {
+ return getProperty(FLOW_CONFIGURATION_ARCHIVE_DIR);
+ }
+
+ public String getFlowElectionMaxWaitTime() {
+ return getProperty(FLOW_ELECTION_MAX_WAIT_TIME, DEFAULT_FLOW_ELECTION_MAX_WAIT_TIME);
+ }
+
+ public Integer getFlowElectionMaxCandidates() {
+ return getIntegerProperty(FLOW_ELECTION_MAX_CANDIDATES, null);
+ }
+
+ public String getFlowConfigurationArchiveMaxTime() {
+ return getProperty(FLOW_CONFIGURATION_ARCHIVE_MAX_TIME, null);
+ }
+
+ public String getFlowConfigurationArchiveMaxStorage() {
+ return getProperty(FLOW_CONFIGURATION_ARCHIVE_MAX_STORAGE, null);
+ }
+
+ public Integer getFlowConfigurationArchiveMaxCount() {
+ return getIntegerProperty(FLOW_CONFIGURATION_ARCHIVE_MAX_COUNT, null);
+ }
+
+ public String getVariableRegistryProperties() {
+ return getProperty(VARIABLE_REGISTRY_PROPERTIES);
+ }
+
+ public Path[] getVariableRegistryPropertiesPaths() {
+ final List<Path> vrPropertiesPaths = new ArrayList<>();
+
+ final String vrPropertiesFiles = getVariableRegistryProperties();
+ if (!StringUtils.isEmpty(vrPropertiesFiles)) {
+
+ final List<String> vrPropertiesFileList = Arrays.asList(vrPropertiesFiles.split(","));
+
+ for (String propertiesFile : vrPropertiesFileList) {
+ vrPropertiesPaths.add(Paths.get(propertiesFile));
+ }
+
+ return vrPropertiesPaths.toArray(new Path[vrPropertiesPaths.size()]);
+ } else {
+ return new Path[]{};
+ }
+ }
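+
+ // Example (assumed value): nifi.variable.registry.properties=./conf/custom.properties,./conf/site.properties
+ // is split on commas into two Path entries; an unset property yields an empty array.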
+
+ /**
+ * Returns the network interface list to use for HTTP. This method returns a mapping of
+ * network interface property names to network interface names.
+ *
+ * @return the property name and network interface name of all HTTP network interfaces
+ */
+ public Map<String, String> getHttpNetworkInterfaces() {
+ final Map<String, String> networkInterfaces = new HashMap<>();
+
+ // go through each property
+ for (String propertyName : getPropertyKeys()) {
+ // determine if the property is a network interface name
+ if (StringUtils.startsWith(propertyName, WEB_HTTP_NETWORK_INTERFACE_PREFIX)) {
+ // get the network interface property key
+ final String key = StringUtils.substringAfter(propertyName,
+ WEB_HTTP_NETWORK_INTERFACE_PREFIX);
+ networkInterfaces.put(key, getProperty(propertyName));
+ }
+ }
+ return networkInterfaces;
+ }
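+
+ // Example (assumed value): nifi.web.http.network.interface.default=eth0
+ // yields the map {"default" -> "eth0"}.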
+
+ /**
+ * Returns the network interface list to use for HTTPS. This method returns a mapping of
+ * network interface property names to network interface names.
+ *
+ * @return the property name and network interface name of all HTTPS network interfaces
+ */
+ public Map<String, String> getHttpsNetworkInterfaces() {
+ final Map<String, String> networkInterfaces = new HashMap<>();
+
+ // go through each property
+ for (String propertyName : getPropertyKeys()) {
+ // determine if the property is a network interface name
+ if (StringUtils.startsWith(propertyName, WEB_HTTPS_NETWORK_INTERFACE_PREFIX)) {
+ // get the network interface property key
+ final String key = StringUtils.substringAfter(propertyName,
+ WEB_HTTPS_NETWORK_INTERFACE_PREFIX);
+ networkInterfaces.put(key, getProperty(propertyName));
+ }
+ }
+ return networkInterfaces;
+ }
+
+ public int size() {
+ return getPropertyKeys().size();
+ }
+
+ public String getProvenanceRepoEncryptionKeyId() {
+ return getProperty(PROVENANCE_REPO_ENCRYPTION_KEY_ID);
+ }
+
+ /**
+ * Returns the active provenance repository encryption key if a {@code StaticKeyProvider} is in use.
+ * If no key ID is specified in the properties file, the default
+ * {@code nifi.provenance.repository.encryption.key} value is returned. If a key ID is specified in
+ * {@code nifi.provenance.repository.encryption.key.id}, it will attempt to read from
+ * {@code nifi.provenance.repository.encryption.key.id.XYZ} where {@code XYZ} is the provided key
+ * ID. If that value is empty, it will use the default property
+ * {@code nifi.provenance.repository.encryption.key}.
+ *
+ * @return the provenance repository encryption key in hex form
+ */
+ public String getProvenanceRepoEncryptionKey() {
+ String keyId = getProvenanceRepoEncryptionKeyId();
+ String keyKey = StringUtils.isBlank(keyId) ? PROVENANCE_REPO_ENCRYPTION_KEY : PROVENANCE_REPO_ENCRYPTION_KEY + ".id." + keyId;
+ return getProperty(keyKey, getProperty(PROVENANCE_REPO_ENCRYPTION_KEY));
+ }
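+
+ // Resolution sketch under assumed properties (key id and hex value are placeholders):
+ //   nifi.provenance.repository.encryption.key.id=key1
+ //   nifi.provenance.repository.encryption.key.id.key1=0123456789ABCDEFFEDCBA9876543210
+ // returns the value registered under ".id.key1"; with no key id configured, the plain
+ // nifi.provenance.repository.encryption.key value is returned instead.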
+
+ /**
+ * Returns a map of keyId -> key in hex loaded from the {@code nifi.properties} file if a
+ * {@code StaticKeyProvider} is defined. If {@code FileBasedKeyProvider} is defined, use
+ * {@code CryptoUtils#readKeys()} instead -- this method will return an empty map.
+ *
+ * @return a Map of the keys identified by key ID
+ */
+ public Map<String, String> getProvenanceRepoEncryptionKeys() {
+ Map<String, String> keys = new HashMap<>();
+ List<String> keyProperties = getProvenanceRepositoryEncryptionKeyProperties();
+
+ // Retrieve the actual key values and store non-empty values in the map
+ for (String prop : keyProperties) {
+ final String value = getProperty(prop);
+ if (!StringUtils.isBlank(value)) {
+ if (prop.equalsIgnoreCase(PROVENANCE_REPO_ENCRYPTION_KEY)) {
+ prop = getProvenanceRepoEncryptionKeyId();
+ } else {
+ // Extract nifi.provenance.repository.encryption.key.id.key1 -> key1
+ prop = prop.substring(prop.lastIndexOf(".") + 1);
+ }
+ keys.put(prop, value);
+ }
+ }
+ return keys;
+ }
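+
+ // With the assumed properties sketched above, this returns {"key1" -> "0123...3210"};
+ // as noted in the javadoc, a FileBasedKeyProvider configuration yields an empty map here.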
+
+ /**
+ * Returns the whitelisted proxy hostnames (and IP addresses) as a comma-delimited string.
+ * The hosts have been normalized to the form {@code somehost.com}, {@code somehost.com:port}, or {@code 127.0.0.1}.
+ * <p>
+ * Note: Calling {@code NiFiProperties.getProperty(NiFiProperties.WEB_PROXY_HOST)} will not normalize the hosts.
+ *
+ * @return the hostname(s)
+ */
+ public String getWhitelistedHosts() {
+ return StringUtils.join(getWhitelistedHostsAsList(), ",");
+ }
+
+ /**
+ * Returns the whitelisted proxy hostnames (and IP addresses) as a List. The hosts have been normalized to the form {@code somehost.com}, {@code somehost.com:port}, or {@code 127.0.0.1}.
+ *
+ * @return the hostname(s)
+ */
+ public List<String> getWhitelistedHostsAsList() {
+ String rawProperty = getProperty(WEB_PROXY_HOST, "");
+ List<String> hosts = Arrays.asList(rawProperty.split(","));
+ return hosts.stream()
+ .map(this::normalizeHost).filter(host -> !StringUtils.isBlank(host)).collect(Collectors.toList());
+ }
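+
+ // Example (hypothetical value): nifi.web.proxy.host=proxy.example.com:8443, 10.0.0.5
+ // normalizes to ["proxy.example.com:8443", "10.0.0.5"], which getWhitelistedHosts()
+ // joins back into "proxy.example.com:8443,10.0.0.5".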
+
+ String normalizeHost(String host) {
+ if (host == null || host.isEmpty()) {
+ return "";
+ } else {
+ return host.trim();
+ }
+ }
+
+ /**
+ * Returns the whitelisted proxy context paths as a comma-delimited string. The paths have been normalized to the form {@code /some/context/path}.
+ * <p>
+ * Note: Calling {@code NiFiProperties.getProperty(NiFiProperties.WEB_PROXY_CONTEXT_PATH)} will not normalize the paths.
+ *
+ * @return the path(s)
+ */
+ public String getWhitelistedContextPaths() {
+ return StringUtils.join(getWhitelistedContextPathsAsList(), ",");
+ }
+
+ /**
+ * Returns the whitelisted proxy context paths as a list of paths. The paths have been normalized to the form {@code /some/context/path}.
+ *
+ * @return the path(s)
+ */
+ public List<String> getWhitelistedContextPathsAsList() {
+ String rawProperty = getProperty(WEB_PROXY_CONTEXT_PATH, "");
+ List<String> contextPaths = Arrays.asList(rawProperty.split(","));
+ return contextPaths.stream()
+ .map(this::normalizeContextPath).collect(Collectors.toList());
+ }
+
+ private String normalizeContextPath(String cp) {
+ if (cp == null || cp.isEmpty()) {
+ return "";
+ } else {
+ String trimmedCP = cp.trim();
+ // Ensure it starts with a leading slash and does not end in a trailing slash
+ // There's a potential for the path to be something like bad/path/// but this is semi-trusted data from an admin-accessible file and there are way worse possibilities here
+ trimmedCP = trimmedCP.startsWith("/") ? trimmedCP : "/" + trimmedCP;
+ trimmedCP = trimmedCP.endsWith("/") ? trimmedCP.substring(0, trimmedCP.length() - 1) : trimmedCP;
+ return trimmedCP;
+ }
+ }
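+
+ // Example (hypothetical value): nifi.web.proxy.context.path= nifi , /gateway/
+ // normalizes each entry to a leading-slash, no-trailing-slash form: ["/nifi", "/gateway"].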
+
+ private List<String> getProvenanceRepositoryEncryptionKeyProperties() {
+ // Filter all the property keys that define a key
+ return getPropertyKeys().stream().filter(k ->
+ k.startsWith(PROVENANCE_REPO_ENCRYPTION_KEY_ID + ".") || k.equalsIgnoreCase(PROVENANCE_REPO_ENCRYPTION_KEY)
+ ).collect(Collectors.toList());
+ }
+
+ public Long getDefaultBackPressureObjectThreshold() {
+ long backPressureCount;
+ try {
+ String backPressureCountStr = getProperty(BACKPRESSURE_COUNT);
+ if (backPressureCountStr == null || backPressureCountStr.trim().isEmpty()) {
+ backPressureCount = DEFAULT_BACKPRESSURE_COUNT;
+ } else {
+ backPressureCount = Long.parseLong(backPressureCountStr);
+ }
+ } catch (NumberFormatException nfe) {
+ backPressureCount = DEFAULT_BACKPRESSURE_COUNT;
+ }
+ return backPressureCount;
+ }
+
+ public String getDefaultBackPressureDataSizeThreshold() {
+ return getProperty(BACKPRESSURE_SIZE, DEFAULT_BACKPRESSURE_SIZE);
+ }
+
+ /**
+ * Creates an instance of NiFiProperties. This should likely not be called
+ * by classes outside of the NiFi framework, but it is useful to the
+ * framework for default property loading behavior and helpful in tests
+ * that need to create specific instances of NiFiProperties. If the specified
+ * properties file cannot be found or read, a runtime exception is thrown.
+ * If no file is specified, no properties are loaded by default.
+ *
+ * @param propertiesFilePath if provided, properties will be loaded from the
+ * given file; otherwise they are loaded from the path given by the
+ * {@code PROPERTIES_FILE_PATH} system property. Can be null.
+ * @param additionalProperties allows overriding of properties with the
+ * supplied values. These are applied after loading from any properties
+ * file. Can be null or empty.
+ * @return NiFiProperties
+ */
+ public static NiFiProperties createBasicNiFiProperties(final String propertiesFilePath, final Map<String, String> additionalProperties) {
+ final Map<String, String> addProps = (additionalProperties == null) ? Collections.emptyMap() : additionalProperties;
+ final Properties properties = new Properties();
+ final String nfPropertiesFilePath = (propertiesFilePath == null)
+ ? System.getProperty(NiFiProperties.PROPERTIES_FILE_PATH)
+ : propertiesFilePath;
+ if (nfPropertiesFilePath != null) {
+ final File propertiesFile = new File(nfPropertiesFilePath.trim());
+ if (!propertiesFile.exists()) {
+ throw new RuntimeException("Properties file doesn't exist '"
+ + propertiesFile.getAbsolutePath() + "'");
+ }
+ if (!propertiesFile.canRead()) {
+ throw new RuntimeException("Properties file exists but cannot be read '"
+ + propertiesFile.getAbsolutePath() + "'");
+ }
+ // try-with-resources ensures the stream is closed even if loading fails
+ try (InputStream inStream = new BufferedInputStream(new FileInputStream(propertiesFile))) {
+ properties.load(inStream);
+ } catch (final Exception ex) {
+ throw new RuntimeException("Cannot load properties file due to "
+ + ex.getLocalizedMessage(), ex);
+ }
+ }
+ // additional properties override anything loaded from the file
+ addProps.forEach(properties::setProperty);
+ return new NiFiProperties() {
+ @Override
+ public String getProperty(String key) {
+ return properties.getProperty(key);
+ }
+
+ @Override
+ public Set<String> getPropertyKeys() {
+ return properties.stringPropertyNames();
+ }
+ };
+ }
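+
+ // Usage sketch (hypothetical file path and override values, e.g. from a unit test):
+ //   Map<String, String> overrides = new HashMap<>();
+ //   overrides.put(NiFiProperties.WEB_HTTP_PORT, "8080");
+ //   NiFiProperties props = NiFiProperties.createBasicNiFiProperties(
+ //           "src/test/resources/nifi.properties", overrides);
+ //   props.getPort();  // -> 8080, taken from the override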
+
+ /**
+ * This method is used to validate the NiFi properties when the file is loaded
+ * for the first time. The objective is to stop NiFi startup in case a property
+ * is not correctly configured and could cause issues afterwards.
+ */
+ public void validate() {
+ // REMOTE_INPUT_HOST should be a valid hostname
+ String remoteInputHost = getProperty(REMOTE_INPUT_HOST);
+ if (!StringUtils.isBlank(remoteInputHost) && remoteInputHost.split(":").length > 1) { // the value must be a bare hostname, without scheme or port
+ throw new IllegalArgumentException(remoteInputHost + " is not a correct value for " + REMOTE_INPUT_HOST + ". It should be a valid hostname without protocol or port.");
+ }
+ // Other properties to validate...
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
new file mode 100644
index 0000000..8ad05bd
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/StandardNiFiServiceFacade.java
@@ -0,0 +1,4899 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.web;
+
+import com.google.common.collect.Sets;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.nifi.action.Action;
+import org.apache.nifi.action.Component;
+import org.apache.nifi.action.FlowChangeAction;
+import org.apache.nifi.action.Operation;
+import org.apache.nifi.action.details.FlowChangePurgeDetails;
+import org.apache.nifi.admin.service.AuditService;
+import org.apache.nifi.authorization.AccessDeniedException;
+import org.apache.nifi.authorization.AccessPolicy;
+import org.apache.nifi.authorization.AuthorizableLookup;
+import org.apache.nifi.authorization.AuthorizationRequest;
+import org.apache.nifi.authorization.AuthorizationResult;
+import org.apache.nifi.authorization.AuthorizationResult.Result;
+import org.apache.nifi.authorization.AuthorizeAccess;
+import org.apache.nifi.authorization.Authorizer;
+import org.apache.nifi.authorization.Group;
+import org.apache.nifi.authorization.RequestAction;
+import org.apache.nifi.authorization.Resource;
+import org.apache.nifi.authorization.User;
+import org.apache.nifi.authorization.UserContextKeys;
+import org.apache.nifi.authorization.resource.Authorizable;
+import org.apache.nifi.authorization.resource.EnforcePolicyPermissionsThroughBaseResource;
+import org.apache.nifi.authorization.resource.OperationAuthorizable;
+import org.apache.nifi.authorization.resource.ResourceFactory;
+import org.apache.nifi.authorization.user.NiFiUser;
+import org.apache.nifi.authorization.user.NiFiUserUtils;
+import org.apache.nifi.bundle.BundleCoordinate;
+import org.apache.nifi.cluster.coordination.ClusterCoordinator;
+import org.apache.nifi.cluster.coordination.heartbeat.HeartbeatMonitor;
+import org.apache.nifi.cluster.coordination.heartbeat.NodeHeartbeat;
+import org.apache.nifi.cluster.coordination.node.ClusterRoles;
+import org.apache.nifi.cluster.coordination.node.DisconnectionCode;
+import org.apache.nifi.cluster.coordination.node.NodeConnectionState;
+import org.apache.nifi.cluster.coordination.node.NodeConnectionStatus;
+import org.apache.nifi.cluster.coordination.node.OffloadCode;
+import org.apache.nifi.cluster.event.NodeEvent;
+import org.apache.nifi.cluster.manager.exception.IllegalNodeDeletionException;
+import org.apache.nifi.cluster.manager.exception.UnknownNodeException;
+import org.apache.nifi.cluster.protocol.NodeIdentifier;
+import org.apache.nifi.components.ConfigurableComponent;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.RequiredPermission;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.components.Validator;
+import org.apache.nifi.components.state.Scope;
+import org.apache.nifi.components.state.StateMap;
+import org.apache.nifi.connectable.Connectable;
+import org.apache.nifi.connectable.Connection;
+import org.apache.nifi.connectable.Funnel;
+import org.apache.nifi.connectable.Port;
+import org.apache.nifi.controller.ComponentNode;
+import org.apache.nifi.controller.Counter;
+import org.apache.nifi.controller.FlowController;
+import org.apache.nifi.controller.ProcessorNode;
+import org.apache.nifi.controller.ReportingTaskNode;
+import org.apache.nifi.controller.ScheduledState;
+import org.apache.nifi.controller.Snippet;
+import org.apache.nifi.controller.Template;
+import org.apache.nifi.controller.label.Label;
+import org.apache.nifi.controller.leader.election.LeaderElectionManager;
+import org.apache.nifi.controller.repository.claim.ContentDirection;
+import org.apache.nifi.controller.service.ControllerServiceNode;
+import org.apache.nifi.controller.service.ControllerServiceReference;
+import org.apache.nifi.controller.service.ControllerServiceState;
+import org.apache.nifi.controller.status.ProcessGroupStatus;
+import org.apache.nifi.controller.status.ProcessorStatus;
+import org.apache.nifi.diagnostics.SystemDiagnostics;
+import org.apache.nifi.events.BulletinFactory;
+import org.apache.nifi.groups.ProcessGroup;
+import org.apache.nifi.groups.ProcessGroupCounts;
+import org.apache.nifi.groups.RemoteProcessGroup;
+import org.apache.nifi.history.History;
+import org.apache.nifi.history.HistoryQuery;
+import org.apache.nifi.history.PreviousValue;
+import org.apache.nifi.registry.ComponentVariableRegistry;
+import org.apache.nifi.registry.authorization.Permissions;
+import org.apache.nifi.registry.bucket.Bucket;
+import org.apache.nifi.registry.client.NiFiRegistryException;
+import org.apache.nifi.registry.flow.FlowRegistry;
+import org.apache.nifi.registry.flow.FlowRegistryClient;
+import org.apache.nifi.registry.flow.VersionControlInformation;
+import org.apache.nifi.registry.flow.VersionedComponent;
+import org.apache.nifi.registry.flow.VersionedConnection;
+import org.apache.nifi.registry.flow.VersionedFlow;
+import org.apache.nifi.registry.flow.VersionedFlowCoordinates;
+import org.apache.nifi.registry.flow.VersionedFlowSnapshot;
+import org.apache.nifi.registry.flow.VersionedFlowSnapshotMetadata;
+import org.apache.nifi.registry.flow.VersionedFlowState;
+import org.apache.nifi.registry.flow.VersionedProcessGroup;
+import org.apache.nifi.registry.flow.diff.ComparableDataFlow;
+import org.apache.nifi.registry.flow.diff.ConciseEvolvingDifferenceDescriptor;
+import org.apache.nifi.registry.flow.diff.DifferenceType;
+import org.apache.nifi.registry.flow.diff.FlowComparator;
+import org.apache.nifi.registry.flow.diff.FlowComparison;
+import org.apache.nifi.registry.flow.diff.FlowDifference;
+import org.apache.nifi.registry.flow.diff.StandardComparableDataFlow;
+import org.apache.nifi.registry.flow.diff.StandardFlowComparator;
+import org.apache.nifi.registry.flow.diff.StaticDifferenceDescriptor;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedComponent;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedControllerService;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessGroup;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessor;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedRemoteGroupPort;
+import org.apache.nifi.registry.flow.mapping.NiFiRegistryFlowMapper;
+import org.apache.nifi.remote.RemoteGroupPort;
+import org.apache.nifi.remote.RootGroupPort;
+import org.apache.nifi.reporting.Bulletin;
+import org.apache.nifi.reporting.BulletinQuery;
+import org.apache.nifi.reporting.BulletinRepository;
+import org.apache.nifi.reporting.ComponentType;
+import org.apache.nifi.util.BundleUtils;
+import org.apache.nifi.util.FlowDifferenceFilters;
+import org.apache.nifi.util.NiFiProperties;
+import org.apache.nifi.web.api.dto.AccessPolicyDTO;
+import org.apache.nifi.web.api.dto.AccessPolicySummaryDTO;
+import org.apache.nifi.web.api.dto.AffectedComponentDTO;
+import org.apache.nifi.web.api.dto.BucketDTO;
+import org.apache.nifi.web.api.dto.BulletinBoardDTO;
+import org.apache.nifi.web.api.dto.BulletinDTO;
+import org.apache.nifi.web.api.dto.BulletinQueryDTO;
+import org.apache.nifi.web.api.dto.BundleDTO;
+import org.apache.nifi.web.api.dto.ClusterDTO;
+import org.apache.nifi.web.api.dto.ComponentDTO;
+import org.apache.nifi.web.api.dto.ComponentDifferenceDTO;
+import org.apache.nifi.web.api.dto.ComponentHistoryDTO;
+import org.apache.nifi.web.api.dto.ComponentReferenceDTO;
+import org.apache.nifi.web.api.dto.ComponentRestrictionPermissionDTO;
+import org.apache.nifi.web.api.dto.ComponentStateDTO;
+import org.apache.nifi.web.api.dto.ConnectionDTO;
+import org.apache.nifi.web.api.dto.ControllerConfigurationDTO;
+import org.apache.nifi.web.api.dto.ControllerDTO;
+import org.apache.nifi.web.api.dto.ControllerServiceDTO;
+import org.apache.nifi.web.api.dto.ControllerServiceReferencingComponentDTO;
+import org.apache.nifi.web.api.dto.CounterDTO;
+import org.apache.nifi.web.api.dto.CountersDTO;
+import org.apache.nifi.web.api.dto.CountersSnapshotDTO;
+import org.apache.nifi.web.api.dto.DocumentedTypeDTO;
+import org.apache.nifi.web.api.dto.DropRequestDTO;
+import org.apache.nifi.web.api.dto.DtoFactory;
+import org.apache.nifi.web.api.dto.EntityFactory;
+import org.apache.nifi.web.api.dto.FlowConfigurationDTO;
+import org.apache.nifi.web.api.dto.FlowFileDTO;
+import org.apache.nifi.web.api.dto.FlowSnippetDTO;
+import org.apache.nifi.web.api.dto.FunnelDTO;
+import org.apache.nifi.web.api.dto.LabelDTO;
+import org.apache.nifi.web.api.dto.ListingRequestDTO;
+import org.apache.nifi.web.api.dto.NodeDTO;
+import org.apache.nifi.web.api.dto.PermissionsDTO;
+import org.apache.nifi.web.api.dto.PortDTO;
+import org.apache.nifi.web.api.dto.PreviousValueDTO;
+import org.apache.nifi.web.api.dto.ProcessGroupDTO;
+import org.apache.nifi.web.api.dto.ProcessorConfigDTO;
+import org.apache.nifi.web.api.dto.ProcessorDTO;
+import org.apache.nifi.web.api.dto.PropertyDescriptorDTO;
+import org.apache.nifi.web.api.dto.PropertyHistoryDTO;
+import org.apache.nifi.web.api.dto.RegistryDTO;
+import org.apache.nifi.web.api.dto.RemoteProcessGroupDTO;
+import org.apache.nifi.web.api.dto.RemoteProcessGroupPortDTO;
+import org.apache.nifi.web.api.dto.ReportingTaskDTO;
+import org.apache.nifi.web.api.dto.RequiredPermissionDTO;
+import org.apache.nifi.web.api.dto.ResourceDTO;
+import org.apache.nifi.web.api.dto.RevisionDTO;
+import org.apache.nifi.web.api.dto.SnippetDTO;
+import org.apache.nifi.web.api.dto.SystemDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.TemplateDTO;
+import org.apache.nifi.web.api.dto.UserDTO;
+import org.apache.nifi.web.api.dto.UserGroupDTO;
+import org.apache.nifi.web.api.dto.VariableRegistryDTO;
+import org.apache.nifi.web.api.dto.VersionControlInformationDTO;
+import org.apache.nifi.web.api.dto.VersionedFlowDTO;
+import org.apache.nifi.web.api.dto.action.HistoryDTO;
+import org.apache.nifi.web.api.dto.action.HistoryQueryDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ConnectionDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ControllerServiceDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ProcessorDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.flow.FlowDTO;
+import org.apache.nifi.web.api.dto.provenance.ProvenanceDTO;
+import org.apache.nifi.web.api.dto.provenance.ProvenanceEventDTO;
+import org.apache.nifi.web.api.dto.provenance.ProvenanceOptionsDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.LineageDTO;
+import org.apache.nifi.web.api.dto.search.SearchResultsDTO;
+import org.apache.nifi.web.api.dto.status.ConnectionStatusDTO;
+import org.apache.nifi.web.api.dto.status.ControllerStatusDTO;
+import org.apache.nifi.web.api.dto.status.NodeProcessGroupStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.PortStatusDTO;
+import org.apache.nifi.web.api.dto.status.ProcessGroupStatusDTO;
+import org.apache.nifi.web.api.dto.status.ProcessGroupStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.ProcessorStatusDTO;
+import org.apache.nifi.web.api.dto.status.RemoteProcessGroupStatusDTO;
+import org.apache.nifi.web.api.dto.status.StatusHistoryDTO;
+import org.apache.nifi.web.api.entity.AccessPolicyEntity;
+import org.apache.nifi.web.api.entity.AccessPolicySummaryEntity;
+import org.apache.nifi.web.api.entity.ActionEntity;
+import org.apache.nifi.web.api.entity.ActivateControllerServicesEntity;
+import org.apache.nifi.web.api.entity.AffectedComponentEntity;
+import org.apache.nifi.web.api.entity.BucketEntity;
+import org.apache.nifi.web.api.entity.BulletinEntity;
+import org.apache.nifi.web.api.entity.ComponentReferenceEntity;
+import org.apache.nifi.web.api.entity.ConnectionEntity;
+import org.apache.nifi.web.api.entity.ConnectionStatusEntity;
+import org.apache.nifi.web.api.entity.ControllerBulletinsEntity;
+import org.apache.nifi.web.api.entity.ControllerConfigurationEntity;
+import org.apache.nifi.web.api.entity.ControllerServiceEntity;
+import org.apache.nifi.web.api.entity.ControllerServiceReferencingComponentEntity;
+import org.apache.nifi.web.api.entity.ControllerServiceReferencingComponentsEntity;
+import org.apache.nifi.web.api.entity.CurrentUserEntity;
+import org.apache.nifi.web.api.entity.FlowComparisonEntity;
+import org.apache.nifi.web.api.entity.FlowConfigurationEntity;
+import org.apache.nifi.web.api.entity.FlowEntity;
+import org.apache.nifi.web.api.entity.FunnelEntity;
+import org.apache.nifi.web.api.entity.LabelEntity;
+import org.apache.nifi.web.api.entity.PortEntity;
+import org.apache.nifi.web.api.entity.PortStatusEntity;
+import org.apache.nifi.web.api.entity.ProcessGroupEntity;
+import org.apache.nifi.web.api.entity.ProcessGroupFlowEntity;
+import org.apache.nifi.web.api.entity.ProcessGroupStatusEntity;
+import org.apache.nifi.web.api.entity.ProcessGroupStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.ProcessorDiagnosticsEntity;
+import org.apache.nifi.web.api.entity.ProcessorEntity;
+import org.apache.nifi.web.api.entity.ProcessorStatusEntity;
+import org.apache.nifi.web.api.entity.RegistryClientEntity;
+import org.apache.nifi.web.api.entity.RegistryEntity;
+import org.apache.nifi.web.api.entity.RemoteProcessGroupEntity;
+import org.apache.nifi.web.api.entity.RemoteProcessGroupPortEntity;
+import org.apache.nifi.web.api.entity.RemoteProcessGroupStatusEntity;
+import org.apache.nifi.web.api.entity.ReportingTaskEntity;
+import org.apache.nifi.web.api.entity.ScheduleComponentsEntity;
+import org.apache.nifi.web.api.entity.SnippetEntity;
+import org.apache.nifi.web.api.entity.StartVersionControlRequestEntity;
+import org.apache.nifi.web.api.entity.StatusHistoryEntity;
+import org.apache.nifi.web.api.entity.TemplateEntity;
+import org.apache.nifi.web.api.entity.TenantEntity;
+import org.apache.nifi.web.api.entity.UserEntity;
+import org.apache.nifi.web.api.entity.UserGroupEntity;
+import org.apache.nifi.web.api.entity.VariableEntity;
+import org.apache.nifi.web.api.entity.VariableRegistryEntity;
+import org.apache.nifi.web.api.entity.VersionControlComponentMappingEntity;
+import org.apache.nifi.web.api.entity.VersionControlInformationEntity;
+import org.apache.nifi.web.api.entity.VersionedFlowEntity;
+import org.apache.nifi.web.api.entity.VersionedFlowSnapshotMetadataEntity;
+import org.apache.nifi.web.controller.ControllerFacade;
+import org.apache.nifi.web.dao.AccessPolicyDAO;
+import org.apache.nifi.web.dao.ConnectionDAO;
+import org.apache.nifi.web.dao.ControllerServiceDAO;
+import org.apache.nifi.web.dao.FunnelDAO;
+import org.apache.nifi.web.dao.LabelDAO;
+import org.apache.nifi.web.dao.PortDAO;
+import org.apache.nifi.web.dao.ProcessGroupDAO;
+import org.apache.nifi.web.dao.ProcessorDAO;
+import org.apache.nifi.web.dao.RegistryDAO;
+import org.apache.nifi.web.dao.RemoteProcessGroupDAO;
+import org.apache.nifi.web.dao.ReportingTaskDAO;
+import org.apache.nifi.web.dao.SnippetDAO;
+import org.apache.nifi.web.dao.TemplateDAO;
+import org.apache.nifi.web.dao.UserDAO;
+import org.apache.nifi.web.dao.UserGroupDAO;
+import org.apache.nifi.web.revision.DeleteRevisionTask;
+import org.apache.nifi.web.revision.ExpiredRevisionClaimException;
+import org.apache.nifi.web.revision.RevisionClaim;
+import org.apache.nifi.web.revision.RevisionManager;
+import org.apache.nifi.web.revision.RevisionUpdate;
+import org.apache.nifi.web.revision.StandardRevisionClaim;
+import org.apache.nifi.web.revision.StandardRevisionUpdate;
+import org.apache.nifi.web.revision.UpdateRevisionTask;
+import org.apache.nifi.web.util.SnippetUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Implementation of NiFiServiceFacade that performs revision checking.
+ */
+public class StandardNiFiServiceFacade implements NiFiServiceFacade {
+ private static final Logger logger = LoggerFactory.getLogger(StandardNiFiServiceFacade.class);
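+ // upper bound, in milliseconds, to wait for a component's validation to settle before building its DTO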
+ private static final int VALIDATION_WAIT_MILLIS = 50;
+
+ // nifi core components
+ private ControllerFacade controllerFacade;
+ private SnippetUtils snippetUtils;
+
+ // revision manager
+ private RevisionManager revisionManager;
+ private BulletinRepository bulletinRepository;
+
+ // data access objects
+ private ProcessorDAO processorDAO;
+ private ProcessGroupDAO processGroupDAO;
+ private RemoteProcessGroupDAO remoteProcessGroupDAO;
+ private LabelDAO labelDAO;
+ private FunnelDAO funnelDAO;
+ private SnippetDAO snippetDAO;
+ private PortDAO inputPortDAO;
+ private PortDAO outputPortDAO;
+ private ConnectionDAO connectionDAO;
+ private ControllerServiceDAO controllerServiceDAO;
+ private ReportingTaskDAO reportingTaskDAO;
+ private TemplateDAO templateDAO;
+ private UserDAO userDAO;
+ private UserGroupDAO userGroupDAO;
+ private AccessPolicyDAO accessPolicyDAO;
+ private RegistryDAO registryDAO;
+ private ClusterCoordinator clusterCoordinator;
+ private HeartbeatMonitor heartbeatMonitor;
+ private LeaderElectionManager leaderElectionManager;
+
+ // administrative services
+ private AuditService auditService;
+
+ // flow registry
+ private FlowRegistryClient flowRegistryClient;
+
+ // properties
+ private NiFiProperties properties;
+ private DtoFactory dtoFactory;
+ private EntityFactory entityFactory;
+
+ private Authorizer authorizer;
+
+ private AuthorizableLookup authorizableLookup;
+
+ // -----------------------------------------
+ // Synchronization methods
+ // -----------------------------------------
+ @Override
+ public void authorizeAccess(final AuthorizeAccess authorizeAccess) {
+ authorizeAccess.authorize(authorizableLookup);
+ }
+
+ @Override
+ public void verifyRevision(final Revision revision, final NiFiUser user) {
+ final Revision curRevision = revisionManager.getRevision(revision.getComponentId());
+ if (revision.equals(curRevision)) {
+ return;
+ }
+
+ throw new InvalidRevisionException(revision + " is not the most up-to-date revision. This component appears to have been modified");
+ }
+
+ @Override
+ public void verifyRevisions(final Set<Revision> revisions, final NiFiUser user) {
+ for (final Revision revision : revisions) {
+ verifyRevision(revision, user);
+ }
+ }
+
+ @Override
+ public Set<Revision> getRevisionsFromGroup(final String groupId, final Function<ProcessGroup, Set<String>> getComponents) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ final Set<String> componentIds = getComponents.apply(group);
+ return componentIds.stream().map(id -> revisionManager.getRevision(id)).collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<Revision> getRevisionsFromSnippet(final String snippetId) {
+ final Snippet snippet = snippetDAO.getSnippet(snippetId);
+ final Set<String> componentIds = new HashSet<>();
+ componentIds.addAll(snippet.getProcessors().keySet());
+ componentIds.addAll(snippet.getFunnels().keySet());
+ componentIds.addAll(snippet.getLabels().keySet());
+ componentIds.addAll(snippet.getConnections().keySet());
+ componentIds.addAll(snippet.getInputPorts().keySet());
+ componentIds.addAll(snippet.getOutputPorts().keySet());
+ componentIds.addAll(snippet.getProcessGroups().keySet());
+ componentIds.addAll(snippet.getRemoteProcessGroups().keySet());
+ return componentIds.stream().map(id -> revisionManager.getRevision(id)).collect(Collectors.toSet());
+ }
+
+ // -----------------------------------------
+ // Verification Operations
+ // -----------------------------------------
+
+ @Override
+ public void verifyListQueue(final String connectionId) {
+ connectionDAO.verifyList(connectionId);
+ }
+
+ @Override
+ public void verifyCreateConnection(final String groupId, final ConnectionDTO connectionDTO) {
+ connectionDAO.verifyCreate(groupId, connectionDTO);
+ }
+
+ @Override
+ public void verifyUpdateConnection(final ConnectionDTO connectionDTO) {
+ // if the connection does not exist, the update request is likely creating it,
+ // so verify the create instead of the update
+ if (connectionDAO.hasConnection(connectionDTO.getId())) {
+ connectionDAO.verifyUpdate(connectionDTO);
+ } else {
+ connectionDAO.verifyCreate(connectionDTO.getParentGroupId(), connectionDTO);
+ }
+ }
+
+ @Override
+ public void verifyDeleteConnection(final String connectionId) {
+ connectionDAO.verifyDelete(connectionId);
+ }
+
+ @Override
+ public void verifyDeleteFunnel(final String funnelId) {
+ funnelDAO.verifyDelete(funnelId);
+ }
+
+ @Override
+ public void verifyUpdateInputPort(final PortDTO inputPortDTO) {
+ // if the input port does not exist, the update request is likely creating it,
+ // so skip verification since the update would fail anyway
+ if (inputPortDAO.hasPort(inputPortDTO.getId())) {
+ inputPortDAO.verifyUpdate(inputPortDTO);
+ }
+ }
+
+ @Override
+ public void verifyDeleteInputPort(final String inputPortId) {
+ inputPortDAO.verifyDelete(inputPortId);
+ }
+
+ @Override
+ public void verifyUpdateOutputPort(final PortDTO outputPortDTO) {
+ // if the output port does not exist, the update request is likely creating it,
+ // so skip verification since the update would fail anyway
+ if (outputPortDAO.hasPort(outputPortDTO.getId())) {
+ outputPortDAO.verifyUpdate(outputPortDTO);
+ }
+ }
+
+ @Override
+ public void verifyDeleteOutputPort(final String outputPortId) {
+ outputPortDAO.verifyDelete(outputPortId);
+ }
+
+ @Override
+ public void verifyCreateProcessor(ProcessorDTO processorDTO) {
+ processorDAO.verifyCreate(processorDTO);
+ }
+
+ @Override
+ public void verifyUpdateProcessor(final ProcessorDTO processorDTO) {
+ // if the processor does not exist, the update request is likely creating it,
+ // so verify the create instead of the update
+ if (processorDAO.hasProcessor(processorDTO.getId())) {
+ processorDAO.verifyUpdate(processorDTO);
+ } else {
+ verifyCreateProcessor(processorDTO);
+ }
+ }
+
+ @Override
+ public void verifyDeleteProcessor(final String processorId) {
+ processorDAO.verifyDelete(processorId);
+ }
+
+ @Override
+ public void verifyScheduleComponents(final String groupId, final ScheduledState state, final Set<String> componentIds) {
+ processGroupDAO.verifyScheduleComponents(groupId, state, componentIds);
+ }
+
+ @Override
+ public void verifyEnableComponents(String processGroupId, ScheduledState state, Set<String> componentIds) {
+ processGroupDAO.verifyEnableComponents(processGroupId, state, componentIds);
+ }
+
+ @Override
+ public void verifyActivateControllerServices(final String groupId, final ControllerServiceState state, final Collection<String> serviceIds) {
+ processGroupDAO.verifyActivateControllerServices(state, serviceIds);
+ }
+
+ @Override
+ public void verifyDeleteProcessGroup(final String groupId) {
+ processGroupDAO.verifyDelete(groupId);
+ }
+
+ @Override
+ public void verifyUpdateRemoteProcessGroup(final RemoteProcessGroupDTO remoteProcessGroupDTO) {
+ // if the remote process group does not exist, the update request is likely creating it,
+ // so skip verification since the update would fail anyway
+ if (remoteProcessGroupDAO.hasRemoteProcessGroup(remoteProcessGroupDTO.getId())) {
+ remoteProcessGroupDAO.verifyUpdate(remoteProcessGroupDTO);
+ }
+ }
+
+ @Override
+ public void verifyUpdateRemoteProcessGroupInputPort(final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+ remoteProcessGroupDAO.verifyUpdateInputPort(remoteProcessGroupId, remoteProcessGroupPortDTO);
+ }
+
+ @Override
+ public void verifyUpdateRemoteProcessGroupOutputPort(final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+ remoteProcessGroupDAO.verifyUpdateOutputPort(remoteProcessGroupId, remoteProcessGroupPortDTO);
+ }
+
+ @Override
+ public void verifyDeleteRemoteProcessGroup(final String remoteProcessGroupId) {
+ remoteProcessGroupDAO.verifyDelete(remoteProcessGroupId);
+ }
+
+ @Override
+ public void verifyCreateControllerService(ControllerServiceDTO controllerServiceDTO) {
+ controllerServiceDAO.verifyCreate(controllerServiceDTO);
+ }
+
+ @Override
+ public void verifyUpdateControllerService(final ControllerServiceDTO controllerServiceDTO) {
+ // if the controller service does not exist, the update request is likely creating it,
+ // so verify the create instead of the update
+ if (controllerServiceDAO.hasControllerService(controllerServiceDTO.getId())) {
+ controllerServiceDAO.verifyUpdate(controllerServiceDTO);
+ } else {
+ verifyCreateControllerService(controllerServiceDTO);
+ }
+ }
+
+ @Override
+ public void verifyUpdateControllerServiceReferencingComponents(final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
+ controllerServiceDAO.verifyUpdateReferencingComponents(controllerServiceId, scheduledState, controllerServiceState);
+ }
+
+ @Override
+ public void verifyDeleteControllerService(final String controllerServiceId) {
+ controllerServiceDAO.verifyDelete(controllerServiceId);
+ }
+
+ @Override
+ public void verifyCreateReportingTask(ReportingTaskDTO reportingTaskDTO) {
+ reportingTaskDAO.verifyCreate(reportingTaskDTO);
+ }
+
+ @Override
+ public void verifyUpdateReportingTask(final ReportingTaskDTO reportingTaskDTO) {
+ // if the reporting task does not exist, the update request is likely creating it,
+ // so verify the create instead of the update
+ if (reportingTaskDAO.hasReportingTask(reportingTaskDTO.getId())) {
+ reportingTaskDAO.verifyUpdate(reportingTaskDTO);
+ } else {
+ verifyCreateReportingTask(reportingTaskDTO);
+ }
+ }
+
+ @Override
+ public void verifyDeleteReportingTask(final String reportingTaskId) {
+ reportingTaskDAO.verifyDelete(reportingTaskId);
+ }
+
+ // -----------------------------------------
+ // Write Operations
+ // -----------------------------------------
+
+ @Override
+ public AccessPolicyEntity updateAccessPolicy(final Revision revision, final AccessPolicyDTO accessPolicyDTO) {
+ final Authorizable authorizable = authorizableLookup.getAccessPolicyById(accessPolicyDTO.getId());
+ final RevisionUpdate<AccessPolicyDTO> snapshot = updateComponent(revision,
+ authorizable,
+ () -> accessPolicyDAO.updateAccessPolicy(accessPolicyDTO),
+ accessPolicy -> {
+ final Set<TenantEntity> users = accessPolicy.getUsers().stream().map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet());
+ final Set<TenantEntity> userGroups = accessPolicy.getGroups().stream().map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet());
+ final ComponentReferenceEntity componentReference = createComponentReferenceEntity(accessPolicy.getResource());
+ return dtoFactory.createAccessPolicyDto(accessPolicy, userGroups, users, componentReference);
+ });
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizable);
+ return entityFactory.createAccessPolicyEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+ @Override
+ public UserEntity updateUser(final Revision revision, final UserDTO userDTO) {
+ final Authorizable usersAuthorizable = authorizableLookup.getTenant();
+ final Set<Group> groups = userGroupDAO.getUserGroupsForUser(userDTO.getId());
+ final Set<AccessPolicy> policies = userGroupDAO.getAccessPoliciesForUser(userDTO.getId());
+ final RevisionUpdate<UserDTO> snapshot = updateComponent(revision,
+ usersAuthorizable,
+ () -> userDAO.updateUser(userDTO),
+ user -> {
+ final Set<TenantEntity> tenantEntities = groups.stream().map(g -> g.getIdentifier()).map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = policies.stream().map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ return dtoFactory.createUserDto(user, tenantEntities, policyEntities);
+ });
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(usersAuthorizable);
+ return entityFactory.createUserEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+ @Override
+ public UserGroupEntity updateUserGroup(final Revision revision, final UserGroupDTO userGroupDTO) {
+ final Authorizable userGroupsAuthorizable = authorizableLookup.getTenant();
+ final Set<AccessPolicy> policies = userGroupDAO.getAccessPoliciesForUserGroup(userGroupDTO.getId());
+ final RevisionUpdate<UserGroupDTO> snapshot = updateComponent(revision,
+ userGroupsAuthorizable,
+ () -> userGroupDAO.updateUserGroup(userGroupDTO),
+ userGroup -> {
+ final Set<TenantEntity> tenantEntities = userGroup.getUsers().stream().map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = policies.stream().map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ return dtoFactory.createUserGroupDto(userGroup, tenantEntities, policyEntities);
+ }
+ );
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(userGroupsAuthorizable);
+ return entityFactory.createUserGroupEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+ @Override
+ public ConnectionEntity updateConnection(final Revision revision, final ConnectionDTO connectionDTO) {
+ final Connection connectionNode = connectionDAO.getConnection(connectionDTO.getId());
+
+ final RevisionUpdate<ConnectionDTO> snapshot = updateComponent(
+ revision,
+ connectionNode,
+ () -> connectionDAO.updateConnection(connectionDTO),
+ connection -> dtoFactory.createConnectionDto(connection));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connectionNode);
+ final ConnectionStatusDTO status = dtoFactory.createConnectionStatusDto(controllerFacade.getConnectionStatus(connectionNode.getIdentifier()));
+ return entityFactory.createConnectionEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, status);
+ }
+
+ @Override
+ public ProcessorEntity updateProcessor(final Revision revision, final ProcessorDTO processorDTO) {
+ // get the component, ensure we have access to it, and perform the update request
+ final ProcessorNode processorNode = processorDAO.getProcessor(processorDTO.getId());
+ final RevisionUpdate<ProcessorDTO> snapshot = updateComponent(revision,
+ processorNode,
+ () -> processorDAO.updateProcessor(processorDTO),
+ proc -> {
+ awaitValidationCompletion(proc);
+ return dtoFactory.createProcessorDto(proc);
+ });
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processorNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(processorNode));
+ final ProcessorStatusDTO status = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(processorNode.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processorNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessorEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
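+ // wait briefly (up to VALIDATION_WAIT_MILLIS) for the component's validation to complete so the DTO built next reflects an up-to-date validation status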
+ private void awaitValidationCompletion(final ComponentNode component) {
+ component.getValidationStatus(VALIDATION_WAIT_MILLIS, TimeUnit.MILLISECONDS);
+ }
+
+ @Override
+ public LabelEntity updateLabel(final Revision revision, final LabelDTO labelDTO) {
+ final Label labelNode = labelDAO.getLabel(labelDTO.getId());
+ final RevisionUpdate<LabelDTO> snapshot = updateComponent(revision,
+ labelNode,
+ () -> labelDAO.updateLabel(labelDTO),
+ label -> dtoFactory.createLabelDto(label));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(labelNode);
+ return entityFactory.createLabelEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+ @Override
+ public FunnelEntity updateFunnel(final Revision revision, final FunnelDTO funnelDTO) {
+ final Funnel funnelNode = funnelDAO.getFunnel(funnelDTO.getId());
+ final RevisionUpdate<FunnelDTO> snapshot = updateComponent(revision,
+ funnelNode,
+ () -> funnelDAO.updateFunnel(funnelDTO),
+ funnel -> dtoFactory.createFunnelDto(funnel));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(funnelNode);
+ return entityFactory.createFunnelEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+
+ /**
+ * Updates a component with the given revision, using the provided supplier to call
+ * into the appropriate DAO and the provided function to convert the component into a DTO.
+ *
+ * @param revision the current revision
+ * @param authorizable the authorizable for the component being updated, referenced when reporting failures
+ * @param daoUpdate a Supplier that will update the component via the appropriate DAO
+ * @param dtoCreation a Function to convert a component into a DTO
+ * @param <D> the DTO Type of the updated component
+ * @param <C> the Component Type of the updated component
+ * @return A RevisionUpdate that represents the new configuration
+ */
+ private <D, C> RevisionUpdate<D> updateComponent(final Revision revision, final Authorizable authorizable, final Supplier<C> daoUpdate, final Function<C, D> dtoCreation) {
+ try {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final RevisionUpdate<D> updatedComponent = revisionManager.updateRevision(new StandardRevisionClaim(revision), user, new UpdateRevisionTask<D>() {
+ @Override
+ public RevisionUpdate<D> update() {
+ // get the updated component
+ final C component = daoUpdate.get();
+
+ // save updated controller
+ controllerFacade.save();
+
+ final D dto = dtoCreation.apply(component);
+
+ final Revision updatedRevision = revisionManager.getRevision(revision.getComponentId()).incrementRevision(revision.getClientId());
+ final FlowModification lastModification = new FlowModification(updatedRevision, user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastModification);
+ }
+ });
+
+ return updatedComponent;
+ } catch (final ExpiredRevisionClaimException erce) {
+ throw new InvalidRevisionException("Failed to update component " + authorizable, erce);
+ }
+ }
+
+
+ @Override
+ public void verifyUpdateSnippet(final SnippetDTO snippetDto, final Set<String> affectedComponentIds) {
+ // if the snippet does not exist, the update request is likely creating it,
+ // so skip verification since the update would fail anyway
+ if (snippetDAO.hasSnippet(snippetDto.getId())) {
+ snippetDAO.verifyUpdateSnippetComponent(snippetDto);
+ }
+ }
+
+ @Override
+ public SnippetEntity updateSnippet(final Set<Revision> revisions, final SnippetDTO snippetDto) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final RevisionClaim revisionClaim = new StandardRevisionClaim(revisions);
+
+ final RevisionUpdate<SnippetDTO> snapshot;
+ try {
+ snapshot = revisionManager.updateRevision(revisionClaim, user, new UpdateRevisionTask<SnippetDTO>() {
+ @Override
+ public RevisionUpdate<SnippetDTO> update() {
+ // get the updated component
+ final Snippet snippet = snippetDAO.updateSnippetComponents(snippetDto);
+
+ // drop the snippet
+ snippetDAO.dropSnippet(snippet.getId());
+
+ // save updated controller
+ controllerFacade.save();
+
+ // increment the revisions
+ final Set<Revision> updatedRevisions = revisions.stream().map(revision -> {
+ final Revision currentRevision = revisionManager.getRevision(revision.getComponentId());
+ return currentRevision.incrementRevision(revision.getClientId());
+ }).collect(Collectors.toSet());
+
+ final SnippetDTO dto = dtoFactory.createSnippetDto(snippet);
+ return new StandardRevisionUpdate<>(dto, null, updatedRevisions);
+ }
+ });
+ } catch (final ExpiredRevisionClaimException e) {
+ throw new InvalidRevisionException("Failed to update Snippet", e);
+ }
+
+ return entityFactory.createSnippetEntity(snapshot.getComponent());
+ }
+
+ @Override
+ public PortEntity updateInputPort(final Revision revision, final PortDTO inputPortDTO) {
+ final Port inputPortNode = inputPortDAO.getPort(inputPortDTO.getId());
+ final RevisionUpdate<PortDTO> snapshot = updateComponent(revision,
+ inputPortNode,
+ () -> inputPortDAO.updatePort(inputPortDTO),
+ port -> dtoFactory.createPortDto(port));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(inputPortNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(inputPortNode));
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getInputPortStatus(inputPortNode.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(inputPortNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public PortEntity updateOutputPort(final Revision revision, final PortDTO outputPortDTO) {
+ final Port outputPortNode = outputPortDAO.getPort(outputPortDTO.getId());
+ final RevisionUpdate<PortDTO> snapshot = updateComponent(revision,
+ outputPortNode,
+ () -> outputPortDAO.updatePort(outputPortDTO),
+ port -> dtoFactory.createPortDto(port));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(outputPortNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(outputPortNode), NiFiUserUtils.getNiFiUser());
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getOutputPortStatus(outputPortNode.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(outputPortNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public RemoteProcessGroupEntity updateRemoteProcessGroup(final Revision revision, final RemoteProcessGroupDTO remoteProcessGroupDTO) {
+ final RemoteProcessGroup remoteProcessGroupNode = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupDTO.getId());
+ final RevisionUpdate<RemoteProcessGroupDTO> snapshot = updateComponent(
+ revision,
+ remoteProcessGroupNode,
+ () -> remoteProcessGroupDAO.updateRemoteProcessGroup(remoteProcessGroupDTO),
+ remoteProcessGroup -> dtoFactory.createRemoteProcessGroupDto(remoteProcessGroup));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroupNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(remoteProcessGroupNode));
+ final RevisionDTO updateRevision = dtoFactory.createRevisionDTO(snapshot.getLastModification());
+ final RemoteProcessGroupStatusDTO status = dtoFactory.createRemoteProcessGroupStatusDto(remoteProcessGroupNode,
+ controllerFacade.getRemoteProcessGroupStatus(remoteProcessGroupNode.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(remoteProcessGroupNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createRemoteProcessGroupEntity(snapshot.getComponent(), updateRevision, permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public RemoteProcessGroupPortEntity updateRemoteProcessGroupInputPort(
+ final Revision revision, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+
+ final RemoteProcessGroup remoteProcessGroupNode = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupPortDTO.getGroupId());
+ final RevisionUpdate<RemoteProcessGroupPortDTO> snapshot = updateComponent(
+ revision,
+ remoteProcessGroupNode,
+ () -> remoteProcessGroupDAO.updateRemoteProcessGroupInputPort(remoteProcessGroupId, remoteProcessGroupPortDTO),
+ remoteGroupPort -> dtoFactory.createRemoteProcessGroupPortDto(remoteGroupPort));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroupNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(remoteProcessGroupNode));
+ final RevisionDTO updatedRevision = dtoFactory.createRevisionDTO(snapshot.getLastModification());
+ return entityFactory.createRemoteProcessGroupPortEntity(snapshot.getComponent(), updatedRevision, permissions, operatePermissions);
+ }
+
+ @Override
+ public RemoteProcessGroupPortEntity updateRemoteProcessGroupOutputPort(
+ final Revision revision, final String remoteProcessGroupId, final RemoteProcessGroupPortDTO remoteProcessGroupPortDTO) {
+
+ final RemoteProcessGroup remoteProcessGroupNode = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupPortDTO.getGroupId());
+ final RevisionUpdate<RemoteProcessGroupPortDTO> snapshot = updateComponent(
+ revision,
+ remoteProcessGroupNode,
+ () -> remoteProcessGroupDAO.updateRemoteProcessGroupOutputPort(remoteProcessGroupId, remoteProcessGroupPortDTO),
+ remoteGroupPort -> dtoFactory.createRemoteProcessGroupPortDto(remoteGroupPort));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroupNode);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(remoteProcessGroupNode));
+ final RevisionDTO updatedRevision = dtoFactory.createRevisionDTO(snapshot.getLastModification());
+ return entityFactory.createRemoteProcessGroupPortEntity(snapshot.getComponent(), updatedRevision, permissions, operatePermissions);
+ }
+
+ @Override
+ public Set<AffectedComponentDTO> getActiveComponentsAffectedByVariableRegistryUpdate(final VariableRegistryDTO variableRegistryDto) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(variableRegistryDto.getProcessGroupId());
+ if (group == null) {
+ throw new ResourceNotFoundException("Could not find Process Group with ID " + variableRegistryDto.getProcessGroupId());
+ }
+
+ final Map<String, String> variableMap = new HashMap<>();
+ variableRegistryDto.getVariables().stream() // have to use forEach here instead of using Collectors.toMap because value may be null
+ .map(VariableEntity::getVariable)
+ .forEach(var -> variableMap.put(var.getName(), var.getValue()));
+
+ final Set<AffectedComponentDTO> affectedComponentDtos = new HashSet<>();
+
+ final Set<String> updatedVariableNames = getUpdatedVariables(group, variableMap);
+ for (final String variableName : updatedVariableNames) {
+ final Set<ComponentNode> affectedComponents = group.getComponentsAffectedByVariable(variableName);
+
+ for (final ComponentNode component : affectedComponents) {
+ if (component instanceof ProcessorNode) {
+ final ProcessorNode procNode = (ProcessorNode) component;
+ if (procNode.isRunning()) {
+ affectedComponentDtos.add(dtoFactory.createAffectedComponentDto(procNode));
+ }
+ } else if (component instanceof ControllerServiceNode) {
+ final ControllerServiceNode serviceNode = (ControllerServiceNode) component;
+ if (serviceNode.isActive()) {
+ affectedComponentDtos.add(dtoFactory.createAffectedComponentDto(serviceNode));
+ }
+ } else {
+ throw new RuntimeException("Found unexpected type of Component [" + component.getCanonicalClassName() + "] dependending on variable");
+ }
+ }
+ }
+
+ return affectedComponentDtos;
+ }
+
+ @Override
+ public Set<AffectedComponentEntity> getComponentsAffectedByVariableRegistryUpdate(final VariableRegistryDTO variableRegistryDto) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(variableRegistryDto.getProcessGroupId());
+ if (group == null) {
+ throw new ResourceNotFoundException("Could not find Process Group with ID " + variableRegistryDto.getProcessGroupId());
+ }
+
+ final Map<String, String> variableMap = new HashMap<>();
+ variableRegistryDto.getVariables().stream() // have to use forEach here instead of using Collectors.toMap because value may be null
+ .map(VariableEntity::getVariable)
+ .forEach(var -> variableMap.put(var.getName(), var.getValue()));
+
+ final Set<AffectedComponentEntity> affectedComponentEntities = new HashSet<>();
+
+ final Set<String> updatedVariableNames = getUpdatedVariables(group, variableMap);
+ for (final String variableName : updatedVariableNames) {
+ final Set<ComponentNode> affectedComponents = group.getComponentsAffectedByVariable(variableName);
+ affectedComponentEntities.addAll(dtoFactory.createAffectedComponentEntities(affectedComponents, revisionManager));
+ }
+
+ return affectedComponentEntities;
+ }
+
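+ // compare the proposed variable values against the group's current variable registry and return the names of the variables whose values would change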
+ private Set<String> getUpdatedVariables(final ProcessGroup group, final Map<String, String> newVariableValues) {
+ final Set<String> updatedVariableNames = new HashSet<>();
+
+ final ComponentVariableRegistry registry = group.getVariableRegistry();
+ for (final Map.Entry<String, String> entry : newVariableValues.entrySet()) {
+ final String varName = entry.getKey();
+ final String newValue = entry.getValue();
+
+ final String curValue = registry.getVariableValue(varName);
+ if (!Objects.equals(newValue, curValue)) {
+ updatedVariableNames.add(varName);
+ }
+ }
+
+ return updatedVariableNames;
+ }
+
+
+ @Override
+ public VariableRegistryEntity updateVariableRegistry(Revision revision, VariableRegistryDTO variableRegistryDto) {
+ final ProcessGroup processGroupNode = processGroupDAO.getProcessGroup(variableRegistryDto.getProcessGroupId());
+ final RevisionUpdate<VariableRegistryDTO> snapshot = updateComponent(revision,
+ processGroupNode,
+ () -> processGroupDAO.updateVariableRegistry(variableRegistryDto),
+ processGroup -> dtoFactory.createVariableRegistryDto(processGroup, revisionManager));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroupNode);
+ final RevisionDTO updatedRevision = dtoFactory.createRevisionDTO(snapshot.getLastModification());
+ return entityFactory.createVariableRegistryEntity(snapshot.getComponent(), updatedRevision, permissions);
+ }
+
+
+ @Override
+ public ProcessGroupEntity updateProcessGroup(final Revision revision, final ProcessGroupDTO processGroupDTO) {
+ final ProcessGroup processGroupNode = processGroupDAO.getProcessGroup(processGroupDTO.getId());
+ final RevisionUpdate<ProcessGroupDTO> snapshot = updateComponent(revision,
+ processGroupNode,
+ () -> processGroupDAO.updateProcessGroup(processGroupDTO),
+ processGroup -> dtoFactory.createProcessGroupDto(processGroup));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroupNode);
+ final RevisionDTO updatedRevision = dtoFactory.createRevisionDTO(snapshot.getLastModification());
+ final ProcessGroupStatusDTO status = dtoFactory.createConciseProcessGroupStatusDto(controllerFacade.getProcessGroupStatus(processGroupNode.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processGroupNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessGroupEntity(snapshot.getComponent(), updatedRevision, permissions, status, bulletinEntities);
+ }
+
+ @Override
+ public void verifyUpdateProcessGroup(ProcessGroupDTO processGroupDTO) {
+ if (processGroupDAO.hasProcessGroup(processGroupDTO.getId())) {
+ processGroupDAO.verifyUpdate(processGroupDTO);
+ }
+ }
+
+ @Override
+ public ScheduleComponentsEntity enableComponents(String processGroupId, ScheduledState state, Map<String, Revision> componentRevisions) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final RevisionUpdate<ScheduleComponentsEntity> updatedComponent = revisionManager.updateRevision(new StandardRevisionClaim(componentRevisions.values()), user, new
+ UpdateRevisionTask<ScheduleComponentsEntity>() {
+ @Override
+ public RevisionUpdate<ScheduleComponentsEntity> update() {
+ // enable the components
+ processGroupDAO.enableComponents(processGroupId, state, componentRevisions.keySet());
+
+ // update the revisions
+ final Map<String, Revision> updatedRevisions = new HashMap<>();
+ for (final Revision revision : componentRevisions.values()) {
+ final Revision currentRevision = revisionManager.getRevision(revision.getComponentId());
+ updatedRevisions.put(revision.getComponentId(), currentRevision.incrementRevision(revision.getClientId()));
+ }
+
+ // save
+ controllerFacade.save();
+
+ // gather details for response
+ final ScheduleComponentsEntity entity = new ScheduleComponentsEntity();
+ entity.setId(processGroupId);
+ entity.setState(state.name());
+ return new StandardRevisionUpdate<>(entity, null, new HashSet<>(updatedRevisions.values()));
+ }
+ });
+
+ return updatedComponent.getComponent();
+ }
+
+ @Override
+ public ScheduleComponentsEntity scheduleComponents(final String processGroupId, final ScheduledState state, final Map<String, Revision> componentRevisions) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final RevisionUpdate<ScheduleComponentsEntity> updatedComponent = revisionManager.updateRevision(new StandardRevisionClaim(componentRevisions.values()), user, new
+ UpdateRevisionTask<ScheduleComponentsEntity>() {
+ @Override
+ public RevisionUpdate<ScheduleComponentsEntity> update() {
+ // schedule the components
+ processGroupDAO.scheduleComponents(processGroupId, state, componentRevisions.keySet());
+
+ // update the revisions
+ final Map<String, Revision> updatedRevisions = new HashMap<>();
+ for (final Revision revision : componentRevisions.values()) {
+ final Revision currentRevision = revisionManager.getRevision(revision.getComponentId());
+ updatedRevisions.put(revision.getComponentId(), currentRevision.incrementRevision(revision.getClientId()));
+ }
+
+ // save
+ controllerFacade.save();
+
+ // gather details for response
+ final ScheduleComponentsEntity entity = new ScheduleComponentsEntity();
+ entity.setId(processGroupId);
+ entity.setState(state.name());
+ return new StandardRevisionUpdate<>(entity, null, new HashSet<>(updatedRevisions.values()));
+ }
+ });
+
+ return updatedComponent.getComponent();
+ }
+
+ @Override
+ public ActivateControllerServicesEntity activateControllerServices(final String processGroupId, final ControllerServiceState state, final Map<String, Revision> serviceRevisions) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final RevisionUpdate<ActivateControllerServicesEntity> updatedComponent = revisionManager.updateRevision(new StandardRevisionClaim(serviceRevisions.values()), user,
+ new UpdateRevisionTask<ActivateControllerServicesEntity>() {
+ @Override
+ public RevisionUpdate<ActivateControllerServicesEntity> update() {
+ // activate (or disable) the controller services
+ processGroupDAO.activateControllerServices(processGroupId, state, serviceRevisions.keySet());
+
+ // update the revisions
+ final Map<String, Revision> updatedRevisions = new HashMap<>();
+ for (final Revision revision : serviceRevisions.values()) {
+ final Revision currentRevision = revisionManager.getRevision(revision.getComponentId());
+ updatedRevisions.put(revision.getComponentId(), currentRevision.incrementRevision(revision.getClientId()));
+ }
+
+ // save
+ controllerFacade.save();
+
+ // gather details for response
+ final ActivateControllerServicesEntity entity = new ActivateControllerServicesEntity();
+ entity.setId(processGroupId);
+ entity.setState(state.name());
+ return new StandardRevisionUpdate<>(entity, null, new HashSet<>(updatedRevisions.values()));
+ }
+ });
+
+ return updatedComponent.getComponent();
+ }
+
+
+ @Override
+ public ControllerConfigurationEntity updateControllerConfiguration(final Revision revision, final ControllerConfigurationDTO controllerConfigurationDTO) {
+ final RevisionUpdate<ControllerConfigurationDTO> updatedComponent = updateComponent(
+ revision,
+ controllerFacade,
+ () -> {
+ if (controllerConfigurationDTO.getMaxTimerDrivenThreadCount() != null) {
+ controllerFacade.setMaxTimerDrivenThreadCount(controllerConfigurationDTO.getMaxTimerDrivenThreadCount());
+ }
+ if (controllerConfigurationDTO.getMaxEventDrivenThreadCount() != null) {
+ controllerFacade.setMaxEventDrivenThreadCount(controllerConfigurationDTO.getMaxEventDrivenThreadCount());
+ }
+
+ return controllerConfigurationDTO;
+ },
+ controller -> dtoFactory.createControllerConfigurationDto(controllerFacade));
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(controllerFacade);
+ final RevisionDTO updateRevision = dtoFactory.createRevisionDTO(updatedComponent.getLastModification());
+ return entityFactory.createControllerConfigurationEntity(updatedComponent.getComponent(), updateRevision, permissions);
+ }
+
+
+ @Override
+ public NodeDTO updateNode(final NodeDTO nodeDTO) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ if (user == null) {
+ throw new WebApplicationException(new Throwable("Unable to access details for current user."));
+ }
+ final String userDn = user.getIdentity();
+
+ final NodeIdentifier nodeId = clusterCoordinator.getNodeIdentifier(nodeDTO.getNodeId());
+ if (nodeId == null) {
+ throw new UnknownNodeException("No node exists with ID " + nodeDTO.getNodeId());
+ }
+
+
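+ // map the requested node status onto the corresponding cluster coordinator request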
+ if (NodeConnectionState.CONNECTING.name().equalsIgnoreCase(nodeDTO.getStatus())) {
+ clusterCoordinator.requestNodeConnect(nodeId, userDn);
+ } else if (NodeConnectionState.OFFLOADING.name().equalsIgnoreCase(nodeDTO.getStatus())) {
+ clusterCoordinator.requestNodeOffload(nodeId, OffloadCode.OFFLOADED,
+ "User " + userDn + " requested that node be offloaded");
+ } else if (NodeConnectionState.DISCONNECTING.name().equalsIgnoreCase(nodeDTO.getStatus())) {
+ clusterCoordinator.requestNodeDisconnect(nodeId, DisconnectionCode.USER_DISCONNECTED,
+ "User " + userDn + " requested that node be disconnected from cluster");
+ }
+
+ return getNode(nodeId);
+ }
+
+ @Override
+ public CounterDTO updateCounter(final String counterId) {
+ return dtoFactory.createCounterDto(controllerFacade.resetCounter(counterId));
+ }
+
+ @Override
+ public void verifyCanClearProcessorState(final String processorId) {
+ processorDAO.verifyClearState(processorId);
+ }
+
+ @Override
+ public void clearProcessorState(final String processorId) {
+ processorDAO.clearState(processorId);
+ }
+
+ @Override
+ public void verifyCanClearControllerServiceState(final String controllerServiceId) {
+ controllerServiceDAO.verifyClearState(controllerServiceId);
+ }
+
+ @Override
+ public void clearControllerServiceState(final String controllerServiceId) {
+ controllerServiceDAO.clearState(controllerServiceId);
+ }
+
+ @Override
+ public void verifyCanClearReportingTaskState(final String reportingTaskId) {
+ reportingTaskDAO.verifyClearState(reportingTaskId);
+ }
+
+ @Override
+ public void clearReportingTaskState(final String reportingTaskId) {
+ reportingTaskDAO.clearState(reportingTaskId);
+ }
+
+ @Override
+ public ConnectionEntity deleteConnection(final Revision revision, final String connectionId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connection);
+ final ConnectionDTO snapshot = deleteComponent(
+ revision,
+ connection.getResource(),
+ () -> connectionDAO.deleteConnection(connectionId),
+ false, // no policies to remove
+ dtoFactory.createConnectionDto(connection));
+
+ return entityFactory.createConnectionEntity(snapshot, null, permissions, null);
+ }
+
+ @Override
+ public DropRequestDTO deleteFlowFileDropRequest(final String connectionId, final String dropRequestId) {
+ return dtoFactory.createDropRequestDTO(connectionDAO.deleteFlowFileDropRequest(connectionId, dropRequestId));
+ }
+
+ @Override
+ public ListingRequestDTO deleteFlowFileListingRequest(final String connectionId, final String listingRequestId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final ListingRequestDTO listRequest = dtoFactory.createListingRequestDTO(connectionDAO.deleteFlowFileListingRequest(connectionId, listingRequestId));
+
+ // include whether the source and destination are running
+ if (connection.getSource() != null) {
+ listRequest.setSourceRunning(connection.getSource().isRunning());
+ }
+ if (connection.getDestination() != null) {
+ listRequest.setDestinationRunning(connection.getDestination().isRunning());
+ }
+
+ return listRequest;
+ }
+
+ @Override
+ public ProcessorEntity deleteProcessor(final Revision revision, final String processorId) {
+ final ProcessorNode processor = processorDAO.getProcessor(processorId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processor);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(processor));
+ final ProcessorDTO snapshot = deleteComponent(
+ revision,
+ processor.getResource(),
+ () -> processorDAO.deleteProcessor(processorId),
+ true,
+ dtoFactory.createProcessorDto(processor));
+
+ return entityFactory.createProcessorEntity(snapshot, null, permissions, operatePermissions, null, null);
+ }
+
+ @Override
+ public ProcessorEntity terminateProcessor(final String processorId) {
+ processorDAO.terminate(processorId);
+ return getProcessor(processorId);
+ }
+
+ @Override
+ public void verifyTerminateProcessor(final String processorId) {
+ processorDAO.verifyTerminate(processorId);
+ }
+
+ @Override
+ public LabelEntity deleteLabel(final Revision revision, final String labelId) {
+ final Label label = labelDAO.getLabel(labelId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(label);
+ final LabelDTO snapshot = deleteComponent(
+ revision,
+ label.getResource(),
+ () -> labelDAO.deleteLabel(labelId),
+ true,
+ dtoFactory.createLabelDto(label));
+
+ return entityFactory.createLabelEntity(snapshot, null, permissions);
+ }
+
+ @Override
+ public UserEntity deleteUser(final Revision revision, final String userId) {
+ final User user = userDAO.getUser(userId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ final Set<TenantEntity> userGroups = user != null ? userGroupDAO.getUserGroupsForUser(userId).stream()
+ .map(g -> g.getIdentifier()).map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet()) : null;
+ final Set<AccessPolicySummaryEntity> policyEntities = user != null ? userGroupDAO.getAccessPoliciesForUser(userId).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet()) : null;
+
+ final String resourceIdentifier = ResourceFactory.getTenantResource().getIdentifier() + "/" + userId;
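+        // build a transient Resource for the user on the fly, since users are represented under the tenant resource rather than by a dedicated component resource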
+ final UserDTO snapshot = deleteComponent(
+ revision,
+ new Resource() {
+ @Override
+ public String getIdentifier() {
+ return resourceIdentifier;
+ }
+
+ @Override
+ public String getName() {
+ return resourceIdentifier;
+ }
+
+ @Override
+ public String getSafeDescription() {
+ return "User " + userId;
+ }
+ },
+ () -> userDAO.deleteUser(userId),
+ false, // no user specific policies to remove
+ dtoFactory.createUserDto(user, userGroups, policyEntities));
+
+ return entityFactory.createUserEntity(snapshot, null, permissions);
+ }
+
+ @Override
+ public UserGroupEntity deleteUserGroup(final Revision revision, final String userGroupId) {
+ final Group userGroup = userGroupDAO.getUserGroup(userGroupId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ final Set<TenantEntity> users = userGroup != null ? userGroup.getUsers().stream()
+ .map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet()) : null;
+ final Set<AccessPolicySummaryEntity> policyEntities = userGroupDAO.getAccessPoliciesForUserGroup(userGroup.getIdentifier()).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+
+ final String resourceIdentifier = ResourceFactory.getTenantResource().getIdentifier() + "/" + userGroupId;
+ final UserGroupDTO snapshot = deleteComponent(
+ revision,
+ new Resource() {
+ @Override
+ public String getIdentifier() {
+ return resourceIdentifier;
+ }
+
+ @Override
+ public String getName() {
+ return resourceIdentifier;
+ }
+
+ @Override
+ public String getSafeDescription() {
+ return "User Group " + userGroupId;
+ }
+ },
+ () -> userGroupDAO.deleteUserGroup(userGroupId),
+ false, // no user group specific policies to remove
+ dtoFactory.createUserGroupDto(userGroup, users, policyEntities));
+
+ return entityFactory.createUserGroupEntity(snapshot, null, permissions);
+ }
+
+ @Override
+ public AccessPolicyEntity deleteAccessPolicy(final Revision revision, final String accessPolicyId) {
+ final AccessPolicy accessPolicy = accessPolicyDAO.getAccessPolicy(accessPolicyId);
+ final ComponentReferenceEntity componentReference = createComponentReferenceEntity(accessPolicy.getResource());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getAccessPolicyById(accessPolicyId));
+ final Set<TenantEntity> userGroups = accessPolicy != null ? accessPolicy.getGroups().stream().map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet()) : null;
+ final Set<TenantEntity> users = accessPolicy != null ? accessPolicy.getUsers().stream().map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet()) : null;
+ final AccessPolicyDTO snapshot = deleteComponent(
+ revision,
+ new Resource() {
+ @Override
+ public String getIdentifier() {
+ return accessPolicy.getResource();
+ }
+
+ @Override
+ public String getName() {
+ return accessPolicy.getResource();
+ }
+
+ @Override
+ public String getSafeDescription() {
+ return "Policy " + accessPolicyId;
+ }
+ },
+ () -> accessPolicyDAO.deleteAccessPolicy(accessPolicyId),
+            false, // no need to clean up any policies since the access policy itself is what is being removed
+ dtoFactory.createAccessPolicyDto(accessPolicy, userGroups, users, componentReference));
+
+ return entityFactory.createAccessPolicyEntity(snapshot, null, permissions);
+ }
+
+ @Override
+ public FunnelEntity deleteFunnel(final Revision revision, final String funnelId) {
+ final Funnel funnel = funnelDAO.getFunnel(funnelId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(funnel);
+ final FunnelDTO snapshot = deleteComponent(
+ revision,
+ funnel.getResource(),
+ () -> funnelDAO.deleteFunnel(funnelId),
+ true,
+ dtoFactory.createFunnelDto(funnel));
+
+ return entityFactory.createFunnelEntity(snapshot, null, permissions);
+ }
+
+    /**
+     * Deletes a component using the Optimistic Locking Manager.
+     *
+     * @param revision the current revision
+     * @param resource the resource being removed
+     * @param deleteAction the action that deletes the component via the appropriate DAO object
+     * @param cleanUpPolicies whether the policies for this resource should be removed as well - not necessary when there are
+     * no component-specific policies or when the component's policies are inherited
+     * @param dto the DTO representing the component being removed
+     * @return the DTO representing the removed component
+     */
+ private <D, C> D deleteComponent(final Revision revision, final Resource resource, final Runnable deleteAction, final boolean cleanUpPolicies, final D dto) {
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ return revisionManager.deleteRevision(claim, user, new DeleteRevisionTask<D>() {
+ @Override
+ public D performTask() {
+ logger.debug("Attempting to delete component {} with claim {}", resource.getIdentifier(), claim);
+
+ // run the delete action
+ deleteAction.run();
+
+ // save the flow
+ controllerFacade.save();
+ logger.debug("Deletion of component {} was successful", resource.getIdentifier());
+
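+                // remove any component-specific access policies only after the deletion has been persisted to the flow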
+ if (cleanUpPolicies) {
+ cleanUpPolicies(resource);
+ }
+
+ return dto;
+ }
+ });
+ }
+
+ /**
+ * Clean up the policies for the specified component resource.
+ *
+ * @param componentResource the resource for the component
+ */
+ private void cleanUpPolicies(final Resource componentResource) {
+ // ensure the authorizer supports configuration
+ if (accessPolicyDAO.supportsConfigurableAuthorizer()) {
+ final List<Resource> resources = new ArrayList<>();
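+            // in addition to the component's own resource, also consider the derived data, provenance data,
+            // data transfer, and policy resources so that any policies scoped to them are removed as well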
+ resources.add(componentResource);
+ resources.add(ResourceFactory.getDataResource(componentResource));
+ resources.add(ResourceFactory.getProvenanceDataResource(componentResource));
+ resources.add(ResourceFactory.getDataTransferResource(componentResource));
+ resources.add(ResourceFactory.getPolicyResource(componentResource));
+
+ for (final Resource resource : resources) {
+ for (final RequestAction action : RequestAction.values()) {
+ try {
+ // since the component is being deleted, also delete any relevant access policies
+ final AccessPolicy readPolicy = accessPolicyDAO.getAccessPolicy(action, resource.getIdentifier());
+ if (readPolicy != null) {
+ accessPolicyDAO.deleteAccessPolicy(readPolicy.getIdentifier());
+ }
+ } catch (final Exception e) {
+ logger.warn(String.format("Unable to remove access policy for %s %s after component removal.", action, resource.getIdentifier()), e);
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void verifyDeleteSnippet(final String snippetId, final Set<String> affectedComponentIds) {
+ snippetDAO.verifyDeleteSnippetComponents(snippetId);
+ }
+
+ @Override
+ public SnippetEntity deleteSnippet(final Set<Revision> revisions, final String snippetId) {
+ final Snippet snippet = snippetDAO.getSnippet(snippetId);
+
+ // grab the resources in the snippet so we can delete the policies afterwards
+ final Set<Resource> snippetResources = new HashSet<>();
+ snippet.getProcessors().keySet().forEach(id -> snippetResources.add(processorDAO.getProcessor(id).getResource()));
+ snippet.getInputPorts().keySet().forEach(id -> snippetResources.add(inputPortDAO.getPort(id).getResource()));
+ snippet.getOutputPorts().keySet().forEach(id -> snippetResources.add(outputPortDAO.getPort(id).getResource()));
+ snippet.getFunnels().keySet().forEach(id -> snippetResources.add(funnelDAO.getFunnel(id).getResource()));
+ snippet.getLabels().keySet().forEach(id -> snippetResources.add(labelDAO.getLabel(id).getResource()));
+ snippet.getRemoteProcessGroups().keySet().forEach(id -> snippetResources.add(remoteProcessGroupDAO.getRemoteProcessGroup(id).getResource()));
+ snippet.getProcessGroups().keySet().forEach(id -> {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(id);
+
+ // add the process group
+ snippetResources.add(processGroup.getResource());
+
+ // add each encapsulated component
+ processGroup.findAllProcessors().forEach(processor -> snippetResources.add(processor.getResource()));
+ processGroup.findAllInputPorts().forEach(inputPort -> snippetResources.add(inputPort.getResource()));
+ processGroup.findAllOutputPorts().forEach(outputPort -> snippetResources.add(outputPort.getResource()));
+ processGroup.findAllFunnels().forEach(funnel -> snippetResources.add(funnel.getResource()));
+ processGroup.findAllLabels().forEach(label -> snippetResources.add(label.getResource()));
+ processGroup.findAllProcessGroups().forEach(childGroup -> snippetResources.add(childGroup.getResource()));
+ processGroup.findAllRemoteProcessGroups().forEach(remoteProcessGroup -> snippetResources.add(remoteProcessGroup.getResource()));
+ processGroup.findAllTemplates().forEach(template -> snippetResources.add(template.getResource()));
+ processGroup.findAllControllerServices().forEach(controllerService -> snippetResources.add(controllerService.getResource()));
+ });
+
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final RevisionClaim claim = new StandardRevisionClaim(revisions);
+ final SnippetDTO dto = revisionManager.deleteRevision(claim, user, new DeleteRevisionTask<SnippetDTO>() {
+ @Override
+ public SnippetDTO performTask() {
+ // delete the components in the snippet
+ snippetDAO.deleteSnippetComponents(snippetId);
+
+ // drop the snippet
+ snippetDAO.dropSnippet(snippetId);
+
+ // save
+ controllerFacade.save();
+
+ // create the dto for the snippet that was just removed
+ return dtoFactory.createSnippetDto(snippet);
+ }
+ });
+
+ // clean up component policies
+ snippetResources.forEach(resource -> cleanUpPolicies(resource));
+
+ return entityFactory.createSnippetEntity(dto);
+ }
+
+ @Override
+ public PortEntity deleteInputPort(final Revision revision, final String inputPortId) {
+ final Port port = inputPortDAO.getPort(inputPortId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port));
+ final PortDTO snapshot = deleteComponent(
+ revision,
+ port.getResource(),
+ () -> inputPortDAO.deletePort(inputPortId),
+ true,
+ dtoFactory.createPortDto(port));
+
+ return entityFactory.createPortEntity(snapshot, null, permissions, operatePermissions, null, null);
+ }
+
+ @Override
+ public PortEntity deleteOutputPort(final Revision revision, final String outputPortId) {
+ final Port port = outputPortDAO.getPort(outputPortId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port));
+ final PortDTO snapshot = deleteComponent(
+ revision,
+ port.getResource(),
+ () -> outputPortDAO.deletePort(outputPortId),
+ true,
+ dtoFactory.createPortDto(port));
+
+ return entityFactory.createPortEntity(snapshot, null, permissions, operatePermissions, null, null);
+ }
+
+ @Override
+ public ProcessGroupEntity deleteProcessGroup(final Revision revision, final String groupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+
+        // grab the resources in the process group so we can delete the policies afterwards
+ final Set<Resource> groupResources = new HashSet<>();
+ processGroup.findAllProcessors().forEach(processor -> groupResources.add(processor.getResource()));
+ processGroup.findAllInputPorts().forEach(inputPort -> groupResources.add(inputPort.getResource()));
+ processGroup.findAllOutputPorts().forEach(outputPort -> groupResources.add(outputPort.getResource()));
+ processGroup.findAllFunnels().forEach(funnel -> groupResources.add(funnel.getResource()));
+ processGroup.findAllLabels().forEach(label -> groupResources.add(label.getResource()));
+ processGroup.findAllProcessGroups().forEach(childGroup -> groupResources.add(childGroup.getResource()));
+ processGroup.findAllRemoteProcessGroups().forEach(remoteProcessGroup -> groupResources.add(remoteProcessGroup.getResource()));
+ processGroup.findAllTemplates().forEach(template -> groupResources.add(template.getResource()));
+ processGroup.findAllControllerServices().forEach(controllerService -> groupResources.add(controllerService.getResource()));
+
+ final ProcessGroupDTO snapshot = deleteComponent(
+ revision,
+ processGroup.getResource(),
+ () -> processGroupDAO.deleteProcessGroup(groupId),
+ true,
+ dtoFactory.createProcessGroupDto(processGroup));
+
+ // delete all applicable component policies
+ groupResources.forEach(groupResource -> cleanUpPolicies(groupResource));
+
+ return entityFactory.createProcessGroupEntity(snapshot, null, permissions, null, null);
+ }
+
+ @Override
+ public RemoteProcessGroupEntity deleteRemoteProcessGroup(final Revision revision, final String remoteProcessGroupId) {
+ final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroup);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(remoteProcessGroup));
+ final RemoteProcessGroupDTO snapshot = deleteComponent(
+ revision,
+ remoteProcessGroup.getResource(),
+ () -> remoteProcessGroupDAO.deleteRemoteProcessGroup(remoteProcessGroupId),
+ true,
+ dtoFactory.createRemoteProcessGroupDto(remoteProcessGroup));
+
+ return entityFactory.createRemoteProcessGroupEntity(snapshot, null, permissions, operatePermissions, null, null);
+ }
+
+ @Override
+ public void deleteTemplate(final String id) {
+ // delete the template and save the flow
+ templateDAO.deleteTemplate(id);
+ controllerFacade.save();
+ }
+
+ @Override
+ public ConnectionEntity createConnection(final Revision revision, final String groupId, final ConnectionDTO connectionDTO) {
+ final RevisionUpdate<ConnectionDTO> snapshot = createComponent(
+ revision,
+ connectionDTO,
+ () -> connectionDAO.createConnection(groupId, connectionDTO),
+ connection -> dtoFactory.createConnectionDto(connection));
+
+ final Connection connection = connectionDAO.getConnection(connectionDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connection);
+ final ConnectionStatusDTO status = dtoFactory.createConnectionStatusDto(controllerFacade.getConnectionStatus(connectionDTO.getId()));
+ return entityFactory.createConnectionEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, status);
+ }
+
+ @Override
+ public DropRequestDTO createFlowFileDropRequest(final String connectionId, final String dropRequestId) {
+ return dtoFactory.createDropRequestDTO(connectionDAO.createFlowFileDropRequest(connectionId, dropRequestId));
+ }
+
+ @Override
+ public ListingRequestDTO createFlowFileListingRequest(final String connectionId, final String listingRequestId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final ListingRequestDTO listRequest = dtoFactory.createListingRequestDTO(connectionDAO.createFlowFileListingRequest(connectionId, listingRequestId));
+
+ // include whether the source and destination are running
+ if (connection.getSource() != null) {
+ listRequest.setSourceRunning(connection.getSource().isRunning());
+ }
+ if (connection.getDestination() != null) {
+ listRequest.setDestinationRunning(connection.getDestination().isRunning());
+ }
+
+ return listRequest;
+ }
+
+ @Override
+ public ProcessorEntity createProcessor(final Revision revision, final String groupId, final ProcessorDTO processorDTO) {
+ final RevisionUpdate<ProcessorDTO> snapshot = createComponent(
+ revision,
+ processorDTO,
+ () -> processorDAO.createProcessor(groupId, processorDTO),
+ processor -> {
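+                    // wait for validation of the new processor to complete so the returned DTO reflects an up-to-date validation status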
+ awaitValidationCompletion(processor);
+ return dtoFactory.createProcessorDto(processor);
+ });
+
+ final ProcessorNode processor = processorDAO.getProcessor(processorDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processor);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(processor));
+ final ProcessorStatusDTO status = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(processorDTO.getId()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processorDTO.getId()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessorEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public LabelEntity createLabel(final Revision revision, final String groupId, final LabelDTO labelDTO) {
+ final RevisionUpdate<LabelDTO> snapshot = createComponent(
+ revision,
+ labelDTO,
+ () -> labelDAO.createLabel(groupId, labelDTO),
+ label -> dtoFactory.createLabelDto(label));
+
+ final Label label = labelDAO.getLabel(labelDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(label);
+ return entityFactory.createLabelEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+    /**
+     * Creates a component using the optimistic locking manager.
+     *
+     * @param revision the revision for the component being created
+     * @param componentDto the DTO that will be used to create the component
+     * @param daoCreation a Supplier that will create the NiFi Component to use
+     * @param dtoCreation a Function that will convert the NiFi Component into a corresponding DTO
+     * @param <D> the DTO Type
+     * @param <C> the NiFi Component Type
+     * @return a RevisionUpdate that represents the updated configuration
+     */
+ private <D, C> RevisionUpdate<D> createComponent(final Revision revision, final ComponentDTO componentDto, final Supplier<C> daoCreation, final Function<C, D> dtoCreation) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ // read lock on the containing group
+ // request claim for component to be created... revision already verified (version == 0)
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+
+ // update revision through revision manager
+ return revisionManager.updateRevision(claim, user, () -> {
+ // add the component
+ final C component = daoCreation.get();
+
+ // save the flow
+ controllerFacade.save();
+
+ final D dto = dtoCreation.apply(component);
+ final FlowModification lastMod = new FlowModification(revision.incrementRevision(revision.getClientId()), user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastMod);
+ });
+ }
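+    // Illustrative sketch only of how createComponent(...) above is typically invoked (mirroring the createLabel call site):
+    //
+    //     final RevisionUpdate<LabelDTO> snapshot = createComponent(
+    //             revision,
+    //             labelDTO,
+    //             () -> labelDAO.createLabel(groupId, labelDTO),
+    //             label -> dtoFactory.createLabelDto(label));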
+
+ @Override
+    public BulletinEntity createBulletin(final BulletinDTO bulletinDTO, final Boolean canRead) {
+        final Bulletin bulletin = BulletinFactory.createBulletin(bulletinDTO.getCategory(), bulletinDTO.getLevel(), bulletinDTO.getMessage());
+        bulletinRepository.addBulletin(bulletin);
+        return entityFactory.createBulletinEntity(dtoFactory.createBulletinDto(bulletin), canRead);
+    }
+
+ @Override
+ public FunnelEntity createFunnel(final Revision revision, final String groupId, final FunnelDTO funnelDTO) {
+ final RevisionUpdate<FunnelDTO> snapshot = createComponent(
+ revision,
+ funnelDTO,
+ () -> funnelDAO.createFunnel(groupId, funnelDTO),
+ funnel -> dtoFactory.createFunnelDto(funnel));
+
+ final Funnel funnel = funnelDAO.getFunnel(funnelDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(funnel);
+ return entityFactory.createFunnelEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions);
+ }
+
+ @Override
+ public AccessPolicyEntity createAccessPolicy(final Revision revision, final AccessPolicyDTO accessPolicyDTO) {
+ final Authorizable tenantAuthorizable = authorizableLookup.getTenant();
+ final String creator = NiFiUserUtils.getNiFiUserIdentity();
+
+ final AccessPolicy newAccessPolicy = accessPolicyDAO.createAccessPolicy(accessPolicyDTO);
+ final ComponentReferenceEntity componentReference = createComponentReferenceEntity(newAccessPolicy.getResource());
+ final AccessPolicyDTO newAccessPolicyDto = dtoFactory.createAccessPolicyDto(newAccessPolicy,
+ newAccessPolicy.getGroups().stream().map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet()),
+ newAccessPolicy.getUsers().stream().map(userId -> {
+ final RevisionDTO userRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(userId));
+ return entityFactory.createTenantEntity(dtoFactory.createTenantDTO(userDAO.getUser(userId)), userRevision,
+ dtoFactory.createPermissionsDto(tenantAuthorizable));
+ }).collect(Collectors.toSet()), componentReference);
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getAccessPolicyById(accessPolicyDTO.getId()));
+ return entityFactory.createAccessPolicyEntity(newAccessPolicyDto, dtoFactory.createRevisionDTO(new FlowModification(revision, creator)), permissions);
+ }
+
+ @Override
+ public UserEntity createUser(final Revision revision, final UserDTO userDTO) {
+ final String creator = NiFiUserUtils.getNiFiUserIdentity();
+ final User newUser = userDAO.createUser(userDTO);
+ final Set<TenantEntity> tenantEntities = userGroupDAO.getUserGroupsForUser(newUser.getIdentifier()).stream()
+ .map(g -> g.getIdentifier()).map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = userGroupDAO.getAccessPoliciesForUser(newUser.getIdentifier()).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ final UserDTO newUserDto = dtoFactory.createUserDto(newUser, tenantEntities, policyEntities);
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ return entityFactory.createUserEntity(newUserDto, dtoFactory.createRevisionDTO(new FlowModification(revision, creator)), permissions);
+ }
+
+ private ComponentReferenceEntity createComponentReferenceEntity(final String resource) {
+ ComponentReferenceEntity componentReferenceEntity = null;
+ try {
+ // get the component authorizable
+ Authorizable componentAuthorizable = authorizableLookup.getAuthorizableFromResource(resource);
+
+ // if this represents an authorizable whose policy permissions are enforced through the base resource,
+ // get the underlying base authorizable for the component reference
+ if (componentAuthorizable instanceof EnforcePolicyPermissionsThroughBaseResource) {
+ componentAuthorizable = ((EnforcePolicyPermissionsThroughBaseResource) componentAuthorizable).getBaseAuthorizable();
+ }
+
+ final ComponentReferenceDTO componentReference = dtoFactory.createComponentReferenceDto(componentAuthorizable);
+ if (componentReference != null) {
+ final PermissionsDTO componentReferencePermissions = dtoFactory.createPermissionsDto(componentAuthorizable);
+ final RevisionDTO componentReferenceRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(componentReference.getId()));
+ componentReferenceEntity = entityFactory.createComponentReferenceEntity(componentReference, componentReferenceRevision, componentReferencePermissions);
+ }
+ } catch (final ResourceNotFoundException e) {
+ // component not found for the specified resource
+ }
+
+ return componentReferenceEntity;
+ }
+
+ private AccessPolicySummaryEntity createAccessPolicySummaryEntity(final AccessPolicy ap) {
+ final ComponentReferenceEntity componentReference = createComponentReferenceEntity(ap.getResource());
+ final AccessPolicySummaryDTO apSummary = dtoFactory.createAccessPolicySummaryDto(ap, componentReference);
+ final PermissionsDTO apPermissions = dtoFactory.createPermissionsDto(authorizableLookup.getAccessPolicyById(ap.getIdentifier()));
+ final RevisionDTO apRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(ap.getIdentifier()));
+ return entityFactory.createAccessPolicySummaryEntity(apSummary, apRevision, apPermissions);
+ }
+
+ @Override
+ public UserGroupEntity createUserGroup(final Revision revision, final UserGroupDTO userGroupDTO) {
+ final String creator = NiFiUserUtils.getNiFiUserIdentity();
+ final Group newUserGroup = userGroupDAO.createUserGroup(userGroupDTO);
+ final Set<TenantEntity> tenantEntities = newUserGroup.getUsers().stream().map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = userGroupDAO.getAccessPoliciesForUserGroup(newUserGroup.getIdentifier()).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ final UserGroupDTO newUserGroupDto = dtoFactory.createUserGroupDto(newUserGroup, tenantEntities, policyEntities);
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ return entityFactory.createUserGroupEntity(newUserGroupDto, dtoFactory.createRevisionDTO(new FlowModification(revision, creator)), permissions);
+ }
+
+ private void validateSnippetContents(final FlowSnippetDTO flow) {
+ // validate any processors
+ if (flow.getProcessors() != null) {
+ for (final ProcessorDTO processorDTO : flow.getProcessors()) {
+ final ProcessorNode processorNode = processorDAO.getProcessor(processorDTO.getId());
+ processorDTO.setValidationStatus(processorNode.getValidationStatus().name());
+
+ final Collection<ValidationResult> validationErrors = processorNode.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+ processorDTO.setValidationErrors(errors);
+ }
+ }
+ }
+
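+        // validate any input ports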
+ if (flow.getInputPorts() != null) {
+ for (final PortDTO portDTO : flow.getInputPorts()) {
+ final Port port = inputPortDAO.getPort(portDTO.getId());
+ final Collection<ValidationResult> validationErrors = port.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+ portDTO.setValidationErrors(errors);
+ }
+ }
+ }
+
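+        // validate any output ports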
+ if (flow.getOutputPorts() != null) {
+ for (final PortDTO portDTO : flow.getOutputPorts()) {
+ final Port port = outputPortDAO.getPort(portDTO.getId());
+ final Collection<ValidationResult> validationErrors = port.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+ portDTO.setValidationErrors(errors);
+ }
+ }
+ }
+
+ // get any remote process group issues
+ if (flow.getRemoteProcessGroups() != null) {
+ for (final RemoteProcessGroupDTO remoteProcessGroupDTO : flow.getRemoteProcessGroups()) {
+ final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupDTO.getId());
+
+ if (remoteProcessGroup.getAuthorizationIssue() != null) {
+ remoteProcessGroupDTO.setAuthorizationIssues(Arrays.asList(remoteProcessGroup.getAuthorizationIssue()));
+ }
+ }
+ }
+ }
+
+ @Override
+ public FlowEntity copySnippet(final String groupId, final String snippetId, final Double originX, final Double originY, final String idGenerationSeed) {
+ // create the new snippet
+ final FlowSnippetDTO snippet = snippetDAO.copySnippet(groupId, snippetId, originX, originY, idGenerationSeed);
+
+ // save the flow
+ controllerFacade.save();
+
+ // drop the snippet
+ snippetDAO.dropSnippet(snippetId);
+
+ // post process new flow snippet
+ final FlowDTO flowDto = postProcessNewFlowSnippet(groupId, snippet);
+
+ final FlowEntity flowEntity = new FlowEntity();
+ flowEntity.setFlow(flowDto);
+ return flowEntity;
+ }
+
+ @Override
+ public SnippetEntity createSnippet(final SnippetDTO snippetDTO) {
+ // add the component
+ final Snippet snippet = snippetDAO.createSnippet(snippetDTO);
+
+ // save the flow
+ controllerFacade.save();
+
+ final SnippetDTO dto = dtoFactory.createSnippetDto(snippet);
+ final RevisionUpdate<SnippetDTO> snapshot = new StandardRevisionUpdate<>(dto, null);
+
+ return entityFactory.createSnippetEntity(snapshot.getComponent());
+ }
+
+ @Override
+ public PortEntity createInputPort(final Revision revision, final String groupId, final PortDTO inputPortDTO) {
+ final RevisionUpdate<PortDTO> snapshot = createComponent(
+ revision,
+ inputPortDTO,
+ () -> inputPortDAO.createPort(groupId, inputPortDTO),
+ port -> dtoFactory.createPortDto(port));
+
+ final Port port = inputPortDAO.getPort(inputPortDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port));
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getInputPortStatus(port.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(port.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public PortEntity createOutputPort(final Revision revision, final String groupId, final PortDTO outputPortDTO) {
+ final RevisionUpdate<PortDTO> snapshot = createComponent(
+ revision,
+ outputPortDTO,
+ () -> outputPortDAO.createPort(groupId, outputPortDTO),
+ port -> dtoFactory.createPortDto(port));
+
+ final Port port = outputPortDAO.getPort(outputPortDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port));
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getOutputPortStatus(port.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(port.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public ProcessGroupEntity createProcessGroup(final Revision revision, final String parentGroupId, final ProcessGroupDTO processGroupDTO) {
+ final RevisionUpdate<ProcessGroupDTO> snapshot = createComponent(
+ revision,
+ processGroupDTO,
+ () -> processGroupDAO.createProcessGroup(parentGroupId, processGroupDTO),
+ processGroup -> dtoFactory.createProcessGroupDto(processGroup));
+
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(processGroupDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ final ProcessGroupStatusDTO status = dtoFactory.createConciseProcessGroupStatusDto(controllerFacade.getProcessGroupStatus(processGroup.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processGroup.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessGroupEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, status, bulletinEntities);
+ }
+
+ @Override
+ public RemoteProcessGroupEntity createRemoteProcessGroup(final Revision revision, final String groupId, final RemoteProcessGroupDTO remoteProcessGroupDTO) {
+ final RevisionUpdate<RemoteProcessGroupDTO> snapshot = createComponent(
+ revision,
+ remoteProcessGroupDTO,
+ () -> remoteProcessGroupDAO.createRemoteProcessGroup(groupId, remoteProcessGroupDTO),
+ remoteProcessGroup -> dtoFactory.createRemoteProcessGroupDto(remoteProcessGroup));
+
+ final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroup);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(remoteProcessGroup));
+ final RemoteProcessGroupStatusDTO status = dtoFactory.createRemoteProcessGroupStatusDto(remoteProcessGroup, controllerFacade.getRemoteProcessGroupStatus(remoteProcessGroup.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(remoteProcessGroup.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createRemoteProcessGroupEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()),
+ permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public boolean isRemoteGroupPortConnected(final String remoteProcessGroupId, final String remotePortId) {
+ final RemoteProcessGroup rpg = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupId);
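+        // a remote input port is connected when a local connection feeds it; a remote output port is connected when a local connection consumes from it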
+ RemoteGroupPort port = rpg.getInputPort(remotePortId);
+ if (port != null) {
+ return port.hasIncomingConnection();
+ }
+
+ port = rpg.getOutputPort(remotePortId);
+ if (port != null) {
+ return !port.getConnections().isEmpty();
+ }
+
+ throw new ResourceNotFoundException("Could not find Port with ID " + remotePortId + " as a child of RemoteProcessGroup with ID " + remoteProcessGroupId);
+ }
+
+ @Override
+ public void verifyCanAddTemplate(String groupId, String name) {
+ templateDAO.verifyCanAddTemplate(name, groupId);
+ }
+
+ @Override
+ public void verifyComponentTypes(FlowSnippetDTO snippet) {
+ templateDAO.verifyComponentTypes(snippet);
+ }
+
+ @Override
+ public void verifyComponentTypes(final VersionedProcessGroup versionedGroup) {
+ controllerFacade.verifyComponentTypes(versionedGroup);
+ }
+
+ @Override
+ public void verifyImportProcessGroup(final VersionControlInformationDTO versionControlInfo, final VersionedProcessGroup contents, final String groupId) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ verifyImportProcessGroup(versionControlInfo, contents, group);
+ }
+
+ private void verifyImportProcessGroup(final VersionControlInformationDTO vciDto, final VersionedProcessGroup contents, final ProcessGroup group) {
+ if (group == null) {
+ return;
+ }
+
+ final VersionControlInformation vci = group.getVersionControlInformation();
+ if (vci != null) {
+ // Note that we do not compare the Registry ID here because there could be two registry clients
+            // that point to the same server (one could point to localhost while another points to 127.0.0.1, for instance).
+ if (Objects.equals(vciDto.getBucketId(), vci.getBucketIdentifier())
+ && Objects.equals(vciDto.getFlowId(), vci.getFlowIdentifier())) {
+
+ throw new IllegalStateException("Cannot import the specified Versioned Flow into the Process Group because doing so would cause a recursive dataflow. "
+ + "If Process Group A contains Process Group B, then Process Group B is not allowed to contain the flow identified by Process Group A.");
+ }
+ }
+
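+        // the imported contents may themselves reference other versioned flows; verify each of those coordinates against this group as well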
+ final Set<VersionedProcessGroup> childGroups = contents.getProcessGroups();
+ if (childGroups != null) {
+ for (final VersionedProcessGroup childGroup : childGroups) {
+ final VersionedFlowCoordinates childCoordinates = childGroup.getVersionedFlowCoordinates();
+ if (childCoordinates != null) {
+ final VersionControlInformationDTO childVci = new VersionControlInformationDTO();
+ childVci.setBucketId(childCoordinates.getBucketId());
+ childVci.setFlowId(childCoordinates.getFlowId());
+ verifyImportProcessGroup(childVci, childGroup, group);
+ }
+ }
+ }
+
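+        // walk up the ancestor chain so that the flow also cannot be imported anywhere beneath a group that tracks the same versioned flow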
+ verifyImportProcessGroup(vciDto, contents, group.getParent());
+ }
+
+ @Override
+ public TemplateDTO createTemplate(final String name, final String description, final String snippetId, final String groupId, final Optional<String> idGenerationSeed) {
+ // get the specified snippet
+ final Snippet snippet = snippetDAO.getSnippet(snippetId);
+
+ // create the template
+ final TemplateDTO templateDTO = new TemplateDTO();
+ templateDTO.setName(name);
+ templateDTO.setDescription(description);
+ templateDTO.setTimestamp(new Date());
+ templateDTO.setSnippet(snippetUtils.populateFlowSnippet(snippet, true, true, true));
+ templateDTO.setEncodingVersion(TemplateDTO.MAX_ENCODING_VERSION);
+
+ // set the id based on the specified seed
+ final String uuid = idGenerationSeed.isPresent() ? (UUID.nameUUIDFromBytes(idGenerationSeed.get().getBytes(StandardCharsets.UTF_8))).toString() : UUID.randomUUID().toString();
+ templateDTO.setId(uuid);
+
+ // create the template
+ final Template template = templateDAO.createTemplate(templateDTO, groupId);
+
+ // drop the snippet
+ snippetDAO.dropSnippet(snippetId);
+
+ // save the flow
+ controllerFacade.save();
+
+ return dtoFactory.createTemplateDTO(template);
+ }
+
+ /**
+     * Ensures default values are populated for all components in this snippet. This is necessary to handle old templates that were created
+     * without default values, as well as properties whose default values were introduced after the template was created.
+ *
+ * @param snippet snippet
+ */
+ private void ensureDefaultPropertyValuesArePopulated(final FlowSnippetDTO snippet) {
+ if (snippet != null) {
+ if (snippet.getControllerServices() != null) {
+ snippet.getControllerServices().forEach(dto -> {
+ if (dto.getProperties() == null) {
+ dto.setProperties(new LinkedHashMap<>());
+ }
+
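+                    // create a temporary, throwaway instance of the component type just to read its property descriptors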
+ try {
+ final ConfigurableComponent configurableComponent = controllerFacade.getTemporaryComponent(dto.getType(), dto.getBundle());
+ configurableComponent.getPropertyDescriptors().forEach(descriptor -> {
+ if (dto.getProperties().get(descriptor.getName()) == null) {
+ dto.getProperties().put(descriptor.getName(), descriptor.getDefaultValue());
+ }
+ });
+ } catch (final Exception e) {
+ logger.warn(String.format("Unable to create ControllerService of type %s to populate default values.", dto.getType()));
+ }
+ });
+ }
+
+ if (snippet.getProcessors() != null) {
+ snippet.getProcessors().forEach(dto -> {
+ if (dto.getConfig() == null) {
+ dto.setConfig(new ProcessorConfigDTO());
+ }
+
+ final ProcessorConfigDTO config = dto.getConfig();
+ if (config.getProperties() == null) {
+ config.setProperties(new LinkedHashMap<>());
+ }
+
+ try {
+ final ConfigurableComponent configurableComponent = controllerFacade.getTemporaryComponent(dto.getType(), dto.getBundle());
+ configurableComponent.getPropertyDescriptors().forEach(descriptor -> {
+ if (config.getProperties().get(descriptor.getName()) == null) {
+ config.getProperties().put(descriptor.getName(), descriptor.getDefaultValue());
+ }
+ });
+ } catch (final Exception e) {
+ logger.warn(String.format("Unable to create Processor of type %s to populate default values.", dto.getType()));
+ }
+ });
+ }
+
+ if (snippet.getProcessGroups() != null) {
+ snippet.getProcessGroups().forEach(processGroup -> {
+ ensureDefaultPropertyValuesArePopulated(processGroup.getContents());
+ });
+ }
+ }
+ }
+
+ @Override
+ public TemplateDTO importTemplate(final TemplateDTO templateDTO, final String groupId, final Optional<String> idGenerationSeed) {
+ // ensure id is set
+ final String uuid = idGenerationSeed.isPresent() ? (UUID.nameUUIDFromBytes(idGenerationSeed.get().getBytes(StandardCharsets.UTF_8))).toString() : UUID.randomUUID().toString();
+ templateDTO.setId(uuid);
+
+ // mark the timestamp
+ templateDTO.setTimestamp(new Date());
+
+ // ensure default values are populated
+ ensureDefaultPropertyValuesArePopulated(templateDTO.getSnippet());
+
+ // import the template
+ final Template template = templateDAO.importTemplate(templateDTO, groupId);
+
+ // save the flow
+ controllerFacade.save();
+
+ // return the template dto
+ return dtoFactory.createTemplateDTO(template);
+ }
+
+ /**
+     * Post-processes a new flow snippet, including validating its contents and converting it to a flow DTO.
+ *
+ * @param groupId group id
+ * @param snippet snippet
+ * @return flow dto
+ */
+ private FlowDTO postProcessNewFlowSnippet(final String groupId, final FlowSnippetDTO snippet) {
+ // validate the new snippet
+ validateSnippetContents(snippet);
+
+ // identify all components added
+ final Set<String> identifiers = new HashSet<>();
+ snippet.getProcessors().stream()
+ .map(proc -> proc.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getConnections().stream()
+ .map(conn -> conn.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getInputPorts().stream()
+ .map(port -> port.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getOutputPorts().stream()
+ .map(port -> port.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getProcessGroups().stream()
+ .map(group -> group.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getRemoteProcessGroups().stream()
+ .map(remoteGroup -> remoteGroup.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getRemoteProcessGroups().stream()
+ .filter(remoteGroup -> remoteGroup.getContents() != null && remoteGroup.getContents().getInputPorts() != null)
+ .flatMap(remoteGroup -> remoteGroup.getContents().getInputPorts().stream())
+ .map(remoteInputPort -> remoteInputPort.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getRemoteProcessGroups().stream()
+ .filter(remoteGroup -> remoteGroup.getContents() != null && remoteGroup.getContents().getOutputPorts() != null)
+ .flatMap(remoteGroup -> remoteGroup.getContents().getOutputPorts().stream())
+ .map(remoteOutputPort -> remoteOutputPort.getId())
+ .forEach(id -> identifiers.add(id));
+ snippet.getLabels().stream()
+ .map(label -> label.getId())
+ .forEach(id -> identifiers.add(id));
+
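+        // convert the snippet to a flow DTO, pulling in the status and bulletins of the enclosing group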
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ final ProcessGroupStatus groupStatus = controllerFacade.getProcessGroupStatus(groupId);
+ return dtoFactory.createFlowDto(group, groupStatus, snippet, revisionManager, this::getProcessGroupBulletins);
+ }
+
+ @Override
+ public FlowEntity createTemplateInstance(final String groupId, final Double originX, final Double originY, final String templateEncodingVersion,
+ final FlowSnippetDTO requestSnippet, final String idGenerationSeed) {
+
+        // instantiate the template - there is no need to make another copy of the flow snippet since the actual template
+        // was copied and this dto is only used to instantiate its components (which was already completed)
+ final FlowSnippetDTO snippet = templateDAO.instantiateTemplate(groupId, originX, originY, templateEncodingVersion, requestSnippet, idGenerationSeed);
+
+ // save the flow
+ controllerFacade.save();
+
+ // post process the new flow snippet
+ final FlowDTO flowDto = postProcessNewFlowSnippet(groupId, snippet);
+
+ final FlowEntity flowEntity = new FlowEntity();
+ flowEntity.setFlow(flowDto);
+ return flowEntity;
+ }
+
+ @Override
+ public ControllerServiceEntity createControllerService(final Revision revision, final String groupId, final ControllerServiceDTO controllerServiceDTO) {
+ controllerServiceDTO.setParentGroupId(groupId);
+
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ // request claim for component to be created... revision already verified (version == 0)
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+
+ final RevisionUpdate<ControllerServiceDTO> snapshot;
+ if (groupId == null) {
+ // update revision through revision manager
+ snapshot = revisionManager.updateRevision(claim, user, () -> {
+                // Unfortunately, we cannot use the createComponent() method here because createComponent() wants to obtain the read lock
+                // on the group. The Controller Service may or may not have a Process Group (it won't if it's controller-scoped).
+ final ControllerServiceNode controllerService = controllerServiceDAO.createControllerService(controllerServiceDTO);
+ controllerFacade.save();
+
+ awaitValidationCompletion(controllerService);
+ final ControllerServiceDTO dto = dtoFactory.createControllerServiceDto(controllerService);
+
+ final FlowModification lastMod = new FlowModification(revision.incrementRevision(revision.getClientId()), user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastMod);
+ });
+ } else {
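+            // group-scoped service: same flow as the controller-scoped branch above, but the service belongs to a process group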
+ snapshot = revisionManager.updateRevision(claim, user, () -> {
+ final ControllerServiceNode controllerService = controllerServiceDAO.createControllerService(controllerServiceDTO);
+ controllerFacade.save();
+
+ awaitValidationCompletion(controllerService);
+ final ControllerServiceDTO dto = dtoFactory.createControllerServiceDto(controllerService);
+
+ final FlowModification lastMod = new FlowModification(revision.incrementRevision(revision.getClientId()), user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastMod);
+ });
+ }
+
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(controllerService);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(controllerService));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(controllerServiceDTO.getId()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createControllerServiceEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, bulletinEntities);
+ }
+
+ @Override
+ public ControllerServiceEntity updateControllerService(final Revision revision, final ControllerServiceDTO controllerServiceDTO) {
+ // get the component, ensure we have access to it, and perform the update request
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceDTO.getId());
+ final RevisionUpdate<ControllerServiceDTO> snapshot = updateComponent(revision,
+ controllerService,
+ () -> controllerServiceDAO.updateControllerService(controllerServiceDTO),
+ cs -> {
+ awaitValidationCompletion(cs);
+ final ControllerServiceDTO dto = dtoFactory.createControllerServiceDto(cs);
+ final ControllerServiceReference ref = controllerService.getReferences();
+ final ControllerServiceReferencingComponentsEntity referencingComponentsEntity =
+ createControllerServiceReferencingComponentsEntity(ref, Sets.newHashSet(controllerService.getIdentifier()));
+ dto.setReferencingComponents(referencingComponentsEntity.getControllerServiceReferencingComponents());
+ return dto;
+ });
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(controllerService);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(controllerService));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(controllerServiceDTO.getId()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createControllerServiceEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, bulletinEntities);
+ }
+
+
+ @Override
+ public ControllerServiceReferencingComponentsEntity updateControllerServiceReferencingComponents(
+ final Map<String, Revision> referenceRevisions, final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
+
+ final RevisionClaim claim = new StandardRevisionClaim(referenceRevisions.values());
+
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final RevisionUpdate<ControllerServiceReferencingComponentsEntity> update = revisionManager.updateRevision(claim, user,
+ new UpdateRevisionTask<ControllerServiceReferencingComponentsEntity>() {
+ @Override
+ public RevisionUpdate<ControllerServiceReferencingComponentsEntity> update() {
+ final Set<ComponentNode> updated = controllerServiceDAO.updateControllerServiceReferencingComponents(controllerServiceId, scheduledState, controllerServiceState);
+ final ControllerServiceReference updatedReference = controllerServiceDAO.getControllerService(controllerServiceId).getReferences();
+
+ // get the revisions of the updated components
+ final Map<String, Revision> updatedRevisions = new HashMap<>();
+ for (final ComponentNode component : updated) {
+ final Revision currentRevision = revisionManager.getRevision(component.getIdentifier());
+ final Revision requestRevision = referenceRevisions.get(component.getIdentifier());
+ updatedRevisions.put(component.getIdentifier(), currentRevision.incrementRevision(requestRevision.getClientId()));
+ }
+
+ // ensure the revision for all referencing components is included regardless of whether they were updated in this request
+ for (final ComponentNode component : updatedReference.findRecursiveReferences(ComponentNode.class)) {
+ updatedRevisions.putIfAbsent(component.getIdentifier(), revisionManager.getRevision(component.getIdentifier()));
+ }
+
+ final ControllerServiceReferencingComponentsEntity entity = createControllerServiceReferencingComponentsEntity(updatedReference, updatedRevisions);
+ return new StandardRevisionUpdate<>(entity, null, new HashSet<>(updatedRevisions.values()));
+ }
+ });
+
+ return update.getComponent();
+ }
+
+ /**
+     * Recursively collects the ControllerServices that reference the given ControllerService, directly or through other ControllerServices.
+ *
+ * @param reference ControllerServiceReference
+ * @param visited ControllerServices we've already visited
+ */
+ private void findControllerServiceReferencingComponentIdentifiers(final ControllerServiceReference reference, final Set<ControllerServiceNode> visited) {
+ for (final ComponentNode component : reference.getReferencingComponents()) {
+
+            // if this is a ControllerService, consider its referencing components
+ if (component instanceof ControllerServiceNode) {
+ final ControllerServiceNode node = (ControllerServiceNode) component;
+ if (!visited.contains(node)) {
+ visited.add(node);
+ findControllerServiceReferencingComponentIdentifiers(node.getReferences(), visited);
+ }
+ }
+ }
+ }
+
+ /**
+ * Creates entities for components referencing a ControllerService using their current revision.
+ *
+ * @param reference ControllerServiceReference
+ * @return The entity
+ */
+ private ControllerServiceReferencingComponentsEntity createControllerServiceReferencingComponentsEntity(final ControllerServiceReference reference, final Set<String> lockedIds) {
+ final Set<ControllerServiceNode> visited = new HashSet<>();
+ visited.add(reference.getReferencedComponent());
+ findControllerServiceReferencingComponentIdentifiers(reference, visited);
+
+ final Map<String, Revision> referencingRevisions = new HashMap<>();
+ for (final ComponentNode component : reference.getReferencingComponents()) {
+ referencingRevisions.put(component.getIdentifier(), revisionManager.getRevision(component.getIdentifier()));
+ }
+
+ return createControllerServiceReferencingComponentsEntity(reference, referencingRevisions);
+ }
+
+ /**
+ * Creates entities for components referencing a ControllerService using the specified revisions.
+ *
+ * @param reference ControllerServiceReference
+ * @param revisions The revisions
+ * @return The entity
+ */
+ private ControllerServiceReferencingComponentsEntity createControllerServiceReferencingComponentsEntity(
+ final ControllerServiceReference reference, final Map<String, Revision> revisions) {
+ final Set<ControllerServiceNode> visited = new HashSet<>();
+ visited.add(reference.getReferencedComponent());
+ return createControllerServiceReferencingComponentsEntity(reference, revisions, visited);
+ }
+
+ /**
+     * Creates entities for components referencing a ControllerService using the specified revisions.
+ *
+ * @param reference ControllerServiceReference
+ * @param revisions The revisions
+ * @param visited Which services we've already considered (in case of cycle)
+ * @return The entity
+ */
+ private ControllerServiceReferencingComponentsEntity createControllerServiceReferencingComponentsEntity(
+ final ControllerServiceReference reference, final Map<String, Revision> revisions, final Set<ControllerServiceNode> visited) {
+
+ final String modifier = NiFiUserUtils.getNiFiUserIdentity();
+ final Set<ComponentNode> referencingComponents = reference.getReferencingComponents();
+
+ final Set<ControllerServiceReferencingComponentEntity> componentEntities = new HashSet<>();
+ for (final ComponentNode refComponent : referencingComponents) {
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(refComponent);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(refComponent));
+
+ final Revision revision = revisions.get(refComponent.getIdentifier());
+ final FlowModification flowMod = new FlowModification(revision, modifier);
+ final RevisionDTO revisionDto = dtoFactory.createRevisionDTO(flowMod);
+ final ControllerServiceReferencingComponentDTO dto = dtoFactory.createControllerServiceReferencingComponentDTO(refComponent);
+
+ if (refComponent instanceof ControllerServiceNode) {
+ final ControllerServiceNode node = (ControllerServiceNode) refComponent;
+
+ // indicate if we've hit a cycle
+ dto.setReferenceCycle(visited.contains(node));
+
+                // mark node as visited before recursing so that reference cycles terminate
+                visited.add(node);
+
+                // if we haven't encountered this service before, include its referencing components
+ if (!dto.getReferenceCycle()) {
+ final ControllerServiceReference refReferences = node.getReferences();
+ final Map<String, Revision> referencingRevisions = new HashMap<>(revisions);
+ for (final ComponentNode component : refReferences.getReferencingComponents()) {
+ referencingRevisions.putIfAbsent(component.getIdentifier(), revisionManager.getRevision(component.getIdentifier()));
+ }
+ final ControllerServiceReferencingComponentsEntity references = createControllerServiceReferencingComponentsEntity(refReferences, referencingRevisions, visited);
+ dto.setReferencingComponents(references.getControllerServiceReferencingComponents());
+ }
+ }
+
+ componentEntities.add(entityFactory.createControllerServiceReferencingComponentEntity(refComponent.getIdentifier(), dto, revisionDto, permissions, operatePermissions));
+ }
+
+ final ControllerServiceReferencingComponentsEntity entity = new ControllerServiceReferencingComponentsEntity();
+ entity.setControllerServiceReferencingComponents(componentEntities);
+ return entity;
+ }
+
+ @Override
+ public ControllerServiceEntity deleteControllerService(final Revision revision, final String controllerServiceId) {
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(controllerService);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(controllerService));
+ final ControllerServiceDTO snapshot = deleteComponent(
+ revision,
+ controllerService.getResource(),
+ () -> controllerServiceDAO.deleteControllerService(controllerServiceId),
+ true,
+ dtoFactory.createControllerServiceDto(controllerService));
+
+ return entityFactory.createControllerServiceEntity(snapshot, null, permissions, operatePermissions, null);
+ }
+
+
+ @Override
+ public RegistryClientEntity createRegistryClient(Revision revision, RegistryDTO registryDTO) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ // request claim for component to be created... revision already verified (version == 0)
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+
+ // update revision through revision manager
+ final RevisionUpdate<FlowRegistry> revisionUpdate = revisionManager.updateRevision(claim, user, () -> {
+ // add the component
+ final FlowRegistry registry = registryDAO.createFlowRegistry(registryDTO);
+
+ // save the flow
+ controllerFacade.save();
+
+ final FlowModification lastMod = new FlowModification(revision.incrementRevision(revision.getClientId()), user.getIdentity());
+ return new StandardRevisionUpdate<>(registry, lastMod);
+ });
+
+ final FlowRegistry registry = revisionUpdate.getComponent();
+ return createRegistryClientEntity(registry);
+ }
+
+ @Override
+ public RegistryClientEntity getRegistryClient(final String registryId) {
+ final FlowRegistry registry = registryDAO.getFlowRegistry(registryId);
+ return createRegistryClientEntity(registry);
+ }
+
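+    /**
+     * Builds a RegistryClientEntity for the given flow registry, combining its DTO with the current
+     * revision and the user's permissions on the controller; returns null when no registry is provided.
+     */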
+ private RegistryClientEntity createRegistryClientEntity(final FlowRegistry flowRegistry) {
+ if (flowRegistry == null) {
+ return null;
+ }
+
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(flowRegistry.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getController());
+ final RegistryDTO dto = dtoFactory.createRegistryDto(flowRegistry);
+
+ return entityFactory.createRegistryClientEntity(dto, revision, permissions);
+ }
+
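+    /**
+     * Maps a VersionedFlow from the given registry to a VersionedFlowEntity, copying the bucket id,
+     * flow id, name and description; returns null when no flow is provided.
+     */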
+ private VersionedFlowEntity createVersionedFlowEntity(final String registryId, final VersionedFlow versionedFlow) {
+ if (versionedFlow == null) {
+ return null;
+ }
+
+ final VersionedFlowDTO dto = new VersionedFlowDTO();
+ dto.setRegistryId(registryId);
+ dto.setBucketId(versionedFlow.getBucketIdentifier());
+ dto.setFlowId(versionedFlow.getIdentifier());
+ dto.setFlowName(versionedFlow.getName());
+ dto.setDescription(versionedFlow.getDescription());
+
+ final VersionedFlowEntity entity = new VersionedFlowEntity();
+ entity.setVersionedFlow(dto);
+
+ return entity;
+ }
+
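+    /**
+     * Wraps versioned flow snapshot metadata, together with the id of the registry it came from,
+     * in an entity; returns null when no metadata is provided.
+     */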
+ private VersionedFlowSnapshotMetadataEntity createVersionedFlowSnapshotMetadataEntity(final String registryId, final VersionedFlowSnapshotMetadata metadata) {
+ if (metadata == null) {
+ return null;
+ }
+
+ final VersionedFlowSnapshotMetadataEntity entity = new VersionedFlowSnapshotMetadataEntity();
+ entity.setRegistryId(registryId);
+ entity.setVersionedFlowMetadata(metadata);
+
+ return entity;
+ }
+
+ @Override
+ public Set<RegistryClientEntity> getRegistryClients() {
+ return registryDAO.getFlowRegistries().stream()
+ .map(this::createRegistryClientEntity)
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<RegistryEntity> getRegistriesForUser(final NiFiUser user) {
+ return registryDAO.getFlowRegistriesForUser(user).stream()
+ .map(flowRegistry -> entityFactory.createRegistryEntity(dtoFactory.createRegistryDto(flowRegistry)))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<BucketEntity> getBucketsForUser(final String registryId, final NiFiUser user) {
+ return registryDAO.getBucketsForUser(registryId, user).stream()
+ .map(bucket -> {
+ if (bucket == null) {
+ return null;
+ }
+
+ final BucketDTO dto = new BucketDTO();
+ dto.setId(bucket.getIdentifier());
+ dto.setName(bucket.getName());
+ dto.setDescription(bucket.getDescription());
+ dto.setCreated(bucket.getCreatedTimestamp());
+
+ final Permissions regPermissions = bucket.getPermissions();
+ final PermissionsDTO permissions = new PermissionsDTO();
+ permissions.setCanRead(regPermissions.getCanRead());
+ permissions.setCanWrite(regPermissions.getCanWrite());
+
+ return entityFactory.createBucketEntity(dto, permissions);
+ })
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<VersionedFlowEntity> getFlowsForUser(String registryId, String bucketId, NiFiUser user) {
+ return registryDAO.getFlowsForUser(registryId, bucketId, user).stream()
+ .map(vf -> createVersionedFlowEntity(registryId, vf))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<VersionedFlowSnapshotMetadataEntity> getFlowVersionsForUser(String registryId, String bucketId, String flowId, NiFiUser user) {
+ return registryDAO.getFlowVersionsForUser(registryId, bucketId, flowId, user).stream()
+ .map(md -> createVersionedFlowSnapshotMetadataEntity(registryId, md))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public RegistryClientEntity updateRegistryClient(Revision revision, RegistryDTO registryDTO) {
+ final RevisionClaim revisionClaim = new StandardRevisionClaim(revision);
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final FlowRegistry registry = registryDAO.getFlowRegistry(registryDTO.getId());
+ final RevisionUpdate<FlowRegistry> revisionUpdate = revisionManager.updateRevision(revisionClaim, user, () -> {
+ final boolean duplicateName = registryDAO.getFlowRegistries().stream()
+ .anyMatch(reg -> reg.getName().equals(registryDTO.getName()) && !reg.getIdentifier().equals(registryDTO.getId()));
+
+ if (duplicateName) {
+ throw new IllegalStateException("Cannot update Flow Registry because a Flow Registry already exists with the name " + registryDTO.getName());
+ }
+
+ registry.setDescription(registryDTO.getDescription());
+ registry.setName(registryDTO.getName());
+ registry.setURL(registryDTO.getUri());
+
+ controllerFacade.save();
+
+ final Revision updatedRevision = revisionManager.getRevision(revision.getComponentId()).incrementRevision(revision.getClientId());
+ final FlowModification lastModification = new FlowModification(updatedRevision, user.getIdentity());
+
+ return new StandardRevisionUpdate<>(registry, lastModification);
+ });
+
+ final FlowRegistry updatedReg = revisionUpdate.getComponent();
+ return createRegistryClientEntity(updatedReg);
+ }
+
+ @Override
+ public void verifyDeleteRegistry(String registryId) {
+ processGroupDAO.verifyDeleteFlowRegistry(registryId);
+ }
+
+ @Override
+ public RegistryClientEntity deleteRegistryClient(final Revision revision, final String registryId) {
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final FlowRegistry registry = revisionManager.deleteRevision(claim, user, () -> {
+ final FlowRegistry reg = registryDAO.removeFlowRegistry(registryId);
+ controllerFacade.save();
+ return reg;
+ });
+
+ return createRegistryClientEntity(registry);
+ }
+
+ @Override
+ public ReportingTaskEntity createReportingTask(final Revision revision, final ReportingTaskDTO reportingTaskDTO) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ // request claim for component to be created... revision already verified (version == 0)
+ final RevisionClaim claim = new StandardRevisionClaim(revision);
+
+ // update revision through revision manager
+ final RevisionUpdate<ReportingTaskDTO> snapshot = revisionManager.updateRevision(claim, user, () -> {
+ // create the reporting task
+ final ReportingTaskNode reportingTask = reportingTaskDAO.createReportingTask(reportingTaskDTO);
+
+ // save the update
+ controllerFacade.save();
+ awaitValidationCompletion(reportingTask);
+
+ final ReportingTaskDTO dto = dtoFactory.createReportingTaskDto(reportingTask);
+ final FlowModification lastMod = new FlowModification(revision.incrementRevision(revision.getClientId()), user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastMod);
+ });
+
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskDTO.getId());
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(reportingTask);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(reportingTask));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(reportingTask.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createReportingTaskEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, bulletinEntities);
+ }
+
+ @Override
+ public ReportingTaskEntity updateReportingTask(final Revision revision, final ReportingTaskDTO reportingTaskDTO) {
+ // get the component, ensure we have access to it, and perform the update request
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskDTO.getId());
+ final RevisionUpdate<ReportingTaskDTO> snapshot = updateComponent(revision,
+ reportingTask,
+ () -> reportingTaskDAO.updateReportingTask(reportingTaskDTO),
+ rt -> {
+ awaitValidationCompletion(rt);
+ return dtoFactory.createReportingTaskDto(rt);
+ });
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(reportingTask);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(reportingTask));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(reportingTask.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createReportingTaskEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()), permissions, operatePermissions, bulletinEntities);
+ }
+
+ @Override
+ public ReportingTaskEntity deleteReportingTask(final Revision revision, final String reportingTaskId) {
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(reportingTask);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(reportingTask));
+ final ReportingTaskDTO snapshot = deleteComponent(
+ revision,
+ reportingTask.getResource(),
+ () -> reportingTaskDAO.deleteReportingTask(reportingTaskId),
+ true,
+ dtoFactory.createReportingTaskDto(reportingTask));
+
+ return entityFactory.createReportingTaskEntity(snapshot, null, permissions, operatePermissions, null);
+ }
+
+ @Override
+ public void deleteActions(final Date endDate) {
+ // get the user from the request
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ if (user == null) {
+ throw new WebApplicationException(new Throwable("Unable to access details for current user."));
+ }
+
+ // create the purge details
+ final FlowChangePurgeDetails details = new FlowChangePurgeDetails();
+ details.setEndDate(endDate);
+
+        // create a purge action documenting that action records are being removed
+ final FlowChangeAction purgeAction = new FlowChangeAction();
+ purgeAction.setUserIdentity(user.getIdentity());
+ purgeAction.setOperation(Operation.Purge);
+ purgeAction.setTimestamp(new Date());
+ purgeAction.setSourceId("Flow Controller");
+ purgeAction.setSourceName("History");
+ purgeAction.setSourceType(Component.Controller);
+ purgeAction.setActionDetails(details);
+
+ // purge corresponding actions
+ auditService.purgeActions(endDate, purgeAction);
+ }
+
+ @Override
+ public ProvenanceDTO submitProvenance(final ProvenanceDTO query) {
+ return controllerFacade.submitProvenance(query);
+ }
+
+ @Override
+ public void deleteProvenance(final String queryId) {
+ controllerFacade.deleteProvenanceQuery(queryId);
+ }
+
+ @Override
+ public LineageDTO submitLineage(final LineageDTO lineage) {
+ return controllerFacade.submitLineage(lineage);
+ }
+
+ @Override
+ public void deleteLineage(final String lineageId) {
+ controllerFacade.deleteLineage(lineageId);
+ }
+
+ @Override
+ public ProvenanceEventDTO submitReplay(final Long eventId) {
+ return controllerFacade.submitReplay(eventId);
+ }
+
+ // -----------------------------------------
+ // Read Operations
+ // -----------------------------------------
+
+ @Override
+ public SearchResultsDTO searchController(final String query) {
+ return controllerFacade.search(query);
+ }
+
+ @Override
+ public DownloadableContent getContent(final String connectionId, final String flowFileUuid, final String uri) {
+ return connectionDAO.getContent(connectionId, flowFileUuid, uri);
+ }
+
+ @Override
+ public DownloadableContent getContent(final Long eventId, final String uri, final ContentDirection contentDirection) {
+ return controllerFacade.getContent(eventId, uri, contentDirection);
+ }
+
+ @Override
+ public ProvenanceDTO getProvenance(final String queryId, final Boolean summarize, final Boolean incrementalResults) {
+ return controllerFacade.getProvenanceQuery(queryId, summarize, incrementalResults);
+ }
+
+ @Override
+ public LineageDTO getLineage(final String lineageId) {
+ return controllerFacade.getLineage(lineageId);
+ }
+
+ @Override
+ public ProvenanceOptionsDTO getProvenanceSearchOptions() {
+ return controllerFacade.getProvenanceSearchOptions();
+ }
+
+ @Override
+ public ProvenanceEventDTO getProvenanceEvent(final Long id) {
+ return controllerFacade.getProvenanceEvent(id);
+ }
+
+ @Override
+ public ProcessGroupStatusEntity getProcessGroupStatus(final String groupId, final boolean recursive) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ final ProcessGroupStatusDTO dto = dtoFactory.createProcessGroupStatusDto(processGroup, controllerFacade.getProcessGroupStatus(groupId));
+
+ // prune the response as necessary
+ if (!recursive) {
+ pruneChildGroups(dto.getAggregateSnapshot());
+ if (dto.getNodeSnapshots() != null) {
+ for (final NodeProcessGroupStatusSnapshotDTO nodeSnapshot : dto.getNodeSnapshots()) {
+ pruneChildGroups(nodeSnapshot.getStatusSnapshot());
+ }
+ }
+ }
+
+ return entityFactory.createProcessGroupStatusEntity(dto, permissions);
+ }
+
+ private void pruneChildGroups(final ProcessGroupStatusSnapshotDTO snapshot) {
+ for (final ProcessGroupStatusSnapshotEntity childProcessGroupStatusEntity : snapshot.getProcessGroupStatusSnapshots()) {
+ final ProcessGroupStatusSnapshotDTO childProcessGroupStatus = childProcessGroupStatusEntity.getProcessGroupStatusSnapshot();
+ childProcessGroupStatus.setConnectionStatusSnapshots(null);
+ childProcessGroupStatus.setProcessGroupStatusSnapshots(null);
+ childProcessGroupStatus.setInputPortStatusSnapshots(null);
+ childProcessGroupStatus.setOutputPortStatusSnapshots(null);
+ childProcessGroupStatus.setProcessorStatusSnapshots(null);
+ childProcessGroupStatus.setRemoteProcessGroupStatusSnapshots(null);
+ }
+ }
+
+ @Override
+ public ControllerStatusDTO getControllerStatus() {
+ return controllerFacade.getControllerStatus();
+ }
+
+ @Override
+ public ComponentStateDTO getProcessorState(final String processorId) {
+ final StateMap clusterState = isClustered() ? processorDAO.getState(processorId, Scope.CLUSTER) : null;
+ final StateMap localState = processorDAO.getState(processorId, Scope.LOCAL);
+
+        // the processor will be non-null since it was already found when getting the state
+ final ProcessorNode processor = processorDAO.getProcessor(processorId);
+ return dtoFactory.createComponentStateDTO(processorId, processor.getProcessor().getClass(), localState, clusterState);
+ }
+
+ @Override
+ public ComponentStateDTO getControllerServiceState(final String controllerServiceId) {
+ final StateMap clusterState = isClustered() ? controllerServiceDAO.getState(controllerServiceId, Scope.CLUSTER) : null;
+ final StateMap localState = controllerServiceDAO.getState(controllerServiceId, Scope.LOCAL);
+
+        // the controller service will be non-null since it was already found when getting the state
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceId);
+ return dtoFactory.createComponentStateDTO(controllerServiceId, controllerService.getControllerServiceImplementation().getClass(), localState, clusterState);
+ }
+
+ @Override
+ public ComponentStateDTO getReportingTaskState(final String reportingTaskId) {
+ final StateMap clusterState = isClustered() ? reportingTaskDAO.getState(reportingTaskId, Scope.CLUSTER) : null;
+ final StateMap localState = reportingTaskDAO.getState(reportingTaskId, Scope.LOCAL);
+
+        // the reporting task will be non-null since it was already found when getting the state
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskId);
+ return dtoFactory.createComponentStateDTO(reportingTaskId, reportingTask.getReportingTask().getClass(), localState, clusterState);
+ }
+
+ @Override
+ public CountersDTO getCounters() {
+ final List<Counter> counters = controllerFacade.getCounters();
+ final Set<CounterDTO> counterDTOs = new LinkedHashSet<>(counters.size());
+ for (final Counter counter : counters) {
+ counterDTOs.add(dtoFactory.createCounterDto(counter));
+ }
+
+ final CountersSnapshotDTO snapshotDto = dtoFactory.createCountersDto(counterDTOs);
+ final CountersDTO countersDto = new CountersDTO();
+ countersDto.setAggregateSnapshot(snapshotDto);
+
+ return countersDto;
+ }
+
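+    /**
+     * Assembles a ConnectionEntity from the connection DTO, its current revision, the user's
+     * permissions and the latest connection status.
+     */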
+ private ConnectionEntity createConnectionEntity(final Connection connection) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(connection.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connection);
+ final ConnectionStatusDTO status = dtoFactory.createConnectionStatusDto(controllerFacade.getConnectionStatus(connection.getIdentifier()));
+ return entityFactory.createConnectionEntity(dtoFactory.createConnectionDto(connection), revision, permissions, status);
+ }
+
+ @Override
+ public Set<ConnectionEntity> getConnections(final String groupId) {
+ final Set<Connection> connections = connectionDAO.getConnections(groupId);
+ return connections.stream()
+ .map(connection -> createConnectionEntity(connection))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public ConnectionEntity getConnection(final String connectionId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ return createConnectionEntity(connection);
+ }
+
+ @Override
+ public DropRequestDTO getFlowFileDropRequest(final String connectionId, final String dropRequestId) {
+ return dtoFactory.createDropRequestDTO(connectionDAO.getFlowFileDropRequest(connectionId, dropRequestId));
+ }
+
+ @Override
+ public ListingRequestDTO getFlowFileListingRequest(final String connectionId, final String listingRequestId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final ListingRequestDTO listRequest = dtoFactory.createListingRequestDTO(connectionDAO.getFlowFileListingRequest(connectionId, listingRequestId));
+
+ // include whether the source and destination are running
+ if (connection.getSource() != null) {
+ listRequest.setSourceRunning(connection.getSource().isRunning());
+ }
+ if (connection.getDestination() != null) {
+ listRequest.setDestinationRunning(connection.getDestination().isRunning());
+ }
+
+ return listRequest;
+ }
+
+ @Override
+ public FlowFileDTO getFlowFile(final String connectionId, final String flowFileUuid) {
+ return dtoFactory.createFlowFileDTO(connectionDAO.getFlowFile(connectionId, flowFileUuid));
+ }
+
+ @Override
+ public ConnectionStatusEntity getConnectionStatus(final String connectionId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connection);
+ final ConnectionStatusDTO dto = dtoFactory.createConnectionStatusDto(controllerFacade.getConnectionStatus(connectionId));
+ return entityFactory.createConnectionStatusEntity(dto, permissions);
+ }
+
+ @Override
+ public StatusHistoryEntity getConnectionStatusHistory(final String connectionId) {
+ final Connection connection = connectionDAO.getConnection(connectionId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(connection);
+ final StatusHistoryDTO dto = controllerFacade.getConnectionStatusHistory(connectionId);
+ return entityFactory.createStatusHistoryEntity(dto, permissions);
+ }
+
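+    /**
+     * Assembles a ProcessorEntity for the given user, including the revision, read and operate
+     * permissions, current processor status and any bulletins for the processor.
+     */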
+ private ProcessorEntity createProcessorEntity(final ProcessorNode processor, final NiFiUser user) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(processor.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processor, user);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(processor));
+ final ProcessorStatusDTO status = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(processor.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processor.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessorEntity(dtoFactory.createProcessorDto(processor), revision, permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public Set<ProcessorEntity> getProcessors(final String groupId, final boolean includeDescendants) {
+ final Set<ProcessorNode> processors = processorDAO.getProcessors(groupId, includeDescendants);
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ return processors.stream()
+ .map(processor -> createProcessorEntity(processor, user))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public TemplateDTO exportTemplate(final String id) {
+ final Template template = templateDAO.getTemplate(id);
+ final TemplateDTO templateDetails = template.getDetails();
+
+ final TemplateDTO templateDTO = dtoFactory.createTemplateDTO(template);
+ templateDTO.setSnippet(dtoFactory.copySnippetContents(templateDetails.getSnippet()));
+ return templateDTO;
+ }
+
+ @Override
+ public TemplateDTO getTemplate(final String id) {
+ return dtoFactory.createTemplateDTO(templateDAO.getTemplate(id));
+ }
+
+ @Override
+ public Set<TemplateEntity> getTemplates() {
+ return templateDAO.getTemplates().stream()
+ .map(template -> {
+ final TemplateDTO dto = dtoFactory.createTemplateDTO(template);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(template);
+
+ final TemplateEntity entity = new TemplateEntity();
+ entity.setId(dto.getId());
+ entity.setPermissions(permissions);
+ entity.setTemplate(dto);
+ return entity;
+ }).collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<DocumentedTypeDTO> getWorkQueuePrioritizerTypes() {
+ return controllerFacade.getFlowFileComparatorTypes();
+ }
+
+ @Override
+ public Set<DocumentedTypeDTO> getProcessorTypes(final String bundleGroup, final String bundleArtifact, final String type) {
+ return controllerFacade.getFlowFileProcessorTypes(bundleGroup, bundleArtifact, type);
+ }
+
+ @Override
+ public Set<DocumentedTypeDTO> getControllerServiceTypes(final String serviceType, final String serviceBundleGroup, final String serviceBundleArtifact, final String serviceBundleVersion,
+ final String bundleGroup, final String bundleArtifact, final String type) {
+ return controllerFacade.getControllerServiceTypes(serviceType, serviceBundleGroup, serviceBundleArtifact, serviceBundleVersion, bundleGroup, bundleArtifact, type);
+ }
+
+ @Override
+ public Set<DocumentedTypeDTO> getReportingTaskTypes(final String bundleGroup, final String bundleArtifact, final String type) {
+ return controllerFacade.getReportingTaskTypes(bundleGroup, bundleArtifact, type);
+ }
+
+ @Override
+ public ProcessorEntity getProcessor(final String id) {
+ final ProcessorNode processor = processorDAO.getProcessor(id);
+ return createProcessorEntity(processor, NiFiUserUtils.getNiFiUser());
+ }
+
+ @Override
+ public PropertyDescriptorDTO getProcessorPropertyDescriptor(final String id, final String property) {
+ final ProcessorNode processor = processorDAO.getProcessor(id);
+ PropertyDescriptor descriptor = processor.getPropertyDescriptor(property);
+
+ // return an invalid descriptor if the processor doesn't support this property
+ if (descriptor == null) {
+ descriptor = new PropertyDescriptor.Builder().name(property).addValidator(Validator.INVALID).dynamic(true).build();
+ }
+
+ return dtoFactory.createPropertyDescriptorDto(descriptor, processor.getProcessGroup().getIdentifier());
+ }
+
+ @Override
+ public ProcessorStatusEntity getProcessorStatus(final String id) {
+ final ProcessorNode processor = processorDAO.getProcessor(id);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processor);
+ final ProcessorStatusDTO dto = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(id));
+ return entityFactory.createProcessorStatusEntity(dto, permissions);
+ }
+
+ @Override
+ public StatusHistoryEntity getProcessorStatusHistory(final String id) {
+ final ProcessorNode processor = processorDAO.getProcessor(id);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processor);
+ final StatusHistoryDTO dto = controllerFacade.getProcessorStatusHistory(id);
+ return entityFactory.createStatusHistoryEntity(dto, permissions);
+ }
+
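+    /**
+     * Determines whether the current user is authorized to read the component that produced the
+     * given bulletin. Bulletins whose source component no longer exists are not authorized.
+     */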
+ private boolean authorizeBulletin(final Bulletin bulletin) {
+ final String sourceId = bulletin.getSourceId();
+ final ComponentType type = bulletin.getSourceType();
+
+ final Authorizable authorizable;
+ try {
+ switch (type) {
+ case PROCESSOR:
+ authorizable = authorizableLookup.getProcessor(sourceId).getAuthorizable();
+ break;
+ case REPORTING_TASK:
+ authorizable = authorizableLookup.getReportingTask(sourceId).getAuthorizable();
+ break;
+ case CONTROLLER_SERVICE:
+ authorizable = authorizableLookup.getControllerService(sourceId).getAuthorizable();
+ break;
+ case FLOW_CONTROLLER:
+ authorizable = controllerFacade;
+ break;
+ case INPUT_PORT:
+ authorizable = authorizableLookup.getInputPort(sourceId);
+ break;
+ case OUTPUT_PORT:
+ authorizable = authorizableLookup.getOutputPort(sourceId);
+ break;
+ case REMOTE_PROCESS_GROUP:
+ authorizable = authorizableLookup.getRemoteProcessGroup(sourceId);
+ break;
+ default:
+ throw new WebApplicationException(Response.serverError().entity("An unexpected type of component is the source of this bulletin.").build());
+ }
+ } catch (final ResourceNotFoundException e) {
+ // if the underlying component is gone, disallow
+ return false;
+ }
+
+ // perform the authorization
+ final AuthorizationResult result = authorizable.checkAuthorization(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
+ return Result.Approved.equals(result.getResult());
+ }
+
+ @Override
+ public BulletinBoardDTO getBulletinBoard(final BulletinQueryDTO query) {
+ // build the query
+ final BulletinQuery.Builder queryBuilder = new BulletinQuery.Builder()
+ .groupIdMatches(query.getGroupId())
+ .sourceIdMatches(query.getSourceId())
+ .nameMatches(query.getName())
+ .messageMatches(query.getMessage())
+ .after(query.getAfter())
+ .limit(query.getLimit());
+
+ // perform the query
+ final List<Bulletin> results = bulletinRepository.findBulletins(queryBuilder.build());
+
+        // generate the results - iterate in reverse order because the query above sorts by timestamp
+        // descending to capture the most recent bulletins, which yields exactly the results we want
+        // but in reverse order
+ final List<BulletinEntity> bulletinEntities = new ArrayList<>();
+ for (final ListIterator<Bulletin> bulletinIter = results.listIterator(results.size()); bulletinIter.hasPrevious(); ) {
+ final Bulletin bulletin = bulletinIter.previous();
+ bulletinEntities.add(entityFactory.createBulletinEntity(dtoFactory.createBulletinDto(bulletin), authorizeBulletin(bulletin)));
+ }
+
+ // create the bulletin board
+ final BulletinBoardDTO bulletinBoard = new BulletinBoardDTO();
+ bulletinBoard.setBulletins(bulletinEntities);
+ bulletinBoard.setGenerated(new Date());
+ return bulletinBoard;
+ }
+
+ @Override
+ public SystemDiagnosticsDTO getSystemDiagnostics() {
+ final SystemDiagnostics sysDiagnostics = controllerFacade.getSystemDiagnostics();
+ return dtoFactory.createSystemDiagnosticsDto(sysDiagnostics);
+ }
+
+ @Override
+ public List<ResourceDTO> getResources() {
+ final List<Resource> resources = controllerFacade.getResources();
+ final List<ResourceDTO> resourceDtos = new ArrayList<>(resources.size());
+ for (final Resource resource : resources) {
+ resourceDtos.add(dtoFactory.createResourceDto(resource));
+ }
+ return resourceDtos;
+ }
+
+ @Override
+ public void discoverCompatibleBundles(VersionedProcessGroup versionedGroup) {
+ BundleUtils.discoverCompatibleBundles(controllerFacade.getExtensionManager(), versionedGroup);
+ }
+
+ @Override
+ public BundleCoordinate getCompatibleBundle(String type, BundleDTO bundleDTO) {
+ return BundleUtils.getCompatibleBundle(controllerFacade.getExtensionManager(), type, bundleDTO);
+ }
+
+ @Override
+ public ConfigurableComponent getTempComponent(String classType, BundleCoordinate bundleCoordinate) {
+ return controllerFacade.getExtensionManager().getTempComponent(classType, bundleCoordinate);
+ }
+
+ /**
+ * Ensures the specified user has permission to access the specified port. This method does
+ * not utilize the DataTransferAuthorizable as that will enforce the entire chain is
+ * authorized for the transfer. This method is only invoked when obtaining the site to site
+ * details so the entire chain isn't necessary.
+ */
+ private boolean isUserAuthorized(final NiFiUser user, final RootGroupPort port) {
+ final boolean isSiteToSiteSecure = Boolean.TRUE.equals(properties.isSiteToSiteSecure());
+
+ // if site to site is not secure, allow all users
+ if (!isSiteToSiteSecure) {
+ return true;
+ }
+
+ final Map<String, String> userContext;
+ if (user.getClientAddress() != null && !user.getClientAddress().trim().isEmpty()) {
+ userContext = new HashMap<>();
+ userContext.put(UserContextKeys.CLIENT_ADDRESS.name(), user.getClientAddress());
+ } else {
+ userContext = null;
+ }
+
+ final AuthorizationRequest request = new AuthorizationRequest.Builder()
+ .resource(ResourceFactory.getDataTransferResource(port.getResource()))
+ .identity(user.getIdentity())
+ .groups(user.getGroups())
+ .anonymous(user.isAnonymous())
+ .accessAttempt(false)
+ .action(RequestAction.WRITE)
+ .userContext(userContext)
+ .explanationSupplier(() -> "Unable to retrieve port details.")
+ .build();
+
+ final AuthorizationResult result = authorizer.authorize(request);
+ return Result.Approved.equals(result.getResult());
+ }
+
+ @Override
+ public ControllerDTO getSiteToSiteDetails() {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ if (user == null) {
+ throw new WebApplicationException(new Throwable("Unable to access details for current user."));
+ }
+
+ // serialize the input ports this NiFi has access to
+ final Set<PortDTO> inputPortDtos = new LinkedHashSet<>();
+ final Set<RootGroupPort> inputPorts = controllerFacade.getInputPorts();
+ for (final RootGroupPort inputPort : inputPorts) {
+ if (isUserAuthorized(user, inputPort)) {
+ final PortDTO dto = new PortDTO();
+ dto.setId(inputPort.getIdentifier());
+ dto.setName(inputPort.getName());
+ dto.setComments(inputPort.getComments());
+ dto.setState(inputPort.getScheduledState().toString());
+ inputPortDtos.add(dto);
+ }
+ }
+
+ // serialize the output ports this NiFi has access to
+ final Set<PortDTO> outputPortDtos = new LinkedHashSet<>();
+ for (final RootGroupPort outputPort : controllerFacade.getOutputPorts()) {
+ if (isUserAuthorized(user, outputPort)) {
+ final PortDTO dto = new PortDTO();
+ dto.setId(outputPort.getIdentifier());
+ dto.setName(outputPort.getName());
+ dto.setComments(outputPort.getComments());
+ dto.setState(outputPort.getScheduledState().toString());
+ outputPortDtos.add(dto);
+ }
+ }
+
+ // get the root group
+ final ProcessGroup rootGroup = processGroupDAO.getProcessGroup(controllerFacade.getRootGroupId());
+ final ProcessGroupCounts counts = rootGroup.getCounts();
+
+ // create the controller dto
+ final ControllerDTO controllerDTO = new ControllerDTO();
+ controllerDTO.setId(controllerFacade.getRootGroupId());
+ controllerDTO.setInstanceId(controllerFacade.getInstanceId());
+ controllerDTO.setName(controllerFacade.getName());
+ controllerDTO.setComments(controllerFacade.getComments());
+ controllerDTO.setInputPorts(inputPortDtos);
+ controllerDTO.setOutputPorts(outputPortDtos);
+ controllerDTO.setInputPortCount(inputPortDtos.size());
+ controllerDTO.setOutputPortCount(outputPortDtos.size());
+ controllerDTO.setRunningCount(counts.getRunningCount());
+ controllerDTO.setStoppedCount(counts.getStoppedCount());
+ controllerDTO.setInvalidCount(counts.getInvalidCount());
+ controllerDTO.setDisabledCount(counts.getDisabledCount());
+
+ // determine the site to site configuration
+ controllerDTO.setRemoteSiteListeningPort(controllerFacade.getRemoteSiteListeningPort());
+ controllerDTO.setRemoteSiteHttpListeningPort(controllerFacade.getRemoteSiteListeningHttpPort());
+ controllerDTO.setSiteToSiteSecure(controllerFacade.isRemoteSiteCommsSecure());
+
+ return controllerDTO;
+ }
+
+ @Override
+ public ControllerConfigurationEntity getControllerConfiguration() {
+ final Revision rev = revisionManager.getRevision(FlowController.class.getSimpleName());
+ final ControllerConfigurationDTO dto = dtoFactory.createControllerConfigurationDto(controllerFacade);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(controllerFacade);
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(rev);
+ return entityFactory.createControllerConfigurationEntity(dto, revision, permissions);
+ }
+
+ @Override
+ public ControllerBulletinsEntity getControllerBulletins() {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final ControllerBulletinsEntity controllerBulletinsEntity = new ControllerBulletinsEntity();
+
+ final List<BulletinEntity> controllerBulletinEntities = new ArrayList<>();
+
+ final Authorizable controllerAuthorizable = authorizableLookup.getController();
+ final boolean authorized = controllerAuthorizable.isAuthorized(authorizer, RequestAction.READ, user);
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForController());
+ controllerBulletinEntities.addAll(bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, authorized)).collect(Collectors.toList()));
+
+ // get the controller service bulletins
+ final BulletinQuery controllerServiceQuery = new BulletinQuery.Builder().sourceType(ComponentType.CONTROLLER_SERVICE).build();
+ final List<Bulletin> allControllerServiceBulletins = bulletinRepository.findBulletins(controllerServiceQuery);
+ final List<BulletinEntity> controllerServiceBulletinEntities = new ArrayList<>();
+ for (final Bulletin bulletin : allControllerServiceBulletins) {
+ try {
+ final Authorizable controllerServiceAuthorizable = authorizableLookup.getControllerService(bulletin.getSourceId()).getAuthorizable();
+ final boolean controllerServiceAuthorized = controllerServiceAuthorizable.isAuthorized(authorizer, RequestAction.READ, user);
+
+ final BulletinEntity controllerServiceBulletin = entityFactory.createBulletinEntity(dtoFactory.createBulletinDto(bulletin), controllerServiceAuthorized);
+ controllerServiceBulletinEntities.add(controllerServiceBulletin);
+ controllerBulletinEntities.add(controllerServiceBulletin);
+ } catch (final ResourceNotFoundException e) {
+                // the controller service no longer exists... skip its bulletin
+ }
+ }
+ controllerBulletinsEntity.setControllerServiceBulletins(controllerServiceBulletinEntities);
+
+ // get the reporting task bulletins
+ final BulletinQuery reportingTaskQuery = new BulletinQuery.Builder().sourceType(ComponentType.REPORTING_TASK).build();
+ final List<Bulletin> allReportingTaskBulletins = bulletinRepository.findBulletins(reportingTaskQuery);
+ final List<BulletinEntity> reportingTaskBulletinEntities = new ArrayList<>();
+ for (final Bulletin bulletin : allReportingTaskBulletins) {
+ try {
+ final Authorizable reportingTaskAuthorizable = authorizableLookup.getReportingTask(bulletin.getSourceId()).getAuthorizable();
+ final boolean reportingTaskAuthorizableAuthorized = reportingTaskAuthorizable.isAuthorized(authorizer, RequestAction.READ, user);
+
+ final BulletinEntity reportingTaskBulletin = entityFactory.createBulletinEntity(dtoFactory.createBulletinDto(bulletin), reportingTaskAuthorizableAuthorized);
+ reportingTaskBulletinEntities.add(reportingTaskBulletin);
+ controllerBulletinEntities.add(reportingTaskBulletin);
+ } catch (final ResourceNotFoundException e) {
+                // the reporting task no longer exists... skip its bulletin
+ }
+ }
+ controllerBulletinsEntity.setReportingTaskBulletins(reportingTaskBulletinEntities);
+
+ controllerBulletinsEntity.setBulletins(pruneAndSortBulletins(controllerBulletinEntities, BulletinRepository.MAX_BULLETINS_FOR_CONTROLLER));
+ return controllerBulletinsEntity;
+ }
+
+ @Override
+ public FlowConfigurationEntity getFlowConfiguration() {
+ final FlowConfigurationDTO dto = dtoFactory.createFlowConfigurationDto(properties.getAutoRefreshInterval(),
+                properties.getDefaultBackPressureObjectThreshold(), properties.getDefaultBackPressureDataSizeThreshold(), properties.getDcaeDistributorApiHostname());
+ final FlowConfigurationEntity entity = new FlowConfigurationEntity();
+ entity.setFlowConfiguration(dto);
+ return entity;
+ }
+
+ @Override
+ public AccessPolicyEntity getAccessPolicy(final String accessPolicyId) {
+ final AccessPolicy accessPolicy = accessPolicyDAO.getAccessPolicy(accessPolicyId);
+ return createAccessPolicyEntity(accessPolicy);
+ }
+
+ @Override
+ public AccessPolicyEntity getAccessPolicy(final RequestAction requestAction, final String resource) {
+ Authorizable authorizable;
+ try {
+ authorizable = authorizableLookup.getAuthorizableFromResource(resource);
+ } catch (final ResourceNotFoundException e) {
+            // unable to find the underlying authorizable - the user was authorized based on the top level
+            // /policies resource, so create an anonymous authorizable to attempt to locate an existing
+            // policy for this resource
+ authorizable = new Authorizable() {
+ @Override
+ public Authorizable getParentAuthorizable() {
+ return null;
+ }
+
+ @Override
+ public Resource getResource() {
+ return new Resource() {
+ @Override
+ public String getIdentifier() {
+ return resource;
+ }
+
+ @Override
+ public String getName() {
+ return resource;
+ }
+
+ @Override
+ public String getSafeDescription() {
+ return "Policy " + resource;
+ }
+ };
+ }
+ };
+ }
+
+ final AccessPolicy accessPolicy = accessPolicyDAO.getAccessPolicy(requestAction, authorizable);
+ return createAccessPolicyEntity(accessPolicy);
+ }
+
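+    /**
+     * Builds an AccessPolicyEntity, resolving the policy's users and groups to tenant entities and
+     * attaching a component reference for the policy's resource.
+     */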
+ private AccessPolicyEntity createAccessPolicyEntity(final AccessPolicy accessPolicy) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(accessPolicy.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getAccessPolicyById(accessPolicy.getIdentifier()));
+ final ComponentReferenceEntity componentReference = createComponentReferenceEntity(accessPolicy.getResource());
+ return entityFactory.createAccessPolicyEntity(
+ dtoFactory.createAccessPolicyDto(accessPolicy,
+ accessPolicy.getGroups().stream().map(mapUserGroupIdToTenantEntity(false)).collect(Collectors.toSet()),
+ accessPolicy.getUsers().stream().map(mapUserIdToTenantEntity(false)).collect(Collectors.toSet()), componentReference),
+ revision, permissions);
+ }
+
+ @Override
+ public UserEntity getUser(final String userId) {
+ final User user = userDAO.getUser(userId);
+ return createUserEntity(user, true);
+ }
+
+ @Override
+ public Set<UserEntity> getUsers() {
+ final Set<User> users = userDAO.getUsers();
+ return users.stream()
+ .map(user -> createUserEntity(user, false))
+ .collect(Collectors.toSet());
+ }
+
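+    /**
+     * Builds a UserEntity including the groups the user belongs to and the access policies that
+     * reference the user.
+     */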
+ private UserEntity createUserEntity(final User user, final boolean enforceUserExistence) {
+ final RevisionDTO userRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(user.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ final Set<TenantEntity> userGroups = userGroupDAO.getUserGroupsForUser(user.getIdentifier()).stream()
+ .map(g -> g.getIdentifier()).map(mapUserGroupIdToTenantEntity(enforceUserExistence)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = userGroupDAO.getAccessPoliciesForUser(user.getIdentifier()).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ return entityFactory.createUserEntity(dtoFactory.createUserDto(user, userGroups, policyEntities), userRevision, permissions);
+ }
+
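+    /**
+     * Builds a UserGroupEntity including the group's member users and the access policies that
+     * reference the group.
+     */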
+ private UserGroupEntity createUserGroupEntity(final Group userGroup, final boolean enforceGroupExistence) {
+ final RevisionDTO userGroupRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(userGroup.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(authorizableLookup.getTenant());
+ final Set<TenantEntity> users = userGroup.getUsers().stream().map(mapUserIdToTenantEntity(enforceGroupExistence)).collect(Collectors.toSet());
+ final Set<AccessPolicySummaryEntity> policyEntities = userGroupDAO.getAccessPoliciesForUserGroup(userGroup.getIdentifier()).stream()
+ .map(ap -> createAccessPolicySummaryEntity(ap)).collect(Collectors.toSet());
+ return entityFactory.createUserGroupEntity(dtoFactory.createUserGroupDto(userGroup, users, policyEntities), userGroupRevision, permissions);
+ }
+
+ @Override
+ public UserGroupEntity getUserGroup(final String userGroupId) {
+ final Group userGroup = userGroupDAO.getUserGroup(userGroupId);
+ return createUserGroupEntity(userGroup, true);
+ }
+
+ @Override
+ public Set<UserGroupEntity> getUserGroups() {
+ final Set<Group> userGroups = userGroupDAO.getUserGroups();
+ return userGroups.stream()
+ .map(userGroup -> createUserGroupEntity(userGroup, false))
+ .collect(Collectors.toSet());
+ }
+
+ private LabelEntity createLabelEntity(final Label label) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(label.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(label);
+ return entityFactory.createLabelEntity(dtoFactory.createLabelDto(label), revision, permissions);
+ }
+
+ @Override
+ public Set<LabelEntity> getLabels(final String groupId) {
+ final Set<Label> labels = labelDAO.getLabels(groupId);
+ return labels.stream()
+ .map(label -> createLabelEntity(label))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public LabelEntity getLabel(final String labelId) {
+ final Label label = labelDAO.getLabel(labelId);
+ return createLabelEntity(label);
+ }
+
+ private FunnelEntity createFunnelEntity(final Funnel funnel) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(funnel.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(funnel);
+ return entityFactory.createFunnelEntity(dtoFactory.createFunnelDto(funnel), revision, permissions);
+ }
+
+ @Override
+ public Set<FunnelEntity> getFunnels(final String groupId) {
+ final Set<Funnel> funnels = funnelDAO.getFunnels(groupId);
+ return funnels.stream()
+ .map(funnel -> createFunnelEntity(funnel))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public FunnelEntity getFunnel(final String funnelId) {
+ final Funnel funnel = funnelDAO.getFunnel(funnelId);
+ return createFunnelEntity(funnel);
+ }
+
+ private PortEntity createInputPortEntity(final Port port) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(port.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port, NiFiUserUtils.getNiFiUser());
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port), NiFiUserUtils.getNiFiUser());
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getInputPortStatus(port.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(port.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(dtoFactory.createPortDto(port), revision, permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ private PortEntity createOutputPortEntity(final Port port) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(port.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(port, NiFiUserUtils.getNiFiUser());
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(port), NiFiUserUtils.getNiFiUser());
+ final PortStatusDTO status = dtoFactory.createPortStatusDto(controllerFacade.getOutputPortStatus(port.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(port.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createPortEntity(dtoFactory.createPortDto(port), revision, permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public Set<PortEntity> getInputPorts(final String groupId) {
+ final Set<Port> inputPorts = inputPortDAO.getPorts(groupId);
+ return inputPorts.stream()
+ .map(port -> createInputPortEntity(port))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<PortEntity> getOutputPorts(final String groupId) {
+ final Set<Port> ports = outputPortDAO.getPorts(groupId);
+ return ports.stream()
+ .map(port -> createOutputPortEntity(port))
+ .collect(Collectors.toSet());
+ }
+
+ private ProcessGroupEntity createProcessGroupEntity(final ProcessGroup group) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(group.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(group);
+ final ProcessGroupStatusDTO status = dtoFactory.createConciseProcessGroupStatusDto(controllerFacade.getProcessGroupStatus(group.getIdentifier()));
+ final List<BulletinEntity> bulletins = getProcessGroupBulletins(group);
+ return entityFactory.createProcessGroupEntity(dtoFactory.createProcessGroupDto(group), revision, permissions, status, bulletins);
+ }
+
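+    /**
+     * Collects the bulletins for the given process group and all of its descendant groups,
+     * authorizes each bulletin for the current user, then prunes and sorts the result.
+     */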
+ private List<BulletinEntity> getProcessGroupBulletins(final ProcessGroup group) {
+ final List<Bulletin> bulletins = new ArrayList<>(bulletinRepository.findBulletinsForGroupBySource(group.getIdentifier()));
+
+ for (final ProcessGroup descendantGroup : group.findAllProcessGroups()) {
+ bulletins.addAll(bulletinRepository.findBulletinsForGroupBySource(descendantGroup.getIdentifier()));
+ }
+
+        final List<BulletinEntity> bulletinEntities = new ArrayList<>();
+ for (final Bulletin bulletin : bulletins) {
+ bulletinEntities.add(entityFactory.createBulletinEntity(dtoFactory.createBulletinDto(bulletin), authorizeBulletin(bulletin)));
+ }
+
+ return pruneAndSortBulletins(bulletinEntities, BulletinRepository.MAX_BULLETINS_PER_COMPONENT);
+ }
+
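+    /**
+     * Sorts the bulletins by descending id, placing nulls last, and truncates the list to at most
+     * the specified maximum number of bulletins.
+     */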
+ private List<BulletinEntity> pruneAndSortBulletins(final List<BulletinEntity> bulletinEntities, final int maxBulletins) {
+        // sort the bulletins
+        bulletinEntities.sort((o1, o2) -> {
+            if (o1 == null && o2 == null) {
+                return 0;
+            }
+            if (o1 == null) {
+                return 1;
+            }
+            if (o2 == null) {
+                return -1;
+            }
+
+            return -Long.compare(o1.getId(), o2.getId());
+        });
+
+ // prune the response to only include the max number of bulletins
+ if (bulletinEntities.size() > maxBulletins) {
+ return bulletinEntities.subList(0, maxBulletins);
+ } else {
+ return bulletinEntities;
+ }
+ }
+
+ @Override
+ public Set<ProcessGroupEntity> getProcessGroups(final String parentGroupId) {
+ final Set<ProcessGroup> groups = processGroupDAO.getProcessGroups(parentGroupId);
+ return groups.stream()
+ .map(group -> createProcessGroupEntity(group))
+ .collect(Collectors.toSet());
+ }
+
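+    /**
+     * Assembles a RemoteProcessGroupEntity for the given user, including the revision, read and
+     * operate permissions, current status and any bulletins for the remote group.
+     */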
+ private RemoteProcessGroupEntity createRemoteGroupEntity(final RemoteProcessGroup rpg, final NiFiUser user) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(rpg.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(rpg, user);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(rpg), user);
+ final RemoteProcessGroupStatusDTO status = dtoFactory.createRemoteProcessGroupStatusDto(rpg, controllerFacade.getRemoteProcessGroupStatus(rpg.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(rpg.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createRemoteProcessGroupEntity(dtoFactory.createRemoteProcessGroupDto(rpg), revision, permissions, operatePermissions, status, bulletinEntities);
+ }
+
+ @Override
+ public Set<RemoteProcessGroupEntity> getRemoteProcessGroups(final String groupId) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final Set<RemoteProcessGroup> rpgs = remoteProcessGroupDAO.getRemoteProcessGroups(groupId);
+ return rpgs.stream()
+ .map(rpg -> createRemoteGroupEntity(rpg, user))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public PortEntity getInputPort(final String inputPortId) {
+ final Port port = inputPortDAO.getPort(inputPortId);
+ return createInputPortEntity(port);
+ }
+
+ @Override
+ public PortStatusEntity getInputPortStatus(final String inputPortId) {
+ final Port inputPort = inputPortDAO.getPort(inputPortId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(inputPort);
+ final PortStatusDTO dto = dtoFactory.createPortStatusDto(controllerFacade.getInputPortStatus(inputPortId));
+ return entityFactory.createPortStatusEntity(dto, permissions);
+ }
+
+ @Override
+ public PortEntity getOutputPort(final String outputPortId) {
+ final Port port = outputPortDAO.getPort(outputPortId);
+ return createOutputPortEntity(port);
+ }
+
+ @Override
+ public PortStatusEntity getOutputPortStatus(final String outputPortId) {
+ final Port outputPort = outputPortDAO.getPort(outputPortId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(outputPort);
+ final PortStatusDTO dto = dtoFactory.createPortStatusDto(controllerFacade.getOutputPortStatus(outputPortId));
+ return entityFactory.createPortStatusEntity(dto, permissions);
+ }
+
+ @Override
+ public RemoteProcessGroupEntity getRemoteProcessGroup(final String remoteProcessGroupId) {
+ final RemoteProcessGroup rpg = remoteProcessGroupDAO.getRemoteProcessGroup(remoteProcessGroupId);
+ return createRemoteGroupEntity(rpg, NiFiUserUtils.getNiFiUser());
+ }
+
+ @Override
+ public RemoteProcessGroupStatusEntity getRemoteProcessGroupStatus(final String id) {
+ final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(id);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroup);
+ final RemoteProcessGroupStatusDTO dto = dtoFactory.createRemoteProcessGroupStatusDto(remoteProcessGroup, controllerFacade.getRemoteProcessGroupStatus(id));
+ return entityFactory.createRemoteProcessGroupStatusEntity(dto, permissions);
+ }
+
+ @Override
+ public StatusHistoryEntity getRemoteProcessGroupStatusHistory(final String id) {
+ final RemoteProcessGroup remoteProcessGroup = remoteProcessGroupDAO.getRemoteProcessGroup(id);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(remoteProcessGroup);
+ final StatusHistoryDTO dto = controllerFacade.getRemoteProcessGroupStatusHistory(id);
+ return entityFactory.createStatusHistoryEntity(dto, permissions);
+ }
+
+ @Override
+ public CurrentUserEntity getCurrentUser() {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final CurrentUserEntity entity = new CurrentUserEntity();
+ entity.setIdentity(user.getIdentity());
+ entity.setAnonymous(user.isAnonymous());
+ entity.setProvenancePermissions(dtoFactory.createPermissionsDto(authorizableLookup.getProvenance()));
+ entity.setCountersPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getCounters()));
+ entity.setTenantsPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getTenant()));
+ entity.setControllerPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getController()));
+ entity.setPoliciesPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getPolicies()));
+ entity.setSystemPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getSystem()));
+ entity.setCanVersionFlows(CollectionUtils.isNotEmpty(flowRegistryClient.getRegistryIdentifiers()));
+
+ entity.setRestrictedComponentsPermissions(dtoFactory.createPermissionsDto(authorizableLookup.getRestrictedComponents()));
+
+ final Set<ComponentRestrictionPermissionDTO> componentRestrictionPermissions = new HashSet<>();
+ Arrays.stream(RequiredPermission.values()).forEach(requiredPermission -> {
+ final PermissionsDTO restrictionPermissions = dtoFactory.createPermissionsDto(authorizableLookup.getRestrictedComponents(requiredPermission));
+
+ final RequiredPermissionDTO requiredPermissionDto = new RequiredPermissionDTO();
+ requiredPermissionDto.setId(requiredPermission.getPermissionIdentifier());
+ requiredPermissionDto.setLabel(requiredPermission.getPermissionLabel());
+
+ final ComponentRestrictionPermissionDTO componentRestrictionPermissionDto = new ComponentRestrictionPermissionDTO();
+ componentRestrictionPermissionDto.setRequiredPermission(requiredPermissionDto);
+ componentRestrictionPermissionDto.setPermissions(restrictionPermissions);
+
+ componentRestrictionPermissions.add(componentRestrictionPermissionDto);
+ });
+ entity.setComponentRestrictionPermissions(componentRestrictionPermissions);
+
+ return entity;
+ }
+
+ @Override
+ public ProcessGroupFlowEntity getProcessGroupFlow(final String groupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+
+        // Get the Process Group status, but only to a status depth of one, because for any child process group
+        // we ignore the status of each individual component. I.e., if Process Group A has child Group B, and child
+        // Group B has a Processor, we don't care about the individual stats of that Processor because the
+        // ProcessGroupFlowEntity doesn't include them anyway. So we can avoid including that information in the
+        // status that is returned.
+ final ProcessGroupStatus groupStatus = controllerFacade.getProcessGroupStatus(groupId, 1);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ return entityFactory.createProcessGroupFlowEntity(dtoFactory.createProcessGroupFlowDto(processGroup, groupStatus, revisionManager, this::getProcessGroupBulletins), permissions);
+ }
+
+ @Override
+ public ProcessGroupEntity getProcessGroup(final String groupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ return createProcessGroupEntity(processGroup);
+ }
+
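+    /**
+     * Assembles a ControllerServiceEntity, including the components that reference the service
+     * along with the revision, permissions and any bulletins for the service.
+     */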
+ private ControllerServiceEntity createControllerServiceEntity(final ControllerServiceNode serviceNode, final Set<String> serviceIds) {
+ final ControllerServiceDTO dto = dtoFactory.createControllerServiceDto(serviceNode);
+
+ final ControllerServiceReference ref = serviceNode.getReferences();
+ final ControllerServiceReferencingComponentsEntity referencingComponentsEntity = createControllerServiceReferencingComponentsEntity(ref, serviceIds);
+ dto.setReferencingComponents(referencingComponentsEntity.getControllerServiceReferencingComponents());
+
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(serviceNode.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(serviceNode, NiFiUserUtils.getNiFiUser());
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(serviceNode), NiFiUserUtils.getNiFiUser());
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(serviceNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createControllerServiceEntity(dto, revision, permissions, operatePermissions, bulletinEntities);
+ }
+
+ @Override
+ public VariableRegistryEntity getVariableRegistry(final String groupId, final boolean includeAncestorGroups) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ if (processGroup == null) {
+ throw new ResourceNotFoundException("Could not find group with ID " + groupId);
+ }
+
+ return createVariableRegistryEntity(processGroup, includeAncestorGroups);
+ }
+
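+ // Builds the variable registry entity for the group; when includeAncestorGroups is set, variables from every
+ // readable ancestor group are merged into the registry DTO before it is wrapped with revision and permissions.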
+ private VariableRegistryEntity createVariableRegistryEntity(final ProcessGroup processGroup, final boolean includeAncestorGroups) {
+ final VariableRegistryDTO registryDto = dtoFactory.createVariableRegistryDto(processGroup, revisionManager);
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(processGroup.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+
+ if (includeAncestorGroups) {
+ ProcessGroup parent = processGroup.getParent();
+ while (parent != null) {
+ final PermissionsDTO parentPerms = dtoFactory.createPermissionsDto(parent);
+ if (Boolean.TRUE.equals(parentPerms.getCanRead())) {
+ final VariableRegistryDTO parentRegistryDto = dtoFactory.createVariableRegistryDto(parent, revisionManager);
+ final Set<VariableEntity> parentVariables = parentRegistryDto.getVariables();
+ registryDto.getVariables().addAll(parentVariables);
+ }
+
+ parent = parent.getParent();
+ }
+ }
+
+ return entityFactory.createVariableRegistryEntity(registryDto, revision, permissions);
+ }
+
+ @Override
+ public VariableRegistryEntity populateAffectedComponents(final VariableRegistryDTO variableRegistryDto) {
+ final String groupId = variableRegistryDto.getProcessGroupId();
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ if (processGroup == null) {
+ throw new ResourceNotFoundException("Could not find group with ID " + groupId);
+ }
+
+ final VariableRegistryDTO registryDto = dtoFactory.populateAffectedComponents(variableRegistryDto, processGroup, revisionManager);
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(processGroup.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ return entityFactory.createVariableRegistryEntity(registryDto, revision, permissions);
+ }
+
+ @Override
+ public Set<ControllerServiceEntity> getControllerServices(final String groupId, final boolean includeAncestorGroups, final boolean includeDescendantGroups) {
+ final Set<ControllerServiceNode> serviceNodes = controllerServiceDAO.getControllerServices(groupId, includeAncestorGroups, includeDescendantGroups);
+ final Set<String> serviceIds = serviceNodes.stream().map(service -> service.getIdentifier()).collect(Collectors.toSet());
+
+ return serviceNodes.stream()
+ .map(serviceNode -> createControllerServiceEntity(serviceNode, serviceIds))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public ControllerServiceEntity getControllerService(final String controllerServiceId) {
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(controllerServiceId);
+ return createControllerServiceEntity(controllerService, Sets.newHashSet(controllerServiceId));
+ }
+
+ @Override
+ public PropertyDescriptorDTO getControllerServicePropertyDescriptor(final String id, final String property) {
+ final ControllerServiceNode controllerService = controllerServiceDAO.getControllerService(id);
+ PropertyDescriptor descriptor = controllerService.getControllerServiceImplementation().getPropertyDescriptor(property);
+
+ // return an invalid descriptor if the controller service doesn't support this property
+ if (descriptor == null) {
+ descriptor = new PropertyDescriptor.Builder().name(property).addValidator(Validator.INVALID).dynamic(true).build();
+ }
+
+ final String groupId = controllerService.getProcessGroup() == null ? null : controllerService.getProcessGroup().getIdentifier();
+ return dtoFactory.createPropertyDescriptorDto(descriptor, groupId);
+ }
+
+ @Override
+ public ControllerServiceReferencingComponentsEntity getControllerServiceReferencingComponents(final String controllerServiceId) {
+ final ControllerServiceNode service = controllerServiceDAO.getControllerService(controllerServiceId);
+ final ControllerServiceReference ref = service.getReferences();
+ return createControllerServiceReferencingComponentsEntity(ref, Sets.newHashSet(controllerServiceId));
+ }
+
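+ // Bundles a reporting task DTO together with its revision, read/write and operate permissions, and bulletins.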
+ private ReportingTaskEntity createReportingTaskEntity(final ReportingTaskNode reportingTask) {
+ final RevisionDTO revision = dtoFactory.createRevisionDTO(revisionManager.getRevision(reportingTask.getIdentifier()));
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(reportingTask);
+ final PermissionsDTO operatePermissions = dtoFactory.createPermissionsDto(new OperationAuthorizable(reportingTask));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(reportingTask.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createReportingTaskEntity(dtoFactory.createReportingTaskDto(reportingTask), revision, permissions, operatePermissions, bulletinEntities);
+ }
+
+ @Override
+ public Set<ReportingTaskEntity> getReportingTasks() {
+ final Set<ReportingTaskNode> reportingTasks = reportingTaskDAO.getReportingTasks();
+ return reportingTasks.stream()
+ .map(reportingTask -> createReportingTaskEntity(reportingTask))
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ public ReportingTaskEntity getReportingTask(final String reportingTaskId) {
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(reportingTaskId);
+ return createReportingTaskEntity(reportingTask);
+ }
+
+ @Override
+ public PropertyDescriptorDTO getReportingTaskPropertyDescriptor(final String id, final String property) {
+ final ReportingTaskNode reportingTask = reportingTaskDAO.getReportingTask(id);
+ PropertyDescriptor descriptor = reportingTask.getReportingTask().getPropertyDescriptor(property);
+
+ // return an invalid descriptor if the reporting task doesn't support this property
+ if (descriptor == null) {
+ descriptor = new PropertyDescriptor.Builder().name(property).addValidator(Validator.INVALID).dynamic(true).build();
+ }
+
+ return dtoFactory.createPropertyDescriptorDto(descriptor, null);
+ }
+
+ @Override
+ public StatusHistoryEntity getProcessGroupStatusHistory(final String groupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ final StatusHistoryDTO dto = controllerFacade.getProcessGroupStatusHistory(groupId);
+ return entityFactory.createStatusHistoryEntity(dto, permissions);
+ }
+
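+ // Takes a snapshot of the current group, creates (or looks up) the Versioned Flow in the configured registry,
+ // publishes the snapshot as the next version, and returns the resulting version control information along with
+ // the mapping of local component IDs to versioned component IDs.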
+ @Override
+ public VersionControlComponentMappingEntity registerFlowWithFlowRegistry(final String groupId, final StartVersionControlRequestEntity requestEntity) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+
+ final VersionControlInformation currentVci = processGroup.getVersionControlInformation();
+ final int expectedVersion = currentVci == null ? 1 : currentVci.getVersion() + 1;
+
+ // Create a VersionedProcessGroup snapshot of the flow as it is currently.
+ final InstantiatedVersionedProcessGroup versionedProcessGroup = createFlowSnapshot(groupId);
+
+ final VersionedFlowDTO versionedFlowDto = requestEntity.getVersionedFlow();
+ final String flowId = versionedFlowDto.getFlowId() == null ? UUID.randomUUID().toString() : versionedFlowDto.getFlowId();
+
+ final VersionedFlow versionedFlow = new VersionedFlow();
+ versionedFlow.setBucketIdentifier(versionedFlowDto.getBucketId());
+ versionedFlow.setCreatedTimestamp(System.currentTimeMillis());
+ versionedFlow.setDescription(versionedFlowDto.getDescription());
+ versionedFlow.setModifiedTimestamp(versionedFlow.getCreatedTimestamp());
+ versionedFlow.setName(versionedFlowDto.getFlowName());
+ versionedFlow.setIdentifier(flowId);
+
+ // Add the Versioned Flow and first snapshot to the Flow Registry
+ final String registryId = requestEntity.getVersionedFlow().getRegistryId();
+ final VersionedFlowSnapshot registeredSnapshot;
+ final VersionedFlow registeredFlow;
+
+ String action = "create the flow";
+ try {
+ // first, create the flow in the registry, if necessary
+ if (versionedFlowDto.getFlowId() == null) {
+ registeredFlow = registerVersionedFlow(registryId, versionedFlow);
+ } else {
+ registeredFlow = getVersionedFlow(registryId, versionedFlowDto.getBucketId(), versionedFlowDto.getFlowId());
+ }
+
+ action = "add the local flow to the Flow Registry as the first Snapshot";
+
+ // add first snapshot to the flow in the registry
+ registeredSnapshot = registerVersionedFlowSnapshot(registryId, registeredFlow, versionedProcessGroup, versionedFlowDto.getComments(), expectedVersion);
+ } catch (final NiFiRegistryException e) {
+ throw new IllegalArgumentException(e.getLocalizedMessage());
+ } catch (final IOException ioe) {
+ throw new IllegalStateException("Failed to communicate with Flow Registry when attempting to " + action, ioe);
+ }
+
+ final Bucket bucket = registeredSnapshot.getBucket();
+ final VersionedFlow flow = registeredSnapshot.getFlow();
+
+ // Update the Process Group with the new VersionControlInformation. (Send this to all nodes).
+ final VersionControlInformationDTO vci = new VersionControlInformationDTO();
+ vci.setBucketId(bucket.getIdentifier());
+ vci.setBucketName(bucket.getName());
+ vci.setFlowId(flow.getIdentifier());
+ vci.setFlowName(flow.getName());
+ vci.setFlowDescription(flow.getDescription());
+ vci.setGroupId(groupId);
+ vci.setRegistryId(registryId);
+ vci.setRegistryName(getFlowRegistryName(registryId));
+ vci.setVersion(registeredSnapshot.getSnapshotMetadata().getVersion());
+ vci.setState(VersionedFlowState.UP_TO_DATE.name());
+
+ final Map<String, String> mapping = dtoFactory.createVersionControlComponentMappingDto(versionedProcessGroup);
+
+ final Revision groupRevision = revisionManager.getRevision(groupId);
+ final RevisionDTO groupRevisionDto = dtoFactory.createRevisionDTO(groupRevision);
+
+ final VersionControlComponentMappingEntity entity = new VersionControlComponentMappingEntity();
+ entity.setVersionControlInformation(vci);
+ entity.setProcessGroupRevision(groupRevisionDto);
+ entity.setVersionControlComponentMapping(mapping);
+ return entity;
+ }
+
+ @Override
+ public VersionedFlow deleteVersionedFlow(final String registryId, final String bucketId, final String flowId) {
+ final FlowRegistry registry = flowRegistryClient.getFlowRegistry(registryId);
+ if (registry == null) {
+ throw new IllegalArgumentException("No Flow Registry exists with ID " + registryId);
+ }
+
+ try {
+ return registry.deleteVersionedFlow(bucketId, flowId, NiFiUserUtils.getNiFiUser());
+ } catch (final IOException | NiFiRegistryException e) {
+ throw new NiFiCoreException("Failed to remove flow from Flow Registry due to " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public VersionControlInformationEntity getVersionControlInformation(final String groupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ final VersionControlInformation versionControlInfo = processGroup.getVersionControlInformation();
+ if (versionControlInfo == null) {
+ return null;
+ }
+
+ final VersionControlInformationDTO versionControlDto = dtoFactory.createVersionControlInformationDto(processGroup);
+ final RevisionDTO groupRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(groupId));
+ return entityFactory.createVersionControlInformationEntity(versionControlDto, groupRevision);
+ }
+
+ private InstantiatedVersionedProcessGroup createFlowSnapshot(final String processGroupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(processGroupId);
+ final NiFiRegistryFlowMapper mapper = new NiFiRegistryFlowMapper(controllerFacade.getExtensionManager());
+ final InstantiatedVersionedProcessGroup versionedGroup = mapper.mapProcessGroup(processGroup, controllerFacade.getControllerServiceProvider(), flowRegistryClient, false);
+ return versionedGroup;
+ }
+
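+ // Compares the local flow against the tracked version in the Flow Registry and returns the set of component differences.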
+ @Override
+ public FlowComparisonEntity getLocalModifications(final String processGroupId) {
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(processGroupId);
+ final VersionControlInformation versionControlInfo = processGroup.getVersionControlInformation();
+ if (versionControlInfo == null) {
+ throw new IllegalStateException("Process Group with ID " + processGroupId + " is not under Version Control");
+ }
+
+ final FlowRegistry flowRegistry = flowRegistryClient.getFlowRegistry(versionControlInfo.getRegistryIdentifier());
+ if (flowRegistry == null) {
+ throw new IllegalStateException("Process Group with ID " + processGroupId + " is tracking a flow in the Flow Registry with ID " + versionControlInfo.getRegistryIdentifier()
+ + " but cannot find a Flow Registry with that identifier");
+ }
+
+ final VersionedFlowSnapshot versionedFlowSnapshot;
+ try {
+ versionedFlowSnapshot = flowRegistry.getFlowContents(versionControlInfo.getBucketIdentifier(),
+ versionControlInfo.getFlowIdentifier(), versionControlInfo.getVersion(), true, NiFiUserUtils.getNiFiUser());
+ } catch (final IOException | NiFiRegistryException e) {
+ throw new NiFiCoreException("Failed to retrieve flow with Flow Registry in order to calculate local differences due to " + e.getMessage(), e);
+ }
+
+ final NiFiRegistryFlowMapper mapper = new NiFiRegistryFlowMapper(controllerFacade.getExtensionManager());
+ final VersionedProcessGroup localGroup = mapper.mapProcessGroup(processGroup, controllerFacade.getControllerServiceProvider(), flowRegistryClient, true);
+ final VersionedProcessGroup registryGroup = versionedFlowSnapshot.getFlowContents();
+
+ final ComparableDataFlow localFlow = new StandardComparableDataFlow("Local Flow", localGroup);
+ final ComparableDataFlow registryFlow = new StandardComparableDataFlow("Versioned Flow", registryGroup);
+
+ final Set<String> ancestorServiceIds = getAncestorGroupServiceIds(processGroup);
+ final FlowComparator flowComparator = new StandardFlowComparator(registryFlow, localFlow, ancestorServiceIds, new ConciseEvolvingDifferenceDescriptor());
+ final FlowComparison flowComparison = flowComparator.compare();
+
+ final Set<ComponentDifferenceDTO> differenceDtos = dtoFactory.createComponentDifferenceDtos(flowComparison);
+
+ final FlowComparisonEntity entity = new FlowComparisonEntity();
+ entity.setComponentDifferences(differenceDtos);
+ return entity;
+ }
+
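+ // Collects the versioned component IDs of controller services defined in the group's ancestors so that the
+ // flow comparison can resolve references to inherited services.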
+ private Set<String> getAncestorGroupServiceIds(final ProcessGroup group) {
+ final Set<String> ancestorServiceIds;
+ ProcessGroup parentGroup = group.getParent();
+
+ if (parentGroup == null) {
+ ancestorServiceIds = Collections.emptySet();
+ } else {
+ ancestorServiceIds = parentGroup.getControllerServices(true).stream()
+ .map(cs -> {
+ // We want to map the Controller Service to its Versioned Component ID, if it has one.
+ // If it does not have one, we want to generate it in the same way that our Flow Mapper does
+ // because this allows us to find the Controller Service when doing a Flow Diff.
+ final Optional<String> versionedId = cs.getVersionedComponentId();
+ if (versionedId.isPresent()) {
+ return versionedId.get();
+ }
+
+ return UUID.nameUUIDFromBytes(cs.getIdentifier().getBytes(StandardCharsets.UTF_8)).toString();
+ })
+ .collect(Collectors.toSet());
+ }
+
+ return ancestorServiceIds;
+ }
+
+ @Override
+ public VersionedFlow registerVersionedFlow(final String registryId, final VersionedFlow flow) {
+ final FlowRegistry registry = flowRegistryClient.getFlowRegistry(registryId);
+ if (registry == null) {
+ throw new ResourceNotFoundException("No Flow Registry exists with ID " + registryId);
+ }
+
+ try {
+ return registry.registerVersionedFlow(flow, NiFiUserUtils.getNiFiUser());
+ } catch (final IOException | NiFiRegistryException e) {
+ throw new NiFiCoreException("Failed to register flow with Flow Registry due to " + e.getMessage(), e);
+ }
+ }
+
+ private VersionedFlow getVersionedFlow(final String registryId, final String bucketId, final String flowId) throws IOException, NiFiRegistryException {
+ final FlowRegistry registry = flowRegistryClient.getFlowRegistry(registryId);
+ if (registry == null) {
+ throw new ResourceNotFoundException("No Flow Registry exists with ID " + registryId);
+ }
+
+ return registry.getVersionedFlow(bucketId, flowId, NiFiUserUtils.getNiFiUser());
+ }
+
+ @Override
+ public VersionedFlowSnapshot registerVersionedFlowSnapshot(final String registryId, final VersionedFlow flow,
+ final VersionedProcessGroup snapshot, final String comments, final int expectedVersion) {
+ final FlowRegistry registry = flowRegistryClient.getFlowRegistry(registryId);
+ if (registry == null) {
+ throw new ResourceNotFoundException("No Flow Registry exists with ID " + registryId);
+ }
+
+ try {
+ return registry.registerVersionedFlowSnapshot(flow, snapshot, comments, expectedVersion, NiFiUserUtils.getNiFiUser());
+ } catch (final IOException | NiFiRegistryException e) {
+ throw new NiFiCoreException("Failed to register flow with Flow Registry due to " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public VersionControlInformationEntity setVersionControlInformation(final Revision revision, final String processGroupId,
+ final VersionControlInformationDTO versionControlInfo, final Map<String, String> versionedComponentMapping) {
+
+ final ProcessGroup group = processGroupDAO.getProcessGroup(processGroupId);
+
+ final RevisionUpdate<VersionControlInformationDTO> snapshot = updateComponent(revision,
+ group,
+ () -> processGroupDAO.updateVersionControlInformation(versionControlInfo, versionedComponentMapping),
+ processGroup -> dtoFactory.createVersionControlInformationDto(processGroup));
+
+ return entityFactory.createVersionControlInformationEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()));
+ }
+
+ @Override
+ public VersionControlInformationEntity deleteVersionControl(final Revision revision, final String processGroupId) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(processGroupId);
+
+ final RevisionUpdate<VersionControlInformationDTO> snapshot = updateComponent(revision,
+ group,
+ () -> processGroupDAO.disconnectVersionControl(processGroupId),
+ processGroup -> dtoFactory.createVersionControlInformationDto(group));
+
+ return entityFactory.createVersionControlInformationEntity(snapshot.getComponent(), dtoFactory.createRevisionDTO(snapshot.getLastModification()));
+ }
+
+ @Override
+ public void verifyCanUpdate(final String groupId, final VersionedFlowSnapshot proposedFlow, final boolean verifyConnectionRemoval, final boolean verifyNotDirty) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ group.verifyCanUpdate(proposedFlow, verifyConnectionRemoval, verifyNotDirty);
+ }
+
+ @Override
+ public void verifyCanSaveToFlowRegistry(final String groupId, final String registryId, final String bucketId, final String flowId) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ group.verifyCanSaveToFlowRegistry(registryId, bucketId, flowId);
+ }
+
+ @Override
+ public void verifyCanRevertLocalModifications(final String groupId, final VersionedFlowSnapshot versionedFlowSnapshot) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(groupId);
+ group.verifyCanRevertLocalModifications();
+
+ // verify that the process group can be updated to the given snapshot. We do not verify that connections can
+ // be removed, because the flow may still be running, and it only matters that the connections can be removed once the components
+ // have been stopped.
+ group.verifyCanUpdate(versionedFlowSnapshot, false, false);
+ }
+
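+ // Compares the local flow to the proposed snapshot and gathers every local component the version change would
+ // touch: changed components with their current state, everything under a removed Process Group, components
+ // referencing changed Controller Services, and the source and destination of any added or modified connection.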
+ @Override
+ public Set<AffectedComponentEntity> getComponentsAffectedByVersionChange(final String processGroupId, final VersionedFlowSnapshot updatedSnapshot) {
+ final ProcessGroup group = processGroupDAO.getProcessGroup(processGroupId);
+
+ final NiFiRegistryFlowMapper mapper = new NiFiRegistryFlowMapper(controllerFacade.getExtensionManager());
+ final VersionedProcessGroup localContents = mapper.mapProcessGroup(group, controllerFacade.getControllerServiceProvider(), flowRegistryClient, true);
+
+ final ComparableDataFlow localFlow = new StandardComparableDataFlow("Local Flow", localContents);
+ final ComparableDataFlow proposedFlow = new StandardComparableDataFlow("Versioned Flow", updatedSnapshot.getFlowContents());
+
+ final Set<String> ancestorGroupServiceIds = getAncestorGroupServiceIds(group);
+ final FlowComparator flowComparator = new StandardFlowComparator(localFlow, proposedFlow, ancestorGroupServiceIds, new StaticDifferenceDescriptor());
+ final FlowComparison comparison = flowComparator.compare();
+
+ final Set<AffectedComponentEntity> affectedComponents = comparison.getDifferences().stream()
+ .filter(difference -> difference.getDifferenceType() != DifferenceType.COMPONENT_ADDED) // components that are added are not components that will be affected in the local flow.
+ .filter(difference -> difference.getDifferenceType() != DifferenceType.BUNDLE_CHANGED)
+ .filter(FlowDifferenceFilters.FILTER_ADDED_REMOVED_REMOTE_PORTS)
+ .filter(FlowDifferenceFilters.FILTER_IGNORABLE_VERSIONED_FLOW_COORDINATE_CHANGES)
+ .map(difference -> {
+ final VersionedComponent localComponent = difference.getComponentA();
+
+ final String state;
+ switch (localComponent.getComponentType()) {
+ case CONTROLLER_SERVICE:
+ final String serviceId = ((InstantiatedVersionedControllerService) localComponent).getInstanceId();
+ state = controllerServiceDAO.getControllerService(serviceId).getState().name();
+ break;
+ case PROCESSOR:
+ final String processorId = ((InstantiatedVersionedProcessor) localComponent).getInstanceId();
+ state = processorDAO.getProcessor(processorId).getPhysicalScheduledState().name();
+ break;
+ case REMOTE_INPUT_PORT:
+ final InstantiatedVersionedRemoteGroupPort inputPort = (InstantiatedVersionedRemoteGroupPort) localComponent;
+ state = remoteProcessGroupDAO.getRemoteProcessGroup(inputPort.getInstanceGroupId()).getInputPort(inputPort.getInstanceId()).getScheduledState().name();
+ break;
+ case REMOTE_OUTPUT_PORT:
+ final InstantiatedVersionedRemoteGroupPort outputPort = (InstantiatedVersionedRemoteGroupPort) localComponent;
+ state = remoteProcessGroupDAO.getRemoteProcessGroup(outputPort.getInstanceGroupId()).getOutputPort(outputPort.getInstanceId()).getScheduledState().name();
+ break;
+ default:
+ state = null;
+ break;
+ }
+
+ return createAffectedComponentEntity((InstantiatedVersionedComponent) localComponent, localComponent.getComponentType().name(), state);
+ })
+ .collect(Collectors.toCollection(HashSet::new));
+
+ for (final FlowDifference difference : comparison.getDifferences()) {
+ // Ignore these as local differences for now because we can't do anything with them
+ if (difference.getDifferenceType() == DifferenceType.BUNDLE_CHANGED) {
+ continue;
+ }
+
+ // Ignore differences for adding remote ports
+ if (FlowDifferenceFilters.isAddedOrRemovedRemotePort(difference)) {
+ continue;
+ }
+
+ if (FlowDifferenceFilters.isIgnorableVersionedFlowCoordinateChange(difference)) {
+ continue;
+ }
+
+ final VersionedComponent localComponent = difference.getComponentA();
+ if (localComponent == null) {
+ continue;
+ }
+
+ // If any Process Group is removed, consider all components below that Process Group as affected components
+ if (difference.getDifferenceType() == DifferenceType.COMPONENT_REMOVED && localComponent.getComponentType() == org.apache.nifi.registry.flow.ComponentType.PROCESS_GROUP) {
+ final String localGroupId = ((InstantiatedVersionedProcessGroup) localComponent).getInstanceId();
+ final ProcessGroup localGroup = processGroupDAO.getProcessGroup(localGroupId);
+
+ localGroup.findAllProcessors().stream()
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ localGroup.findAllFunnels().stream()
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ localGroup.findAllInputPorts().stream()
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ localGroup.findAllOutputPorts().stream()
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ localGroup.findAllRemoteProcessGroups().stream()
+ .flatMap(rpg -> Stream.concat(rpg.getInputPorts().stream(), rpg.getOutputPorts().stream()))
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ localGroup.findAllControllerServices().stream()
+ .map(comp -> createAffectedComponentEntity(comp))
+ .forEach(affectedComponents::add);
+ }
+
+ if (localComponent.getComponentType() == org.apache.nifi.registry.flow.ComponentType.CONTROLLER_SERVICE) {
+ final String serviceId = ((InstantiatedVersionedControllerService) localComponent).getInstanceId();
+ final ControllerServiceNode serviceNode = controllerServiceDAO.getControllerService(serviceId);
+
+ final List<ControllerServiceNode> referencingServices = serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
+ for (final ControllerServiceNode referencingService : referencingServices) {
+ affectedComponents.add(createAffectedComponentEntity(referencingService));
+ }
+
+ final List<ProcessorNode> referencingProcessors = serviceNode.getReferences().findRecursiveReferences(ProcessorNode.class);
+ for (final ProcessorNode referencingProcessor : referencingProcessors) {
+ affectedComponents.add(createAffectedComponentEntity(referencingProcessor));
+ }
+ }
+ }
+
+ // Create a map from versioned component ID to the connectable components that share that ID
+ final Map<String, List<Connectable>> connectablesByVersionId = new HashMap<>();
+ mapToConnectableId(group.findAllFunnels(), connectablesByVersionId);
+ mapToConnectableId(group.findAllInputPorts(), connectablesByVersionId);
+ mapToConnectableId(group.findAllOutputPorts(), connectablesByVersionId);
+ mapToConnectableId(group.findAllProcessors(), connectablesByVersionId);
+
+ final List<RemoteGroupPort> remotePorts = new ArrayList<>();
+ for (final RemoteProcessGroup rpg : group.findAllRemoteProcessGroups()) {
+ remotePorts.addAll(rpg.getInputPorts());
+ remotePorts.addAll(rpg.getOutputPorts());
+ }
+ mapToConnectableId(remotePorts, connectablesByVersionId);
+
+ // If any connection is added or modified, we need to stop both the source (if it exists in the flow currently)
+ // and the destination (if it exists in the flow currently).
+ for (final FlowDifference difference : comparison.getDifferences()) {
+ VersionedComponent component = difference.getComponentA();
+ if (component == null) {
+ component = difference.getComponentB();
+ }
+
+ if (component.getComponentType() != org.apache.nifi.registry.flow.ComponentType.CONNECTION) {
+ continue;
+ }
+
+ final VersionedConnection connection = (VersionedConnection) component;
+
+ final String sourceVersionedId = connection.getSource().getId();
+ final List<Connectable> sources = connectablesByVersionId.get(sourceVersionedId);
+ if (sources != null) {
+ for (final Connectable source : sources) {
+ affectedComponents.add(createAffectedComponentEntity(source));
+ }
+ }
+
+ final String destinationVersionId = connection.getDestination().getId();
+ final List<Connectable> destinations = connectablesByVersionId.get(destinationVersionId);
+ if (destinations != null) {
+ for (final Connectable destination : destinations) {
+ affectedComponents.add(createAffectedComponentEntity(destination));
+ }
+ }
+ }
+
+ return affectedComponents;
+ }
+
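+ // Indexes the given connectables by their versioned component ID (computing a deterministic, name-based UUID
+ // when none has been assigned) so that connection differences can be mapped back to live components.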
+ private void mapToConnectableId(final Collection<? extends Connectable> connectables, final Map<String, List<Connectable>> destination) {
+ for (final Connectable connectable : connectables) {
+ final Optional<String> versionedIdOption = connectable.getVersionedComponentId();
+
+ // Determine the Versioned ID by using the ID that is assigned, if one is; otherwise, calculate it from the
+ // component's identifier. This allows us to map connectables that are not currently under version control,
+ // which matters when changing flow versions: a component may be running locally even though it does not exist
+ // in the Versioned Flow, and we still need to be able to create an AffectedComponentDTO for it.
+ final String versionedId;
+ if (versionedIdOption.isPresent()) {
+ versionedId = versionedIdOption.get();
+ } else {
+ versionedId = UUID.nameUUIDFromBytes(connectable.getIdentifier().getBytes(StandardCharsets.UTF_8)).toString();
+ }
+
+ final List<Connectable> byVersionedId = destination.computeIfAbsent(versionedId, key -> new ArrayList<>());
+ byVersionedId.add(connectable);
+ }
+ }
+
+
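+ // Builds an AffectedComponentEntity for a live connectable, including its revision, permissions, reference
+ // type, scheduled state, and owning group.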
+ private AffectedComponentEntity createAffectedComponentEntity(final Connectable connectable) {
+ final AffectedComponentEntity entity = new AffectedComponentEntity();
+ entity.setRevision(dtoFactory.createRevisionDTO(revisionManager.getRevision(connectable.getIdentifier())));
+ entity.setId(connectable.getIdentifier());
+
+ final Authorizable authorizable = getAuthorizable(connectable);
+ final PermissionsDTO permissionsDto = dtoFactory.createPermissionsDto(authorizable);
+ entity.setPermissions(permissionsDto);
+
+ final AffectedComponentDTO dto = new AffectedComponentDTO();
+ dto.setId(connectable.getIdentifier());
+ dto.setReferenceType(connectable.getConnectableType().name());
+ dto.setState(connectable.getScheduledState().name());
+
+ final String groupId = connectable instanceof RemoteGroupPort ? ((RemoteGroupPort) connectable).getRemoteProcessGroup().getIdentifier() : connectable.getProcessGroupIdentifier();
+ dto.setProcessGroupId(groupId);
+
+ entity.setComponent(dto);
+ return entity;
+ }
+
+ private AffectedComponentEntity createAffectedComponentEntity(final ControllerServiceNode serviceNode) {
+ final AffectedComponentEntity entity = new AffectedComponentEntity();
+ entity.setRevision(dtoFactory.createRevisionDTO(revisionManager.getRevision(serviceNode.getIdentifier())));
+ entity.setId(serviceNode.getIdentifier());
+
+ final Authorizable authorizable = authorizableLookup.getControllerService(serviceNode.getIdentifier()).getAuthorizable();
+ final PermissionsDTO permissionsDto = dtoFactory.createPermissionsDto(authorizable);
+ entity.setPermissions(permissionsDto);
+
+ final AffectedComponentDTO dto = new AffectedComponentDTO();
+ dto.setId(serviceNode.getIdentifier());
+ dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
+ dto.setProcessGroupId(serviceNode.getProcessGroupIdentifier());
+ dto.setState(serviceNode.getState().name());
+
+ entity.setComponent(dto);
+ return entity;
+ }
+
+ private AffectedComponentEntity createAffectedComponentEntity(final InstantiatedVersionedComponent instance, final String componentTypeName, final String componentState) {
+ final AffectedComponentEntity entity = new AffectedComponentEntity();
+ entity.setRevision(dtoFactory.createRevisionDTO(revisionManager.getRevision(instance.getInstanceId())));
+ entity.setId(instance.getInstanceId());
+
+ final Authorizable authorizable = getAuthorizable(componentTypeName, instance);
+ final PermissionsDTO permissionsDto = dtoFactory.createPermissionsDto(authorizable);
+ entity.setPermissions(permissionsDto);
+
+ final AffectedComponentDTO dto = new AffectedComponentDTO();
+ dto.setId(instance.getInstanceId());
+ dto.setReferenceType(componentTypeName);
+ dto.setProcessGroupId(instance.getInstanceGroupId());
+ dto.setState(componentState);
+
+ entity.setComponent(dto);
+ return entity;
+ }
+
+
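+ // Remote ports are authorized through their Remote Process Group; all other connectables are authorized as
+ // local connectables.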
+ private Authorizable getAuthorizable(final Connectable connectable) {
+ switch (connectable.getConnectableType()) {
+ case REMOTE_INPUT_PORT:
+ case REMOTE_OUTPUT_PORT:
+ final String rpgId = ((RemoteGroupPort) connectable).getRemoteProcessGroup().getIdentifier();
+ return authorizableLookup.getRemoteProcessGroup(rpgId);
+ default:
+ return authorizableLookup.getLocalConnectable(connectable.getIdentifier());
+ }
+ }
+
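+ // Resolves the Authorizable for a versioned component instance based on its component type name; remote ports
+ // are authorized through their parent Remote Process Group, and unrecognized types yield null.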
+ private Authorizable getAuthorizable(final String componentTypeName, final InstantiatedVersionedComponent versionedComponent) {
+ final String componentId = versionedComponent.getInstanceId();
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.CONTROLLER_SERVICE.name())) {
+ return authorizableLookup.getControllerService(componentId).getAuthorizable();
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.CONNECTION.name())) {
+ return authorizableLookup.getConnection(componentId).getAuthorizable();
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.FUNNEL.name())) {
+ return authorizableLookup.getFunnel(componentId);
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.INPUT_PORT.name())) {
+ return authorizableLookup.getInputPort(componentId);
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.OUTPUT_PORT.name())) {
+ return authorizableLookup.getOutputPort(componentId);
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.LABEL.name())) {
+ return authorizableLookup.getLabel(componentId);
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.PROCESS_GROUP.name())) {
+ return authorizableLookup.getProcessGroup(componentId).getAuthorizable();
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.PROCESSOR.name())) {
+ return authorizableLookup.getProcessor(componentId).getAuthorizable();
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.REMOTE_INPUT_PORT.name())) {
+ return authorizableLookup.getRemoteProcessGroup(versionedComponent.getInstanceGroupId());
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.REMOTE_OUTPUT_PORT.name())) {
+ return authorizableLookup.getRemoteProcessGroup(versionedComponent.getInstanceGroupId());
+ }
+
+ if (componentTypeName.equals(org.apache.nifi.registry.flow.ComponentType.REMOTE_PROCESS_GROUP.name())) {
+ return authorizableLookup.getRemoteProcessGroup(componentId);
+ }
+
+ return null;
+ }
+
+ @Override
+ public VersionedFlowSnapshot getVersionedFlowSnapshot(final VersionControlInformationDTO versionControlInfo, final boolean fetchRemoteFlows) {
+ final FlowRegistry flowRegistry = flowRegistryClient.getFlowRegistry(versionControlInfo.getRegistryId());
+ if (flowRegistry == null) {
+ throw new ResourceNotFoundException("Could not find any Flow Registry registered with identifier " + versionControlInfo.getRegistryId());
+ }
+
+ final VersionedFlowSnapshot snapshot;
+ try {
+ snapshot = flowRegistry.getFlowContents(versionControlInfo.getBucketId(), versionControlInfo.getFlowId(), versionControlInfo.getVersion(), fetchRemoteFlows, NiFiUserUtils.getNiFiUser());
+ } catch (final NiFiRegistryException | IOException e) {
+ logger.error(e.getMessage(), e);
+ throw new IllegalArgumentException("The Flow Registry with ID " + versionControlInfo.getRegistryId() + " reports that no Flow exists with Bucket "
+ + versionControlInfo.getBucketId() + ", Flow " + versionControlInfo.getFlowId() + ", Version " + versionControlInfo.getVersion());
+ }
+
+ return snapshot;
+ }
+
+ @Override
+ public String getFlowRegistryName(final String flowRegistryId) {
+ final FlowRegistry flowRegistry = flowRegistryClient.getFlowRegistry(flowRegistryId);
+ return flowRegistry == null ? flowRegistryId : flowRegistry.getName();
+ }
+
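+ // Gathers the current revision of every component under the group (connections, controller services, funnels,
+ // ports, labels, child groups, processors, and remote group ports), optionally including the group's own revision.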
+ private List<Revision> getComponentRevisions(final ProcessGroup processGroup, final boolean includeGroupRevision) {
+ final List<Revision> revisions = new ArrayList<>();
+ if (includeGroupRevision) {
+ revisions.add(revisionManager.getRevision(processGroup.getIdentifier()));
+ }
+
+ processGroup.findAllConnections().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllControllerServices().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllFunnels().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllInputPorts().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllOutputPorts().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllLabels().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllProcessGroups().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllProcessors().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllRemoteProcessGroups().stream()
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+ processGroup.findAllRemoteProcessGroups().stream()
+ .flatMap(rpg -> Stream.concat(rpg.getInputPorts().stream(), rpg.getOutputPorts().stream()))
+ .map(component -> revisionManager.getRevision(component.getIdentifier()))
+ .forEach(revisions::add);
+
+ return revisions;
+ }
+
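+ // Claims the revisions of the group and all of its components, applies the proposed flow snapshot through the
+ // DAO, increments each claimed revision, saves the flow, and returns the updated Process Group entity.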
+ @Override
+ public ProcessGroupEntity updateProcessGroupContents(final Revision revision, final String groupId, final VersionControlInformationDTO versionControlInfo,
+ final VersionedFlowSnapshot proposedFlowSnapshot, final String componentIdSeed, final boolean verifyNotModified, final boolean updateSettings, final boolean updateDescendantVersionedFlows) {
+
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final ProcessGroup processGroup = processGroupDAO.getProcessGroup(groupId);
+ final List<Revision> revisions = getComponentRevisions(processGroup, false);
+ revisions.add(revision);
+
+ final RevisionClaim revisionClaim = new StandardRevisionClaim(revisions);
+
+ final RevisionUpdate<ProcessGroupDTO> revisionUpdate = revisionManager.updateRevision(revisionClaim, user, new UpdateRevisionTask<ProcessGroupDTO>() {
+ @Override
+ public RevisionUpdate<ProcessGroupDTO> update() {
+ // update the Process Group
+ processGroupDAO.updateProcessGroupFlow(groupId, proposedFlowSnapshot, versionControlInfo, componentIdSeed, verifyNotModified, updateSettings, updateDescendantVersionedFlows);
+
+ // update the revisions
+ final Set<Revision> updatedRevisions = revisions.stream()
+ .map(rev -> revisionManager.getRevision(rev.getComponentId()).incrementRevision(revision.getClientId()))
+ .collect(Collectors.toSet());
+
+ // save
+ controllerFacade.save();
+
+ // gather details for response
+ final ProcessGroupDTO dto = dtoFactory.createProcessGroupDto(processGroup);
+
+ final Revision updatedRevision = revisionManager.getRevision(groupId).incrementRevision(revision.getClientId());
+ final FlowModification lastModification = new FlowModification(updatedRevision, user.getIdentity());
+ return new StandardRevisionUpdate<>(dto, lastModification, updatedRevisions);
+ }
+ });
+
+ final FlowModification lastModification = revisionUpdate.getLastModification();
+
+ final PermissionsDTO permissions = dtoFactory.createPermissionsDto(processGroup);
+ final RevisionDTO updatedRevision = dtoFactory.createRevisionDTO(lastModification);
+ final ProcessGroupStatusDTO status = dtoFactory.createConciseProcessGroupStatusDto(controllerFacade.getProcessGroupStatus(processGroup.getIdentifier()));
+ final List<BulletinDTO> bulletins = dtoFactory.createBulletinDtos(bulletinRepository.findBulletinsForSource(processGroup.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ return entityFactory.createProcessGroupEntity(revisionUpdate.getComponent(), updatedRevision, permissions, status, bulletinEntities);
+ }
+
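+ // Resolves the Authorizable for the action's source component (falling back to the controller if the component
+ // no longer exists) and checks READ access for the current user.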
+ private AuthorizationResult authorizeAction(final Action action) {
+ final String sourceId = action.getSourceId();
+ final Component type = action.getSourceType();
+
+ Authorizable authorizable;
+ try {
+ switch (type) {
+ case Processor:
+ authorizable = authorizableLookup.getProcessor(sourceId).getAuthorizable();
+ break;
+ case ReportingTask:
+ authorizable = authorizableLookup.getReportingTask(sourceId).getAuthorizable();
+ break;
+ case ControllerService:
+ authorizable = authorizableLookup.getControllerService(sourceId).getAuthorizable();
+ break;
+ case Controller:
+ authorizable = controllerFacade;
+ break;
+ case InputPort:
+ authorizable = authorizableLookup.getInputPort(sourceId);
+ break;
+ case OutputPort:
+ authorizable = authorizableLookup.getOutputPort(sourceId);
+ break;
+ case ProcessGroup:
+ authorizable = authorizableLookup.getProcessGroup(sourceId).getAuthorizable();
+ break;
+ case RemoteProcessGroup:
+ authorizable = authorizableLookup.getRemoteProcessGroup(sourceId);
+ break;
+ case Funnel:
+ authorizable = authorizableLookup.getFunnel(sourceId);
+ break;
+ case Connection:
+ authorizable = authorizableLookup.getConnection(sourceId).getAuthorizable();
+ break;
+ case AccessPolicy:
+ authorizable = authorizableLookup.getAccessPolicyById(sourceId);
+ break;
+ case User:
+ case UserGroup:
+ authorizable = authorizableLookup.getTenant();
+ break;
+ default:
+ throw new WebApplicationException(Response.serverError().entity("An unexpected type of component is the source of this action.").build());
+ }
+ } catch (final ResourceNotFoundException e) {
+ // if the underlying component is gone, use the controller to see if permissions should be allowed
+ authorizable = controllerFacade;
+ }
+
+ // perform the authorization
+ return authorizable.checkAuthorization(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
+ }
+
+ @Override
+ public HistoryDTO getActions(final HistoryQueryDTO historyQueryDto) {
+ // extract the query criteria
+ final HistoryQuery historyQuery = new HistoryQuery();
+ historyQuery.setStartDate(historyQueryDto.getStartDate());
+ historyQuery.setEndDate(historyQueryDto.getEndDate());
+ historyQuery.setSourceId(historyQueryDto.getSourceId());
+ historyQuery.setUserIdentity(historyQueryDto.getUserIdentity());
+ historyQuery.setOffset(historyQueryDto.getOffset());
+ historyQuery.setCount(historyQueryDto.getCount());
+ historyQuery.setSortColumn(historyQueryDto.getSortColumn());
+ historyQuery.setSortOrder(historyQueryDto.getSortOrder());
+
+ // perform the query
+ final History history = auditService.getActions(historyQuery);
+
+ // only retain authorized actions
+ final HistoryDTO historyDto = dtoFactory.createHistoryDto(history);
+ if (history.getActions() != null) {
+ final List<ActionEntity> actionEntities = new ArrayList<>();
+ for (final Action action : history.getActions()) {
+ final AuthorizationResult result = authorizeAction(action);
+ actionEntities.add(entityFactory.createActionEntity(dtoFactory.createActionDto(action), Result.Approved.equals(result.getResult())));
+ }
+ historyDto.setActions(actionEntities);
+ }
+
+ // create the response
+ return historyDto;
+ }
+
+ @Override
+ public ActionEntity getAction(final Integer actionId) {
+ // get the action
+ final Action action = auditService.getAction(actionId);
+
+ // ensure the action was found
+ if (action == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find action with id '%s'.", actionId));
+ }
+
+ final AuthorizationResult result = authorizeAction(action);
+ final boolean authorized = Result.Approved.equals(result.getResult());
+ if (!authorized) {
+ throw new AccessDeniedException(result.getExplanation());
+ }
+
+ // return the action
+ return entityFactory.createActionEntity(dtoFactory.createActionDto(action), authorized);
+ }
+
+ @Override
+ public ComponentHistoryDTO getComponentHistory(final String componentId) {
+ final Map<String, PropertyHistoryDTO> propertyHistoryDtos = new LinkedHashMap<>();
+ final Map<String, List<PreviousValue>> propertyHistory = auditService.getPreviousValues(componentId);
+
+ for (final Map.Entry<String, List<PreviousValue>> entry : propertyHistory.entrySet()) {
+ final List<PreviousValueDTO> previousValueDtos = new ArrayList<>();
+
+ for (final PreviousValue previousValue : entry.getValue()) {
+ final PreviousValueDTO dto = new PreviousValueDTO();
+ dto.setPreviousValue(previousValue.getPreviousValue());
+ dto.setTimestamp(previousValue.getTimestamp());
+ dto.setUserIdentity(previousValue.getUserIdentity());
+ previousValueDtos.add(dto);
+ }
+
+ if (!previousValueDtos.isEmpty()) {
+ final PropertyHistoryDTO propertyHistoryDto = new PropertyHistoryDTO();
+ propertyHistoryDto.setPreviousValues(previousValueDtos);
+ propertyHistoryDtos.put(entry.getKey(), propertyHistoryDto);
+ }
+ }
+
+ final ComponentHistoryDTO history = new ComponentHistoryDTO();
+ history.setComponentId(componentId);
+ history.setPropertyHistory(propertyHistoryDtos);
+
+ return history;
+ }
+
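+ // Builds the processor diagnostics and then removes any controller, system, or flow details, connections, and
+ // referenced controller services that the current user is not authorized to read.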
+ @Override
+ public ProcessorDiagnosticsEntity getProcessorDiagnostics(final String id) {
+ final ProcessorNode processor = processorDAO.getProcessor(id);
+ final ProcessorStatus processorStatus = controllerFacade.getProcessorStatus(id);
+
+ // Generate Processor Diagnostics
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ final ProcessorDiagnosticsDTO dto = controllerFacade.getProcessorDiagnostics(processor, processorStatus, bulletinRepository, serviceId -> {
+ final ControllerServiceNode serviceNode = controllerServiceDAO.getControllerService(serviceId);
+ return createControllerServiceEntity(serviceNode, Collections.emptySet());
+ });
+
+ // Filter out of the diagnostics anything that the user is not authorized to see.
+ final List<JVMDiagnosticsSnapshotDTO> jvmDiagnosticsSnapshots = new ArrayList<>();
+ final JVMDiagnosticsDTO jvmDiagnostics = dto.getJvmDiagnostics();
+ jvmDiagnosticsSnapshots.add(jvmDiagnostics.getAggregateSnapshot());
+
+ // filter controller-related information
+ final boolean canReadController = authorizableLookup.getController().isAuthorized(authorizer, RequestAction.READ, user);
+ if (!canReadController) {
+ for (final JVMDiagnosticsSnapshotDTO snapshot : jvmDiagnosticsSnapshots) {
+ snapshot.setControllerDiagnostics(null);
+ }
+ }
+
+ // filter system diagnostics information
+ final boolean canReadSystem = authorizableLookup.getSystem().isAuthorized(authorizer, RequestAction.READ, user);
+ if (!canReadSystem) {
+ for (final JVMDiagnosticsSnapshotDTO snapshot : jvmDiagnosticsSnapshots) {
+ snapshot.setSystemDiagnosticsDto(null);
+ }
+ }
+
+ final boolean canReadFlow = authorizableLookup.getFlow().isAuthorized(authorizer, RequestAction.READ, user);
+ if (!canReadFlow) {
+ for (final JVMDiagnosticsSnapshotDTO snapshot : jvmDiagnosticsSnapshots) {
+ snapshot.setFlowDiagnosticsDto(null);
+ }
+ }
+
+ // filter connections
+ final Predicate<ConnectionDiagnosticsDTO> connectionAuthorized = connectionDiagnostics -> {
+ final String connectionId = connectionDiagnostics.getConnection().getId();
+ return authorizableLookup.getConnection(connectionId).getAuthorizable().isAuthorized(authorizer, RequestAction.READ, user);
+ };
+
+ // Filter incoming connections to those the user is authorized to READ
+ final Set<ConnectionDiagnosticsDTO> incoming = dto.getIncomingConnections();
+ final Set<ConnectionDiagnosticsDTO> filteredIncoming = incoming.stream()
+ .filter(connectionAuthorized)
+ .collect(Collectors.toSet());
+
+ dto.setIncomingConnections(filteredIncoming);
+
+ // Filter outgoing connections to those the user is authorized to READ
+ final Set<ConnectionDiagnosticsDTO> outgoing = dto.getOutgoingConnections();
+ final Set<ConnectionDiagnosticsDTO> filteredOutgoing = outgoing.stream()
+ .filter(connectionAuthorized)
+ .collect(Collectors.toSet());
+ dto.setOutgoingConnections(filteredOutgoing);
+
+ // Filter the controller services referenced by the Processor to those the user is authorized to READ
+ final Set<ControllerServiceDiagnosticsDTO> referencedServices = dto.getReferencedControllerServices();
+ final Set<ControllerServiceDiagnosticsDTO> filteredReferencedServices = referencedServices.stream()
+ .filter(csDiagnostics -> {
+ final String csId = csDiagnostics.getControllerService().getId();
+ return authorizableLookup.getControllerService(csId).getAuthorizable().isAuthorized(authorizer, RequestAction.READ, user);
+ })
+ .map(csDiagnostics -> {
+ // Filter out any referencing components because those are generally not relevant in this context.
+ final ControllerServiceDTO serviceDto = csDiagnostics.getControllerService().getComponent();
+ if (serviceDto != null) {
+ serviceDto.setReferencingComponents(null);
+ }
+ return csDiagnostics;
+ })
+ .collect(Collectors.toSet());
+ dto.setReferencedControllerServices(filteredReferencedServices);
+
+ final Revision revision = revisionManager.getRevision(id);
+ final RevisionDTO revisionDto = dtoFactory.createRevisionDTO(revision);
+ final PermissionsDTO permissionsDto = dtoFactory.createPermissionsDto(processor);
+ final List<BulletinEntity> bulletins = bulletinRepository.findBulletinsForSource(id).stream()
+ .map(bulletin -> dtoFactory.createBulletinDto(bulletin))
+ .map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissionsDto.getCanRead()))
+ .collect(Collectors.toList());
+
+ final ProcessorStatusDTO processorStatusDto = dtoFactory.createProcessorStatusDto(controllerFacade.getProcessorStatus(processor.getIdentifier()));
+ return entityFactory.createProcessorDiagnosticsEntity(dto, revisionDto, permissionsDto, processorStatusDto, bulletins);
+ }
+
+ @Override
+ public boolean isClustered() {
+ return controllerFacade.isClustered();
+ }
+
+ @Override
+ public String getNodeId() {
+ final NodeIdentifier nodeId = controllerFacade.getNodeId();
+ if (nodeId != null) {
+ return nodeId.getId();
+ } else {
+ return null;
+ }
+ }
+
+ @Override
+ public ClusterDTO getCluster() {
+ // create cluster summary dto
+ final ClusterDTO clusterDto = new ClusterDTO();
+
+ // set current time
+ clusterDto.setGenerated(new Date());
+
+ // create node dtos
+ final List<NodeDTO> nodeDtos = clusterCoordinator.getNodeIdentifiers().stream()
+ .map(nodeId -> getNode(nodeId))
+ .collect(Collectors.toList());
+ clusterDto.setNodes(nodeDtos);
+
+ return clusterDto;
+ }
+
+ @Override
+ public NodeDTO getNode(final String nodeId) {
+ final NodeIdentifier nodeIdentifier = clusterCoordinator.getNodeIdentifier(nodeId);
+ return getNode(nodeIdentifier);
+ }
+
+ private NodeDTO getNode(final NodeIdentifier nodeId) {
+ final NodeConnectionStatus nodeStatus = clusterCoordinator.getConnectionStatus(nodeId);
+ final List<NodeEvent> events = clusterCoordinator.getNodeEvents(nodeId);
+ final Set<String> roles = getRoles(nodeId);
+ final NodeHeartbeat heartbeat = heartbeatMonitor.getLatestHeartbeat(nodeId);
+ return dtoFactory.createNodeDTO(nodeId, nodeStatus, heartbeat, events, roles);
+ }
+
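+ // A node holds a cluster role when it is the elected leader for that role, matched by socket address and port.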
+ private Set<String> getRoles(final NodeIdentifier nodeId) {
+ final Set<String> roles = new HashSet<>();
+ final String nodeAddress = nodeId.getSocketAddress() + ":" + nodeId.getSocketPort();
+
+ for (final String roleName : ClusterRoles.getAllRoles()) {
+ final String leader = leaderElectionManager.getLeader(roleName);
+ if (leader == null) {
+ continue;
+ }
+
+ if (leader.equals(nodeAddress)) {
+ roles.add(roleName);
+ }
+ }
+
+ return roles;
+ }
+
+ @Override
+ public void deleteNode(final String nodeId) {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ if (user == null) {
+ throw new WebApplicationException(new Throwable("Unable to access details for current user."));
+ }
+
+ final String userDn = user.getIdentity();
+ final NodeIdentifier nodeIdentifier = clusterCoordinator.getNodeIdentifier(nodeId);
+ if (nodeIdentifier == null) {
+ throw new UnknownNodeException("Cannot remove Node with ID " + nodeId + " because it is not part of the cluster");
+ }
+
+ final NodeConnectionStatus nodeConnectionStatus = clusterCoordinator.getConnectionStatus(nodeIdentifier);
+ if (!nodeConnectionStatus.getState().equals(NodeConnectionState.OFFLOADED) && !nodeConnectionStatus.getState().equals(NodeConnectionState.DISCONNECTED)) {
+ throw new IllegalNodeDeletionException("Cannot remove Node with ID " + nodeId +
+ " because it is not disconnected or offloaded, current state = " + nodeConnectionStatus.getState());
+ }
+
+ clusterCoordinator.removeNode(nodeIdentifier, userDn);
+ heartbeatMonitor.removeHeartbeat(nodeIdentifier);
+ }
+
+ /* reusable function declarations for converting ids to tenant entities */
+ private Function<String, TenantEntity> mapUserGroupIdToTenantEntity(final boolean enforceGroupExistence) {
+ return userGroupId -> {
+ final RevisionDTO userGroupRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(userGroupId));
+
+ final Group group;
+ if (enforceGroupExistence || userGroupDAO.hasUserGroup(userGroupId)) {
+ group = userGroupDAO.getUserGroup(userGroupId);
+ } else {
+ group = new Group.Builder().identifier(userGroupId).name("Group ID - " + userGroupId + " (removed externally)").build();
+ }
+
+ return entityFactory.createTenantEntity(dtoFactory.createTenantDTO(group), userGroupRevision,
+ dtoFactory.createPermissionsDto(authorizableLookup.getTenant()));
+ };
+ }
+
+ private Function<String, TenantEntity> mapUserIdToTenantEntity(final boolean enforceUserExistence) {
+ return userId -> {
+ final RevisionDTO userRevision = dtoFactory.createRevisionDTO(revisionManager.getRevision(userId));
+
+ final User user;
+ if (enforceUserExistence || userDAO.hasUser(userId)) {
+ user = userDAO.getUser(userId);
+ } else {
+ user = new User.Builder().identifier(userId).identity("User ID - " + userId + " (removed externally)").build();
+ }
+
+ return entityFactory.createTenantEntity(dtoFactory.createTenantDTO(user), userRevision,
+ dtoFactory.createPermissionsDto(authorizableLookup.getTenant()));
+ };
+ }
+
+
+ /* setters */
+ public void setProperties(final NiFiProperties properties) {
+ this.properties = properties;
+ }
+
+ public void setControllerFacade(final ControllerFacade controllerFacade) {
+ this.controllerFacade = controllerFacade;
+ }
+
+ public void setRemoteProcessGroupDAO(final RemoteProcessGroupDAO remoteProcessGroupDAO) {
+ this.remoteProcessGroupDAO = remoteProcessGroupDAO;
+ }
+
+ public void setLabelDAO(final LabelDAO labelDAO) {
+ this.labelDAO = labelDAO;
+ }
+
+ public void setFunnelDAO(final FunnelDAO funnelDAO) {
+ this.funnelDAO = funnelDAO;
+ }
+
+ public void setSnippetDAO(final SnippetDAO snippetDAO) {
+ this.snippetDAO = snippetDAO;
+ }
+
+ public void setProcessorDAO(final ProcessorDAO processorDAO) {
+ this.processorDAO = processorDAO;
+ }
+
+ public void setConnectionDAO(final ConnectionDAO connectionDAO) {
+ this.connectionDAO = connectionDAO;
+ }
+
+ public void setAuditService(final AuditService auditService) {
+ this.auditService = auditService;
+ }
+
+ public void setRevisionManager(final RevisionManager revisionManager) {
+ this.revisionManager = revisionManager;
+ }
+
+ public void setDtoFactory(final DtoFactory dtoFactory) {
+ this.dtoFactory = dtoFactory;
+ }
+
+ public void setEntityFactory(final EntityFactory entityFactory) {
+ this.entityFactory = entityFactory;
+ }
+
+ public void setInputPortDAO(final PortDAO inputPortDAO) {
+ this.inputPortDAO = inputPortDAO;
+ }
+
+ public void setOutputPortDAO(final PortDAO outputPortDAO) {
+ this.outputPortDAO = outputPortDAO;
+ }
+
+ public void setProcessGroupDAO(final ProcessGroupDAO processGroupDAO) {
+ this.processGroupDAO = processGroupDAO;
+ }
+
+ public void setControllerServiceDAO(final ControllerServiceDAO controllerServiceDAO) {
+ this.controllerServiceDAO = controllerServiceDAO;
+ }
+
+ public void setReportingTaskDAO(final ReportingTaskDAO reportingTaskDAO) {
+ this.reportingTaskDAO = reportingTaskDAO;
+ }
+
+ public void setTemplateDAO(final TemplateDAO templateDAO) {
+ this.templateDAO = templateDAO;
+ }
+
+ public void setSnippetUtils(final SnippetUtils snippetUtils) {
+ this.snippetUtils = snippetUtils;
+ }
+
+ public void setAuthorizableLookup(final AuthorizableLookup authorizableLookup) {
+ this.authorizableLookup = authorizableLookup;
+ }
+
+ public void setAuthorizer(final Authorizer authorizer) {
+ this.authorizer = authorizer;
+ }
+
+ public void setUserDAO(final UserDAO userDAO) {
+ this.userDAO = userDAO;
+ }
+
+ public void setUserGroupDAO(final UserGroupDAO userGroupDAO) {
+ this.userGroupDAO = userGroupDAO;
+ }
+
+ public void setAccessPolicyDAO(final AccessPolicyDAO accessPolicyDAO) {
+ this.accessPolicyDAO = accessPolicyDAO;
+ }
+
+ public void setClusterCoordinator(final ClusterCoordinator coordinator) {
+ this.clusterCoordinator = coordinator;
+ }
+
+ public void setHeartbeatMonitor(final HeartbeatMonitor heartbeatMonitor) {
+ this.heartbeatMonitor = heartbeatMonitor;
+ }
+
+ public void setBulletinRepository(final BulletinRepository bulletinRepository) {
+ this.bulletinRepository = bulletinRepository;
+ }
+
+ public void setLeaderElectionManager(final LeaderElectionManager leaderElectionManager) {
+ this.leaderElectionManager = leaderElectionManager;
+ }
+
+ public void setRegistryDAO(RegistryDAO registryDao) {
+ this.registryDAO = registryDao;
+ }
+
+ public void setFlowRegistryClient(FlowRegistryClient flowRegistryClient) {
+ this.flowRegistryClient = flowRegistryClient;
+ }
+}
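
The facade above takes every collaborator through the setters in its final block rather than through a constructor, so it is only usable once all of those dependencies have been attached. A minimal hand-wiring sketch, using placeholder instances for each collaborator (the real wiring is typically done by the web application's Spring context, which is not part of this diff):

    StandardNiFiServiceFacade facade = new StandardNiFiServiceFacade();
    facade.setProperties(properties);                 // NiFiProperties for this instance
    facade.setControllerFacade(controllerFacade);     // bridge to the flow controller
    facade.setRevisionManager(revisionManager);       // optimistic-locking revisions
    facade.setDtoFactory(dtoFactory);                 // DtoFactory, added in the next file below
    facade.setEntityFactory(entityFactory);
    facade.setAuthorizer(authorizer);
    facade.setAuthorizableLookup(authorizableLookup);
    facade.setUserDAO(userDAO);
    facade.setProcessGroupDAO(processGroupDAO);
    // ...the remaining DAO, cluster, and registry setters follow the same pattern.
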
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
new file mode 100644
index 0000000..2943e10
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/DtoFactory.java
@@ -0,0 +1,4354 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.web.api.dto;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang3.ClassUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.action.Action;
+import org.apache.nifi.action.component.details.ComponentDetails;
+import org.apache.nifi.action.component.details.ExtensionDetails;
+import org.apache.nifi.action.component.details.FlowChangeExtensionDetails;
+import org.apache.nifi.action.component.details.FlowChangeRemoteProcessGroupDetails;
+import org.apache.nifi.action.component.details.RemoteProcessGroupDetails;
+import org.apache.nifi.action.details.ActionDetails;
+import org.apache.nifi.action.details.ConfigureDetails;
+import org.apache.nifi.action.details.ConnectDetails;
+import org.apache.nifi.action.details.FlowChangeConfigureDetails;
+import org.apache.nifi.action.details.FlowChangeConnectDetails;
+import org.apache.nifi.action.details.FlowChangeMoveDetails;
+import org.apache.nifi.action.details.FlowChangePurgeDetails;
+import org.apache.nifi.action.details.MoveDetails;
+import org.apache.nifi.action.details.PurgeDetails;
+import org.apache.nifi.annotation.behavior.Restricted;
+import org.apache.nifi.annotation.behavior.Restriction;
+import org.apache.nifi.annotation.behavior.Stateful;
+import org.apache.nifi.annotation.documentation.CapabilityDescription;
+import org.apache.nifi.annotation.documentation.DeprecationNotice;
+import org.apache.nifi.annotation.documentation.Tags;
+import org.apache.nifi.authorization.AccessPolicy;
+import org.apache.nifi.authorization.Authorizer;
+import org.apache.nifi.authorization.AuthorizerCapabilityDetection;
+import org.apache.nifi.authorization.Group;
+import org.apache.nifi.authorization.RequestAction;
+import org.apache.nifi.authorization.Resource;
+import org.apache.nifi.authorization.User;
+import org.apache.nifi.authorization.resource.Authorizable;
+import org.apache.nifi.authorization.resource.ComponentAuthorizable;
+import org.apache.nifi.authorization.resource.OperationAuthorizable;
+import org.apache.nifi.authorization.user.NiFiUser;
+import org.apache.nifi.authorization.user.NiFiUserUtils;
+import org.apache.nifi.bundle.Bundle;
+import org.apache.nifi.bundle.BundleCoordinate;
+import org.apache.nifi.bundle.BundleDetails;
+import org.apache.nifi.cluster.coordination.heartbeat.NodeHeartbeat;
+import org.apache.nifi.cluster.coordination.node.NodeConnectionStatus;
+import org.apache.nifi.cluster.event.NodeEvent;
+import org.apache.nifi.cluster.manager.StatusMerger;
+import org.apache.nifi.cluster.protocol.NodeIdentifier;
+import org.apache.nifi.components.AllowableValue;
+import org.apache.nifi.components.PropertyDescriptor;
+import org.apache.nifi.components.ValidationResult;
+import org.apache.nifi.components.state.Scope;
+import org.apache.nifi.components.state.StateMap;
+import org.apache.nifi.components.validation.ValidationStatus;
+import org.apache.nifi.connectable.Connectable;
+import org.apache.nifi.connectable.ConnectableType;
+import org.apache.nifi.connectable.Connection;
+import org.apache.nifi.connectable.Funnel;
+import org.apache.nifi.connectable.Port;
+import org.apache.nifi.connectable.Position;
+import org.apache.nifi.controller.ActiveThreadInfo;
+import org.apache.nifi.controller.ComponentNode;
+import org.apache.nifi.controller.ControllerService;
+import org.apache.nifi.controller.Counter;
+import org.apache.nifi.controller.FlowController;
+import org.apache.nifi.controller.ProcessorNode;
+import org.apache.nifi.controller.ReportingTaskNode;
+import org.apache.nifi.controller.Snippet;
+import org.apache.nifi.controller.Template;
+import org.apache.nifi.controller.label.Label;
+import org.apache.nifi.controller.queue.DropFlowFileState;
+import org.apache.nifi.controller.queue.DropFlowFileStatus;
+import org.apache.nifi.controller.queue.FlowFileQueue;
+import org.apache.nifi.controller.queue.FlowFileSummary;
+import org.apache.nifi.controller.queue.ListFlowFileState;
+import org.apache.nifi.controller.queue.ListFlowFileStatus;
+import org.apache.nifi.controller.queue.LoadBalanceStrategy;
+import org.apache.nifi.controller.queue.LocalQueuePartitionDiagnostics;
+import org.apache.nifi.controller.queue.QueueDiagnostics;
+import org.apache.nifi.controller.queue.QueueSize;
+import org.apache.nifi.controller.queue.RemoteQueuePartitionDiagnostics;
+import org.apache.nifi.controller.repository.FlowFileRecord;
+import org.apache.nifi.controller.repository.claim.ContentClaim;
+import org.apache.nifi.controller.repository.claim.ResourceClaim;
+import org.apache.nifi.controller.service.ControllerServiceNode;
+import org.apache.nifi.controller.service.ControllerServiceProvider;
+import org.apache.nifi.controller.state.SortedStateUtils;
+import org.apache.nifi.controller.status.ConnectionStatus;
+import org.apache.nifi.controller.status.PortStatus;
+import org.apache.nifi.controller.status.ProcessGroupStatus;
+import org.apache.nifi.controller.status.ProcessorStatus;
+import org.apache.nifi.controller.status.RemoteProcessGroupStatus;
+import org.apache.nifi.controller.status.history.GarbageCollectionHistory;
+import org.apache.nifi.controller.status.history.GarbageCollectionStatus;
+import org.apache.nifi.diagnostics.GarbageCollection;
+import org.apache.nifi.diagnostics.StorageUsage;
+import org.apache.nifi.diagnostics.SystemDiagnostics;
+import org.apache.nifi.expression.ExpressionLanguageScope;
+import org.apache.nifi.flowfile.FlowFilePrioritizer;
+import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.groups.ProcessGroup;
+import org.apache.nifi.groups.ProcessGroupCounts;
+import org.apache.nifi.groups.RemoteProcessGroup;
+import org.apache.nifi.groups.RemoteProcessGroupCounts;
+import org.apache.nifi.history.History;
+import org.apache.nifi.nar.ExtensionManager;
+import org.apache.nifi.nar.NarClassLoadersHolder;
+import org.apache.nifi.processor.Processor;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.provenance.lineage.ComputeLineageResult;
+import org.apache.nifi.provenance.lineage.ComputeLineageSubmission;
+import org.apache.nifi.provenance.lineage.LineageEdge;
+import org.apache.nifi.provenance.lineage.LineageNode;
+import org.apache.nifi.provenance.lineage.ProvenanceEventLineageNode;
+import org.apache.nifi.registry.ComponentVariableRegistry;
+import org.apache.nifi.registry.flow.FlowRegistry;
+import org.apache.nifi.registry.flow.VersionControlInformation;
+import org.apache.nifi.registry.flow.VersionedComponent;
+import org.apache.nifi.registry.flow.VersionedFlowState;
+import org.apache.nifi.registry.flow.VersionedFlowStatus;
+import org.apache.nifi.registry.flow.diff.DifferenceType;
+import org.apache.nifi.registry.flow.diff.FlowComparison;
+import org.apache.nifi.registry.flow.diff.FlowDifference;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedComponent;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedConnection;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedControllerService;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedFunnel;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedLabel;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedPort;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessGroup;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedProcessor;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedRemoteGroupPort;
+import org.apache.nifi.registry.flow.mapping.InstantiatedVersionedRemoteProcessGroup;
+import org.apache.nifi.registry.variable.VariableRegistryUpdateRequest;
+import org.apache.nifi.registry.variable.VariableRegistryUpdateStep;
+import org.apache.nifi.remote.RemoteGroupPort;
+import org.apache.nifi.remote.RootGroupPort;
+import org.apache.nifi.reporting.Bulletin;
+import org.apache.nifi.reporting.BulletinRepository;
+import org.apache.nifi.reporting.ReportingTask;
+import org.apache.nifi.scheduling.SchedulingStrategy;
+import org.apache.nifi.util.FlowDifferenceFilters;
+import org.apache.nifi.util.FormatUtils;
+import org.apache.nifi.web.FlowModification;
+import org.apache.nifi.web.Revision;
+import org.apache.nifi.web.api.dto.action.ActionDTO;
+import org.apache.nifi.web.api.dto.action.HistoryDTO;
+import org.apache.nifi.web.api.dto.action.component.details.ComponentDetailsDTO;
+import org.apache.nifi.web.api.dto.action.component.details.ExtensionDetailsDTO;
+import org.apache.nifi.web.api.dto.action.component.details.RemoteProcessGroupDetailsDTO;
+import org.apache.nifi.web.api.dto.action.details.ActionDetailsDTO;
+import org.apache.nifi.web.api.dto.action.details.ConfigureDetailsDTO;
+import org.apache.nifi.web.api.dto.action.details.ConnectDetailsDTO;
+import org.apache.nifi.web.api.dto.action.details.MoveDetailsDTO;
+import org.apache.nifi.web.api.dto.action.details.PurgeDetailsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ClassLoaderDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ConnectionDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ConnectionDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ControllerServiceDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.GCDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.GarbageCollectionDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMControllerDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMFlowDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.JVMSystemDiagnosticsSnapshotDTO;
+import org.apache.nifi.web.api.dto.diagnostics.LocalQueuePartitionDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ProcessorDiagnosticsDTO;
+import org.apache.nifi.web.api.dto.diagnostics.RemoteQueuePartitionDTO;
+import org.apache.nifi.web.api.dto.diagnostics.RepositoryUsageDTO;
+import org.apache.nifi.web.api.dto.diagnostics.ThreadDumpDTO;
+import org.apache.nifi.web.api.dto.flow.FlowBreadcrumbDTO;
+import org.apache.nifi.web.api.dto.flow.FlowDTO;
+import org.apache.nifi.web.api.dto.flow.ProcessGroupFlowDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.LineageDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.LineageRequestDTO.LineageRequestType;
+import org.apache.nifi.web.api.dto.provenance.lineage.LineageResultsDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.ProvenanceLinkDTO;
+import org.apache.nifi.web.api.dto.provenance.lineage.ProvenanceNodeDTO;
+import org.apache.nifi.web.api.dto.status.ConnectionStatusDTO;
+import org.apache.nifi.web.api.dto.status.ConnectionStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.PortStatusDTO;
+import org.apache.nifi.web.api.dto.status.PortStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.ProcessGroupStatusDTO;
+import org.apache.nifi.web.api.dto.status.ProcessGroupStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.ProcessorStatusDTO;
+import org.apache.nifi.web.api.dto.status.ProcessorStatusSnapshotDTO;
+import org.apache.nifi.web.api.dto.status.RemoteProcessGroupStatusDTO;
+import org.apache.nifi.web.api.dto.status.RemoteProcessGroupStatusSnapshotDTO;
+import org.apache.nifi.web.api.entity.AccessPolicyEntity;
+import org.apache.nifi.web.api.entity.AccessPolicySummaryEntity;
+import org.apache.nifi.web.api.entity.AffectedComponentEntity;
+import org.apache.nifi.web.api.entity.AllowableValueEntity;
+import org.apache.nifi.web.api.entity.BulletinEntity;
+import org.apache.nifi.web.api.entity.ComponentReferenceEntity;
+import org.apache.nifi.web.api.entity.ConnectionStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.ControllerServiceEntity;
+import org.apache.nifi.web.api.entity.FlowBreadcrumbEntity;
+import org.apache.nifi.web.api.entity.PortEntity;
+import org.apache.nifi.web.api.entity.PortStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.ProcessGroupStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.ProcessorEntity;
+import org.apache.nifi.web.api.entity.ProcessorStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.RemoteProcessGroupEntity;
+import org.apache.nifi.web.api.entity.RemoteProcessGroupStatusSnapshotEntity;
+import org.apache.nifi.web.api.entity.TenantEntity;
+import org.apache.nifi.web.api.entity.VariableEntity;
+import org.apache.nifi.web.controller.ControllerFacade;
+import org.apache.nifi.web.revision.RevisionManager;
+
+import javax.ws.rs.WebApplicationException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.text.Collator;
+import java.text.NumberFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import java.util.function.Supplier;
+import java.util.stream.Collectors;
+
+
+public final class DtoFactory {
+
+ @SuppressWarnings("rawtypes")
+ private final static Comparator<Class> CLASS_NAME_COMPARATOR = new Comparator<Class>() {
+ @Override
+ public int compare(final Class class1, final Class class2) {
+ return Collator.getInstance(Locale.US).compare(class1.getSimpleName(), class2.getSimpleName());
+ }
+ };
+ public static final String SENSITIVE_VALUE_MASK = "********";
+
+ private BulletinRepository bulletinRepository;
+ private ControllerServiceProvider controllerServiceProvider;
+ private EntityFactory entityFactory;
+ private Authorizer authorizer;
+ private ExtensionManager extensionManager;
+
+ public ControllerConfigurationDTO createControllerConfigurationDto(final ControllerFacade controllerFacade) {
+ final ControllerConfigurationDTO dto = new ControllerConfigurationDTO();
+ dto.setMaxTimerDrivenThreadCount(controllerFacade.getMaxTimerDrivenThreadCount());
+ dto.setMaxEventDrivenThreadCount(controllerFacade.getMaxEventDrivenThreadCount());
+ return dto;
+ }
+
+ public FlowConfigurationDTO createFlowConfigurationDto(final String autoRefreshInterval,
+ final Long defaultBackPressureObjectThreshold,
+ final String defaultBackPressureDataSizeThreshold,
+ final String dcaeDistributorApiHostname) {
+ final FlowConfigurationDTO dto = new FlowConfigurationDTO();
+
+ // get the refresh interval
+ final long refreshInterval = FormatUtils.getTimeDuration(autoRefreshInterval, TimeUnit.SECONDS);
+ dto.setAutoRefreshIntervalSeconds(refreshInterval);
+ dto.setSupportsManagedAuthorizer(AuthorizerCapabilityDetection.isManagedAuthorizer(authorizer));
+ dto.setSupportsConfigurableUsersAndGroups(AuthorizerCapabilityDetection.isConfigurableUserGroupProvider(authorizer));
+ dto.setSupportsConfigurableAuthorizer(AuthorizerCapabilityDetection.isConfigurableAccessPolicyProvider(authorizer));
+
+        /* DCAE modification: expose the distributor API hostname to the UI */
+ dto.setDcaeDistributorApiHostname(dcaeDistributorApiHostname);
+
+ final Date now = new Date();
+ dto.setTimeOffset(TimeZone.getDefault().getOffset(now.getTime()));
+ dto.setCurrentTime(now);
+
+ dto.setDefaultBackPressureDataSizeThreshold(defaultBackPressureDataSizeThreshold);
+ dto.setDefaultBackPressureObjectThreshold(defaultBackPressureObjectThreshold);
+
+ return dto;
+ }
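+
+    // Example for createFlowConfigurationDto above (illustrative values): an auto-refresh
+    // setting such as "30 sec" is parsed by FormatUtils.getTimeDuration(...) into 30, so the
+    // DTO reports a 30-second refresh interval alongside the DCAE distributor API hostname
+    // supplied by the caller.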
+
+ /**
+ * Creates an ActionDTO for the specified Action.
+ *
+ * @param action action
+ * @return dto
+ */
+ public ActionDTO createActionDto(final Action action) {
+ final ActionDTO actionDto = new ActionDTO();
+ actionDto.setId(action.getId());
+ actionDto.setSourceId(action.getSourceId());
+ actionDto.setSourceName(action.getSourceName());
+ actionDto.setSourceType(action.getSourceType().toString());
+ actionDto.setTimestamp(action.getTimestamp());
+ actionDto.setUserIdentity(action.getUserIdentity());
+ actionDto.setOperation(action.getOperation().toString());
+ actionDto.setActionDetails(createActionDetailsDto(action.getActionDetails()));
+ actionDto.setComponentDetails(createComponentDetailsDto(action.getComponentDetails()));
+
+ return actionDto;
+ }
+
+ /**
+ * Creates an ActionDetailsDTO for the specified ActionDetails.
+ *
+ * @param actionDetails details
+ * @return dto
+ */
+ private ActionDetailsDTO createActionDetailsDto(final ActionDetails actionDetails) {
+ if (actionDetails == null) {
+ return null;
+ }
+
+ if (actionDetails instanceof FlowChangeConfigureDetails) {
+ final ConfigureDetailsDTO configureDetails = new ConfigureDetailsDTO();
+ configureDetails.setName(((ConfigureDetails) actionDetails).getName());
+ configureDetails.setPreviousValue(((ConfigureDetails) actionDetails).getPreviousValue());
+ configureDetails.setValue(((ConfigureDetails) actionDetails).getValue());
+ return configureDetails;
+ } else if (actionDetails instanceof FlowChangeConnectDetails) {
+ final ConnectDetailsDTO connectDetails = new ConnectDetailsDTO();
+ connectDetails.setSourceId(((ConnectDetails) actionDetails).getSourceId());
+ connectDetails.setSourceName(((ConnectDetails) actionDetails).getSourceName());
+ connectDetails.setSourceType(((ConnectDetails) actionDetails).getSourceType().toString());
+ connectDetails.setRelationship(((ConnectDetails) actionDetails).getRelationship());
+ connectDetails.setDestinationId(((ConnectDetails) actionDetails).getDestinationId());
+ connectDetails.setDestinationName(((ConnectDetails) actionDetails).getDestinationName());
+ connectDetails.setDestinationType(((ConnectDetails) actionDetails).getDestinationType().toString());
+ return connectDetails;
+ } else if (actionDetails instanceof FlowChangeMoveDetails) {
+ final MoveDetailsDTO moveDetails = new MoveDetailsDTO();
+ moveDetails.setPreviousGroup(((MoveDetails) actionDetails).getPreviousGroup());
+ moveDetails.setPreviousGroupId(((MoveDetails) actionDetails).getPreviousGroupId());
+ moveDetails.setGroup(((MoveDetails) actionDetails).getGroup());
+ moveDetails.setGroupId(((MoveDetails) actionDetails).getGroupId());
+ return moveDetails;
+ } else if (actionDetails instanceof FlowChangePurgeDetails) {
+ final PurgeDetailsDTO purgeDetails = new PurgeDetailsDTO();
+ purgeDetails.setEndDate(((PurgeDetails) actionDetails).getEndDate());
+ return purgeDetails;
+ } else {
+ throw new WebApplicationException(new IllegalArgumentException(String.format("Unrecognized type of action details encountered %s during serialization.", actionDetails.toString())));
+ }
+ }
+
+ /**
+ * Creates a ComponentDetailsDTO for the specified ComponentDetails.
+ *
+ * @param componentDetails details
+ * @return dto
+ */
+ private ComponentDetailsDTO createComponentDetailsDto(final ComponentDetails componentDetails) {
+ if (componentDetails == null) {
+ return null;
+ }
+
+ if (componentDetails instanceof FlowChangeExtensionDetails) {
+ final ExtensionDetailsDTO processorDetails = new ExtensionDetailsDTO();
+ processorDetails.setType(((ExtensionDetails) componentDetails).getType());
+ return processorDetails;
+ } else if (componentDetails instanceof FlowChangeRemoteProcessGroupDetails) {
+ final RemoteProcessGroupDetailsDTO remoteProcessGroupDetails = new RemoteProcessGroupDetailsDTO();
+ remoteProcessGroupDetails.setUri(((RemoteProcessGroupDetails) componentDetails).getUri());
+ return remoteProcessGroupDetails;
+ } else {
+ throw new WebApplicationException(new IllegalArgumentException(String.format("Unrecognized type of component details encountered %s during serialization. ", componentDetails.toString())));
+ }
+ }
+
+ /**
+ * Creates a HistoryDTO from the specified History.
+ *
+ * @param history history
+ * @return dto
+ */
+ public HistoryDTO createHistoryDto(final History history) {
+ final HistoryDTO historyDto = new HistoryDTO();
+ historyDto.setTotal(history.getTotal());
+ historyDto.setLastRefreshed(history.getLastRefreshed());
+ return historyDto;
+ }
+
+ /**
+     * Creates a ComponentStateDTO for the given component and its local and cluster state.
+     *
+     * @param componentId component id
+     * @param componentClass component class
+     * @param localState local state
+     * @param clusterState cluster state
+ * @return dto
+ */
+ public ComponentStateDTO createComponentStateDTO(final String componentId, final Class<?> componentClass, final StateMap localState, final StateMap clusterState) {
+ final ComponentStateDTO dto = new ComponentStateDTO();
+ dto.setComponentId(componentId);
+ dto.setStateDescription(getStateDescription(componentClass));
+ dto.setLocalState(createStateMapDTO(Scope.LOCAL, localState));
+ dto.setClusterState(createStateMapDTO(Scope.CLUSTER, clusterState));
+ return dto;
+ }
+
+ /**
+ * Gets the description of the state this component persists.
+ *
+ * @param componentClass the component class
+ * @return state description
+ */
+ private String getStateDescription(final Class<?> componentClass) {
+ final Stateful capabilityDesc = componentClass.getAnnotation(Stateful.class);
+ if (capabilityDesc != null) {
+ return capabilityDesc.description();
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Creates a StateMapDTO for the given scope and state map.
+ *
+ * @param scope the scope
+ * @param stateMap the state map
+ * @return dto
+ */
+ public StateMapDTO createStateMapDTO(final Scope scope, final StateMap stateMap) {
+ if (stateMap == null) {
+ return null;
+ }
+
+ final StateMapDTO dto = new StateMapDTO();
+ dto.setScope(scope.toString());
+
+ final TreeMap<String, String> sortedState = new TreeMap<>(SortedStateUtils.getKeyComparator());
+ final Map<String, String> state = stateMap.toMap();
+ sortedState.putAll(state);
+
+ int count = 0;
+ final List<StateEntryDTO> stateEntries = new ArrayList<>();
+ final Set<Map.Entry<String, String>> entrySet = sortedState.entrySet();
+ for (final Iterator<Entry<String, String>> iter = entrySet.iterator(); iter.hasNext() && count++ < SortedStateUtils.MAX_COMPONENT_STATE_ENTRIES;) {
+ final Map.Entry<String, String> entry = iter.next();
+ final StateEntryDTO entryDTO = new StateEntryDTO();
+ entryDTO.setKey(entry.getKey());
+ entryDTO.setValue(entry.getValue());
+ stateEntries.add(entryDTO);
+ }
+ dto.setTotalEntryCount(state.size());
+ dto.setState(stateEntries);
+
+ return dto;
+ }
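+
+    // Note on createStateMapDTO above: at most SortedStateUtils.MAX_COMPONENT_STATE_ENTRIES
+    // entries are copied into the DTO (sorted by the key comparator), while
+    // setTotalEntryCount(state.size()) still reports the full size of the underlying state map,
+    // so callers can detect that the listing was truncated.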
+
+ /**
+     * Creates a CountersSnapshotDTO containing the specified CounterDTOs.
+ *
+ * @param counterDtos dtos
+ * @return dto
+ */
+ public CountersSnapshotDTO createCountersDto(final Collection<CounterDTO> counterDtos) {
+ final CountersSnapshotDTO dto = new CountersSnapshotDTO();
+ dto.setCounters(counterDtos);
+ dto.setGenerated(new Date());
+ return dto;
+ }
+
+ /**
+ * Creates a CounterDTO from the specified Counter.
+ *
+ * @param counter counter
+ * @return dto
+ */
+ public CounterDTO createCounterDto(final Counter counter) {
+ final CounterDTO dto = new CounterDTO();
+ dto.setId(counter.getIdentifier());
+ dto.setContext(counter.getContext());
+ dto.setName(counter.getName());
+ dto.setValueCount(counter.getValue());
+ dto.setValue(FormatUtils.formatCount(counter.getValue()));
+ return dto;
+ }
+
+ /**
+ * Creates a PositionDTO from the specified position
+ *
+ * @param position position
+ * @return dto
+ */
+ public PositionDTO createPositionDto(final Position position) {
+ return new PositionDTO(position.getX(), position.getY());
+ }
+
+ private boolean isDropRequestComplete(final DropFlowFileState state) {
+ return DropFlowFileState.COMPLETE.equals(state) || DropFlowFileState.CANCELED.equals(state) || DropFlowFileState.FAILURE.equals(state);
+ }
+
+ /**
+ * Creates a DropRequestDTO from the specified flow file status.
+ *
+ * @param dropRequest dropRequest
+ * @return dto
+ */
+ public DropRequestDTO createDropRequestDTO(final DropFlowFileStatus dropRequest) {
+ final DropRequestDTO dto = new DropRequestDTO();
+ dto.setId(dropRequest.getRequestIdentifier());
+ dto.setSubmissionTime(new Date(dropRequest.getRequestSubmissionTime()));
+ dto.setLastUpdated(new Date(dropRequest.getLastUpdated()));
+ dto.setState(dropRequest.getState().toString());
+ dto.setFailureReason(dropRequest.getFailureReason());
+ dto.setFinished(isDropRequestComplete(dropRequest.getState()));
+
+ final QueueSize dropped = dropRequest.getDroppedSize();
+ dto.setDroppedCount(dropped.getObjectCount());
+ dto.setDroppedSize(dropped.getByteCount());
+ dto.setDropped(FormatUtils.formatCount(dropped.getObjectCount()) + " / " + FormatUtils.formatDataSize(dropped.getByteCount()));
+
+ final QueueSize current = dropRequest.getCurrentSize();
+ dto.setCurrentCount(current.getObjectCount());
+ dto.setCurrentSize(current.getByteCount());
+ dto.setCurrent(FormatUtils.formatCount(current.getObjectCount()) + " / " + FormatUtils.formatDataSize(current.getByteCount()));
+
+ final QueueSize original = dropRequest.getOriginalSize();
+ dto.setOriginalCount(original.getObjectCount());
+ dto.setOriginalSize(original.getByteCount());
+ dto.setOriginal(FormatUtils.formatCount(original.getObjectCount()) + " / " + FormatUtils.formatDataSize(original.getByteCount()));
+
+        if (isDropRequestComplete(dropRequest.getState())) {
+            dto.setPercentCompleted(100);
+        } else if (original.getObjectCount() == 0) {
+            // guard against division by zero when the queue was already empty at submission
+            dto.setPercentCompleted(0);
+        } else {
+            dto.setPercentCompleted((dropped.getObjectCount() * 100) / original.getObjectCount());
+        }
+
+ return dto;
+ }
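+
+    // Worked example for createDropRequestDTO above (assumed values): if the queue originally
+    // held 200 FlowFiles and 50 have been dropped so far, the in-progress calculation reports
+    // (50 * 100) / 200 = 25 percent complete; once the request reaches COMPLETE, CANCELED,
+    // or FAILURE the percentage is pinned to 100.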
+
+ private boolean isListingRequestComplete(final ListFlowFileState state) {
+ return ListFlowFileState.COMPLETE.equals(state) || ListFlowFileState.CANCELED.equals(state) || ListFlowFileState.FAILURE.equals(state);
+ }
+
+ private QueueSizeDTO createQueueSizeDTO(final QueueSize queueSize) {
+ final QueueSizeDTO dto = new QueueSizeDTO();
+ dto.setByteCount(queueSize.getByteCount());
+ dto.setObjectCount(queueSize.getObjectCount());
+ return dto;
+ }
+
+ /**
+ * Creates a ListingRequestDTO from the specified ListFlowFileStatus.
+ *
+ * @param listingRequest listingRequest
+ * @return dto
+ */
+ public ListingRequestDTO createListingRequestDTO(final ListFlowFileStatus listingRequest) {
+ final ListingRequestDTO dto = new ListingRequestDTO();
+ dto.setId(listingRequest.getRequestIdentifier());
+ dto.setSubmissionTime(new Date(listingRequest.getRequestSubmissionTime()));
+ dto.setLastUpdated(new Date(listingRequest.getLastUpdated()));
+ dto.setState(listingRequest.getState().toString());
+ dto.setFailureReason(listingRequest.getFailureReason());
+ dto.setFinished(isListingRequestComplete(listingRequest.getState()));
+ dto.setMaxResults(listingRequest.getMaxResults());
+ dto.setPercentCompleted(listingRequest.getCompletionPercentage());
+
+ dto.setQueueSize(createQueueSizeDTO(listingRequest.getQueueSize()));
+
+ if (isListingRequestComplete(listingRequest.getState())) {
+ final List<FlowFileSummary> flowFileSummaries = listingRequest.getFlowFileSummaries();
+ if (flowFileSummaries != null) {
+ final Date now = new Date();
+ final List<FlowFileSummaryDTO> summaryDtos = new ArrayList<>(flowFileSummaries.size());
+ for (final FlowFileSummary summary : flowFileSummaries) {
+ summaryDtos.add(createFlowFileSummaryDTO(summary, now));
+ }
+ dto.setFlowFileSummaries(summaryDtos);
+ }
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a FlowFileSummaryDTO from the specified FlowFileSummary.
+ *
+     * @param summary summary
+     * @param now the timestamp used when computing penalty, queued, and lineage durations
+     * @return dto
+ */
+ public FlowFileSummaryDTO createFlowFileSummaryDTO(final FlowFileSummary summary, final Date now) {
+ final FlowFileSummaryDTO dto = new FlowFileSummaryDTO();
+ dto.setUuid(summary.getUuid());
+ dto.setFilename(summary.getFilename());
+
+ dto.setPenalized(summary.isPenalized());
+ final long penaltyExpiration = summary.getPenaltyExpirationMillis() - now.getTime();
+        dto.setPenaltyExpiresIn(penaltyExpiration >= 0 ? penaltyExpiration : 0);
+
+ dto.setPosition(summary.getPosition());
+ dto.setSize(summary.getSize());
+
+ final long queuedDuration = now.getTime() - summary.getLastQueuedTime();
+ dto.setQueuedDuration(queuedDuration);
+
+ final long age = now.getTime() - summary.getLineageStartDate();
+ dto.setLineageDuration(age);
+
+ return dto;
+ }
+
+ /**
+ * Creates a FlowFileDTO from the specified FlowFileRecord.
+ *
+ * @param record record
+ * @return dto
+ */
+ public FlowFileDTO createFlowFileDTO(final FlowFileRecord record) {
+ final Date now = new Date();
+ final FlowFileDTO dto = new FlowFileDTO();
+ dto.setUuid(record.getAttribute(CoreAttributes.UUID.key()));
+ dto.setFilename(record.getAttribute(CoreAttributes.FILENAME.key()));
+
+ dto.setPenalized(record.isPenalized());
+ final long penaltyExpiration = record.getPenaltyExpirationMillis() - now.getTime();
+        dto.setPenaltyExpiresIn(penaltyExpiration >= 0 ? penaltyExpiration : 0);
+
+ dto.setSize(record.getSize());
+ dto.setAttributes(record.getAttributes());
+
+ final long queuedDuration = now.getTime() - record.getLastQueueDate();
+ dto.setQueuedDuration(queuedDuration);
+
+ final long age = now.getTime() - record.getLineageStartDate();
+ dto.setLineageDuration(age);
+
+ final ContentClaim contentClaim = record.getContentClaim();
+ if (contentClaim != null) {
+ final ResourceClaim resourceClaim = contentClaim.getResourceClaim();
+ dto.setContentClaimSection(resourceClaim.getSection());
+ dto.setContentClaimContainer(resourceClaim.getContainer());
+ dto.setContentClaimIdentifier(resourceClaim.getId());
+ dto.setContentClaimOffset(contentClaim.getOffset() + record.getContentClaimOffset());
+ dto.setContentClaimFileSizeBytes(record.getSize());
+ dto.setContentClaimFileSize(FormatUtils.formatDataSize(record.getSize()));
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a ConnectionDTO from the specified Connection.
+ *
+ * @param connection connection
+ * @return dto
+ */
+ public ConnectionDTO createConnectionDto(final Connection connection) {
+ if (connection == null) {
+ return null;
+ }
+
+ final ConnectionDTO dto = new ConnectionDTO();
+
+ dto.setId(connection.getIdentifier());
+ dto.setParentGroupId(connection.getProcessGroup().getIdentifier());
+
+ final List<PositionDTO> bendPoints = new ArrayList<>();
+ for (final Position bendPoint : connection.getBendPoints()) {
+ bendPoints.add(createPositionDto(bendPoint));
+ }
+ dto.setBends(bendPoints);
+ dto.setName(connection.getName());
+ dto.setLabelIndex(connection.getLabelIndex());
+ dto.setzIndex(connection.getZIndex());
+ dto.setSource(createConnectableDto(connection.getSource()));
+ dto.setDestination(createConnectableDto(connection.getDestination()));
+ dto.setVersionedComponentId(connection.getVersionedComponentId().orElse(null));
+
+ final FlowFileQueue flowFileQueue = connection.getFlowFileQueue();
+
+ dto.setBackPressureObjectThreshold(flowFileQueue.getBackPressureObjectThreshold());
+ dto.setBackPressureDataSizeThreshold(flowFileQueue.getBackPressureDataSizeThreshold());
+ dto.setFlowFileExpiration(flowFileQueue.getFlowFileExpiration());
+ dto.setPrioritizers(new ArrayList<String>());
+ for (final FlowFilePrioritizer comparator : flowFileQueue.getPriorities()) {
+ dto.getPrioritizers().add(comparator.getClass().getCanonicalName());
+ }
+
+        // selected relationships on the connection (ports expose only the anonymous relationship, which is skipped)
+ for (final Relationship selectedRelationship : connection.getRelationships()) {
+ if (!Relationship.ANONYMOUS.equals(selectedRelationship)) {
+ if (dto.getSelectedRelationships() == null) {
+ dto.setSelectedRelationships(new TreeSet<String>(Collator.getInstance(Locale.US)));
+ }
+
+ dto.getSelectedRelationships().add(selectedRelationship.getName());
+ }
+ }
+
+        // relationships available on the source (again skipping the anonymous relationship used by ports)
+ for (final Relationship availableRelationship : connection.getSource().getRelationships()) {
+ if (!Relationship.ANONYMOUS.equals(availableRelationship)) {
+ if (dto.getAvailableRelationships() == null) {
+ dto.setAvailableRelationships(new TreeSet<String>(Collator.getInstance(Locale.US)));
+ }
+
+ dto.getAvailableRelationships().add(availableRelationship.getName());
+ }
+ }
+
+ final LoadBalanceStrategy loadBalanceStrategy = flowFileQueue.getLoadBalanceStrategy();
+ dto.setLoadBalancePartitionAttribute(flowFileQueue.getPartitioningAttribute());
+ dto.setLoadBalanceStrategy(loadBalanceStrategy.name());
+ dto.setLoadBalanceCompression(flowFileQueue.getLoadBalanceCompression().name());
+
+ if (loadBalanceStrategy == LoadBalanceStrategy.DO_NOT_LOAD_BALANCE) {
+ dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_NOT_CONFIGURED);
+ } else if (flowFileQueue.isActivelyLoadBalancing()) {
+ dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_ACTIVE);
+ } else {
+ dto.setLoadBalanceStatus(ConnectionDTO.LOAD_BALANCE_INACTIVE);
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a ConnectableDTO from the specified Connectable.
+ *
+ * @param connectable connectable
+ * @return dto
+ */
+ public ConnectableDTO createConnectableDto(final Connectable connectable) {
+ if (connectable == null) {
+ return null;
+ }
+
+ boolean isAuthorized = connectable.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
+
+ final ConnectableDTO dto = new ConnectableDTO();
+ dto.setId(connectable.getIdentifier());
+ dto.setName(isAuthorized ? connectable.getName() : connectable.getIdentifier());
+ dto.setType(connectable.getConnectableType().name());
+ dto.setVersionedComponentId(connectable.getVersionedComponentId().orElse(null));
+
+ if (connectable instanceof RemoteGroupPort) {
+ final RemoteGroupPort remoteGroupPort = (RemoteGroupPort) connectable;
+ final RemoteProcessGroup remoteGroup = remoteGroupPort.getRemoteProcessGroup();
+ dto.setGroupId(remoteGroup.getIdentifier());
+ dto.setRunning(remoteGroupPort.isTargetRunning());
+ dto.setTransmitting(remoteGroupPort.isRunning());
+ dto.setExists(remoteGroupPort.getTargetExists());
+ if (isAuthorized) {
+ dto.setComments(remoteGroup.getComments());
+ }
+ } else {
+ dto.setGroupId(connectable.getProcessGroup().getIdentifier());
+ dto.setRunning(connectable.isRunning());
+ if (isAuthorized) {
+ dto.setComments(connectable.getComments());
+ }
+ }
+
+ return dto;
+ }
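+
+    // Note on createConnectableDto above: when the current user lacks READ permission on the
+    // connectable, the DTO falls back to the component identifier in place of its name and
+    // omits the comments, so connection endpoints never leak names the caller cannot read.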
+
+ /**
+ * Creates a LabelDTO from the specified Label.
+ *
+ * @param label label
+ * @return dto
+ */
+ public LabelDTO createLabelDto(final Label label) {
+ if (label == null) {
+ return null;
+ }
+
+ final LabelDTO dto = new LabelDTO();
+ dto.setId(label.getIdentifier());
+ dto.setPosition(createPositionDto(label.getPosition()));
+ dto.setStyle(label.getStyle());
+ dto.setHeight(label.getSize().getHeight());
+ dto.setWidth(label.getSize().getWidth());
+ dto.setLabel(label.getValue());
+ dto.setParentGroupId(label.getProcessGroup().getIdentifier());
+ dto.setVersionedComponentId(label.getVersionedComponentId().orElse(null));
+
+ return dto;
+ }
+
+ /**
+ * Creates a {@link UserDTO} from the specified {@link User}.
+ *
+     * @param user user
+     * @param groups the groups the user belongs to
+     * @param accessPolicies the access policies associated with the user
+     * @return dto
+ */
+ public UserDTO createUserDto(final User user, final Set<TenantEntity> groups, final Set<AccessPolicySummaryEntity> accessPolicies) {
+ if (user == null) {
+ return null;
+ }
+
+ final UserDTO dto = new UserDTO();
+ dto.setId(user.getIdentifier());
+ dto.setUserGroups(groups);
+ dto.setIdentity(user.getIdentity());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isUserConfigurable(authorizer, user));
+ dto.setAccessPolicies(accessPolicies);
+
+ return dto;
+ }
+
+ /**
+ * Creates a {@link TenantDTO} from the specified {@link User}.
+ *
+ * @param user user
+ * @return dto
+ */
+ public TenantDTO createTenantDTO(User user) {
+ if (user == null) {
+ return null;
+ }
+
+ final TenantDTO dto = new TenantDTO();
+ dto.setId(user.getIdentifier());
+ dto.setIdentity(user.getIdentity());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isUserConfigurable(authorizer, user));
+
+ return dto;
+ }
+
+ /**
+ * Creates a {@link UserGroupDTO} from the specified {@link Group}.
+ *
+     * @param userGroup user group
+     * @param users the users in the group
+     * @param accessPolicies the access policies associated with the group
+     * @return dto
+ */
+ public UserGroupDTO createUserGroupDto(final Group userGroup, Set<TenantEntity> users, final Set<AccessPolicySummaryEntity> accessPolicies) {
+ if (userGroup == null) {
+ return null;
+ }
+
+        // convert the access policy summaries into full access policy entities to stay
+        // backward compatible with the (incorrect) type declared on the UserGroupDTO
+ final Set<AccessPolicyEntity> policies = accessPolicies.stream().map(summaryEntity -> {
+ final AccessPolicyDTO policy = new AccessPolicyDTO();
+ policy.setId(summaryEntity.getId());
+
+ if (summaryEntity.getPermissions().getCanRead()) {
+ final AccessPolicySummaryDTO summary = summaryEntity.getComponent();
+ policy.setResource(summary.getResource());
+ policy.setAction(summary.getAction());
+ policy.setConfigurable(summary.getConfigurable());
+ policy.setComponentReference(summary.getComponentReference());
+ }
+
+ return entityFactory.createAccessPolicyEntity(policy, summaryEntity.getRevision(), summaryEntity.getPermissions());
+ }).collect(Collectors.toSet());
+
+ final UserGroupDTO dto = new UserGroupDTO();
+ dto.setId(userGroup.getIdentifier());
+ dto.setUsers(users);
+ dto.setIdentity(userGroup.getName());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup));
+ dto.setAccessPolicies(policies);
+
+ return dto;
+ }
+
+ /**
+     * Creates a {@link TenantDTO} from the specified {@link Group}.
+     *
+     * @param userGroup user group
+ * @return dto
+ */
+ public TenantDTO createTenantDTO(Group userGroup) {
+ if (userGroup == null) {
+ return null;
+ }
+
+ final TenantDTO dto = new TenantDTO();
+ dto.setId(userGroup.getIdentifier());
+ dto.setIdentity(userGroup.getName());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isGroupConfigurable(authorizer, userGroup));
+
+ return dto;
+ }
+
+ /**
+ * Creates a FunnelDTO from the specified Funnel.
+ *
+ * @param funnel funnel
+ * @return dto
+ */
+ public FunnelDTO createFunnelDto(final Funnel funnel) {
+ if (funnel == null) {
+ return null;
+ }
+
+ final FunnelDTO dto = new FunnelDTO();
+ dto.setId(funnel.getIdentifier());
+ dto.setPosition(createPositionDto(funnel.getPosition()));
+ dto.setParentGroupId(funnel.getProcessGroup().getIdentifier());
+ dto.setVersionedComponentId(funnel.getVersionedComponentId().orElse(null));
+
+ return dto;
+ }
+
+ /**
+ * Creates a SnippetDTO from the specified Snippet.
+ *
+ * @param snippet snippet
+ * @return dto
+ */
+ public SnippetDTO createSnippetDto(final Snippet snippet) {
+ final SnippetDTO dto = new SnippetDTO();
+ dto.setId(snippet.getId());
+ dto.setParentGroupId(snippet.getParentGroupId());
+
+ // populate the snippet contents ids
+ dto.setConnections(mapRevisionToDto(snippet.getConnections()));
+ dto.setFunnels(mapRevisionToDto(snippet.getFunnels()));
+ dto.setInputPorts(mapRevisionToDto(snippet.getInputPorts()));
+ dto.setLabels(mapRevisionToDto(snippet.getLabels()));
+ dto.setOutputPorts(mapRevisionToDto(snippet.getOutputPorts()));
+ dto.setProcessGroups(mapRevisionToDto(snippet.getProcessGroups()));
+ dto.setProcessors(mapRevisionToDto(snippet.getProcessors()));
+ dto.setRemoteProcessGroups(mapRevisionToDto(snippet.getRemoteProcessGroups()));
+
+ return dto;
+ }
+
+ private Map<String, RevisionDTO> mapRevisionToDto(final Map<String, Revision> revisionMap) {
+ final Map<String, RevisionDTO> dtos = new HashMap<>(revisionMap.size());
+ for (final Map.Entry<String, Revision> entry : revisionMap.entrySet()) {
+ final Revision revision = entry.getValue();
+ final RevisionDTO revisionDto = new RevisionDTO();
+ revisionDto.setClientId(revision.getClientId());
+ revisionDto.setVersion(revision.getVersion());
+
+ dtos.put(entry.getKey(), revisionDto);
+ }
+ return dtos;
+ }
+
+ /**
+ * Creates a TemplateDTO from the specified template.
+ *
+ * @param template template
+ * @return dto
+ */
+ public TemplateDTO createTemplateDTO(final Template template) {
+ if (template == null) {
+ return null;
+ }
+
+ final TemplateDTO original = template.getDetails();
+
+ final TemplateDTO copy = new TemplateDTO();
+ copy.setId(original.getId());
+ copy.setGroupId(template.getProcessGroup().getIdentifier());
+ copy.setName(original.getName());
+ copy.setDescription(original.getDescription());
+ copy.setTimestamp(original.getTimestamp());
+ copy.setUri(original.getUri());
+ copy.setEncodingVersion(original.getEncodingVersion());
+
+ return copy;
+ }
+
+
+ public RemoteProcessGroupStatusDTO createRemoteProcessGroupStatusDto(final RemoteProcessGroup remoteProcessGroup, final RemoteProcessGroupStatus remoteProcessGroupStatus) {
+ final RemoteProcessGroupStatusDTO dto = new RemoteProcessGroupStatusDTO();
+ dto.setId(remoteProcessGroupStatus.getId());
+ dto.setGroupId(remoteProcessGroupStatus.getGroupId());
+ dto.setTargetUri(remoteProcessGroupStatus.getTargetUri());
+ dto.setName(remoteProcessGroupStatus.getName());
+ dto.setTransmissionStatus(remoteProcessGroupStatus.getTransmissionStatus().toString());
+ dto.setStatsLastRefreshed(new Date());
+ dto.setValidationStatus(getRemoteProcessGroupValidationStatus(remoteProcessGroup).name());
+
+ final RemoteProcessGroupStatusSnapshotDTO snapshot = new RemoteProcessGroupStatusSnapshotDTO();
+ dto.setAggregateSnapshot(snapshot);
+
+ snapshot.setId(remoteProcessGroupStatus.getId());
+ snapshot.setGroupId(remoteProcessGroupStatus.getGroupId());
+ snapshot.setName(remoteProcessGroupStatus.getName());
+ snapshot.setTargetUri(remoteProcessGroupStatus.getTargetUri());
+ snapshot.setTransmissionStatus(remoteProcessGroupStatus.getTransmissionStatus().toString());
+
+ snapshot.setActiveThreadCount(remoteProcessGroupStatus.getActiveThreadCount());
+ snapshot.setFlowFilesSent(remoteProcessGroupStatus.getSentCount());
+ snapshot.setBytesSent(remoteProcessGroupStatus.getSentContentSize());
+ snapshot.setFlowFilesReceived(remoteProcessGroupStatus.getReceivedCount());
+ snapshot.setBytesReceived(remoteProcessGroupStatus.getReceivedContentSize());
+
+ StatusMerger.updatePrettyPrintedFields(snapshot);
+ return dto;
+ }
+
+ private ValidationStatus getRemoteProcessGroupValidationStatus(RemoteProcessGroup remoteProcessGroup) {
+ final boolean hasAuthIssue = remoteProcessGroup.getAuthorizationIssue() != null && !remoteProcessGroup.getAuthorizationIssue().isEmpty();
+ final Collection<ValidationResult> validationResults = remoteProcessGroup.validate();
+ final boolean hasValidationIssue = validationResults != null && !validationResults.isEmpty();
+ return hasAuthIssue || hasValidationIssue ? ValidationStatus.INVALID : ValidationStatus.VALID;
+ }
+
+ public ProcessGroupStatusDTO createConciseProcessGroupStatusDto(final ProcessGroupStatus processGroupStatus) {
+ final ProcessGroupStatusDTO processGroupStatusDto = new ProcessGroupStatusDTO();
+ processGroupStatusDto.setId(processGroupStatus.getId());
+ processGroupStatusDto.setName(processGroupStatus.getName());
+ processGroupStatusDto.setStatsLastRefreshed(new Date());
+
+ final ProcessGroupStatusSnapshotDTO snapshot = new ProcessGroupStatusSnapshotDTO();
+ processGroupStatusDto.setAggregateSnapshot(snapshot);
+
+ snapshot.setId(processGroupStatus.getId());
+ snapshot.setName(processGroupStatus.getName());
+
+ if (processGroupStatus.getVersionedFlowState() != null) {
+ snapshot.setVersionedFlowState(processGroupStatus.getVersionedFlowState().name());
+ }
+
+ snapshot.setFlowFilesQueued(processGroupStatus.getQueuedCount());
+ snapshot.setBytesQueued(processGroupStatus.getQueuedContentSize());
+ snapshot.setBytesRead(processGroupStatus.getBytesRead());
+ snapshot.setBytesWritten(processGroupStatus.getBytesWritten());
+ snapshot.setFlowFilesIn(processGroupStatus.getInputCount());
+ snapshot.setBytesIn(processGroupStatus.getInputContentSize());
+ snapshot.setFlowFilesOut(processGroupStatus.getOutputCount());
+ snapshot.setBytesOut(processGroupStatus.getOutputContentSize());
+ snapshot.setFlowFilesTransferred(processGroupStatus.getFlowFilesTransferred());
+ snapshot.setBytesTransferred(processGroupStatus.getBytesTransferred());
+ snapshot.setFlowFilesSent(processGroupStatus.getFlowFilesSent());
+ snapshot.setBytesSent(processGroupStatus.getBytesSent());
+ snapshot.setFlowFilesReceived(processGroupStatus.getFlowFilesReceived());
+ snapshot.setBytesReceived(processGroupStatus.getBytesReceived());
+
+ snapshot.setActiveThreadCount(processGroupStatus.getActiveThreadCount());
+ snapshot.setTerminatedThreadCount(processGroupStatus.getTerminatedThreadCount());
+
+ StatusMerger.updatePrettyPrintedFields(snapshot);
+ return processGroupStatusDto;
+ }
+
+ public ProcessGroupStatusDTO createProcessGroupStatusDto(final ProcessGroup processGroup, final ProcessGroupStatus processGroupStatus) {
+ final ProcessGroupStatusDTO processGroupStatusDto = createConciseProcessGroupStatusDto(processGroupStatus);
+ final ProcessGroupStatusSnapshotDTO snapshot = processGroupStatusDto.getAggregateSnapshot();
+
+ // processor status
+ final Collection<ProcessorStatusSnapshotEntity> processorStatusSnapshotEntities = new ArrayList<>();
+ snapshot.setProcessorStatusSnapshots(processorStatusSnapshotEntities);
+ final Collection<ProcessorStatus> processorStatusCollection = processGroupStatus.getProcessorStatus();
+ if (processorStatusCollection != null) {
+ for (final ProcessorStatus processorStatus : processorStatusCollection) {
+ final ProcessorStatusDTO processorStatusDto = createProcessorStatusDto(processorStatus);
+ final ProcessorNode processor = processGroup.findProcessor(processorStatusDto.getId());
+ final PermissionsDTO processorPermissions = createPermissionsDto(processor);
+ processorStatusSnapshotEntities.add(entityFactory.createProcessorStatusSnapshotEntity(processorStatusDto.getAggregateSnapshot(), processorPermissions));
+ }
+ }
+
+ // connection status
+ final Collection<ConnectionStatusSnapshotEntity> connectionStatusDtoCollection = new ArrayList<>();
+ snapshot.setConnectionStatusSnapshots(connectionStatusDtoCollection);
+ final Collection<ConnectionStatus> connectionStatusCollection = processGroupStatus.getConnectionStatus();
+ if (connectionStatusCollection != null) {
+ for (final ConnectionStatus connectionStatus : connectionStatusCollection) {
+ final ConnectionStatusDTO connectionStatusDto = createConnectionStatusDto(connectionStatus);
+ final Connection connection = processGroup.findConnection(connectionStatusDto.getId());
+ final PermissionsDTO connectionPermissions = createPermissionsDto(connection);
+ connectionStatusDtoCollection.add(entityFactory.createConnectionStatusSnapshotEntity(connectionStatusDto.getAggregateSnapshot(), connectionPermissions));
+ }
+ }
+
+ // local child process groups
+ final Collection<ProcessGroupStatusSnapshotEntity> childProcessGroupStatusDtoCollection = new ArrayList<>();
+ snapshot.setProcessGroupStatusSnapshots(childProcessGroupStatusDtoCollection);
+ final Collection<ProcessGroupStatus> childProcessGroupStatusCollection = processGroupStatus.getProcessGroupStatus();
+ if (childProcessGroupStatusCollection != null) {
+ for (final ProcessGroupStatus childProcessGroupStatus : childProcessGroupStatusCollection) {
+ final ProcessGroupStatusDTO childProcessGroupStatusDto = createProcessGroupStatusDto(processGroup, childProcessGroupStatus);
+ final ProcessGroup childProcessGroup = processGroup.findProcessGroup(childProcessGroupStatusDto.getId());
+ final PermissionsDTO childProcessGroupPermissions = createPermissionsDto(childProcessGroup);
+ childProcessGroupStatusDtoCollection.add(entityFactory.createProcessGroupStatusSnapshotEntity(childProcessGroupStatusDto.getAggregateSnapshot(), childProcessGroupPermissions));
+ }
+ }
+
+ // remote child process groups
+ final Collection<RemoteProcessGroupStatusSnapshotEntity> childRemoteProcessGroupStatusDtoCollection = new ArrayList<>();
+ snapshot.setRemoteProcessGroupStatusSnapshots(childRemoteProcessGroupStatusDtoCollection);
+ final Collection<RemoteProcessGroupStatus> childRemoteProcessGroupStatusCollection = processGroupStatus.getRemoteProcessGroupStatus();
+ if (childRemoteProcessGroupStatusCollection != null) {
+ for (final RemoteProcessGroupStatus childRemoteProcessGroupStatus : childRemoteProcessGroupStatusCollection) {
+ final RemoteProcessGroup remoteProcessGroup = processGroup.findRemoteProcessGroup(childRemoteProcessGroupStatus.getId());
+ final RemoteProcessGroupStatusDTO childRemoteProcessGroupStatusDto = createRemoteProcessGroupStatusDto(remoteProcessGroup, childRemoteProcessGroupStatus);
+ final PermissionsDTO remoteProcessGroupPermissions = createPermissionsDto(remoteProcessGroup);
+ childRemoteProcessGroupStatusDtoCollection.add(entityFactory.createRemoteProcessGroupStatusSnapshotEntity(childRemoteProcessGroupStatusDto.getAggregateSnapshot(),
+ remoteProcessGroupPermissions));
+ }
+ }
+
+ // input ports
+ final Collection<PortStatusSnapshotEntity> inputPortStatusDtoCollection = new ArrayList<>();
+ snapshot.setInputPortStatusSnapshots(inputPortStatusDtoCollection);
+ final Collection<PortStatus> inputPortStatusCollection = processGroupStatus.getInputPortStatus();
+ if (inputPortStatusCollection != null) {
+ for (final PortStatus portStatus : inputPortStatusCollection) {
+ final PortStatusDTO portStatusDto = createPortStatusDto(portStatus);
+ final Port inputPort = processGroup.findInputPort(portStatus.getId());
+ final PermissionsDTO inputPortPermissions = createPermissionsDto(inputPort);
+ inputPortStatusDtoCollection.add(entityFactory.createPortStatusSnapshotEntity(portStatusDto.getAggregateSnapshot(), inputPortPermissions));
+ }
+ }
+
+ // output ports
+ final Collection<PortStatusSnapshotEntity> outputPortStatusDtoCollection = new ArrayList<>();
+ snapshot.setOutputPortStatusSnapshots(outputPortStatusDtoCollection);
+ final Collection<PortStatus> outputPortStatusCollection = processGroupStatus.getOutputPortStatus();
+ if (outputPortStatusCollection != null) {
+ for (final PortStatus portStatus : outputPortStatusCollection) {
+ final PortStatusDTO portStatusDto = createPortStatusDto(portStatus);
+ final Port outputPort = processGroup.findOutputPort(portStatus.getId());
+ final PermissionsDTO outputPortPermissions = createPermissionsDto(outputPort);
+ outputPortStatusDtoCollection.add(entityFactory.createPortStatusSnapshotEntity(portStatusDto.getAggregateSnapshot(), outputPortPermissions));
+ }
+ }
+
+ return processGroupStatusDto;
+ }
+
+ public ConnectionStatusDTO createConnectionStatusDto(final ConnectionStatus connectionStatus) {
+ final ConnectionStatusDTO connectionStatusDto = new ConnectionStatusDTO();
+ connectionStatusDto.setGroupId(connectionStatus.getGroupId());
+ connectionStatusDto.setId(connectionStatus.getId());
+ connectionStatusDto.setName(connectionStatus.getName());
+ connectionStatusDto.setSourceId(connectionStatus.getSourceId());
+ connectionStatusDto.setSourceName(connectionStatus.getSourceName());
+ connectionStatusDto.setDestinationId(connectionStatus.getDestinationId());
+ connectionStatusDto.setDestinationName(connectionStatus.getDestinationName());
+ connectionStatusDto.setStatsLastRefreshed(new Date());
+
+ final ConnectionStatusSnapshotDTO snapshot = new ConnectionStatusSnapshotDTO();
+ connectionStatusDto.setAggregateSnapshot(snapshot);
+
+ snapshot.setId(connectionStatus.getId());
+ snapshot.setGroupId(connectionStatus.getGroupId());
+ snapshot.setName(connectionStatus.getName());
+ snapshot.setSourceName(connectionStatus.getSourceName());
+ snapshot.setDestinationName(connectionStatus.getDestinationName());
+
+ snapshot.setFlowFilesQueued(connectionStatus.getQueuedCount());
+ snapshot.setBytesQueued(connectionStatus.getQueuedBytes());
+
+ snapshot.setFlowFilesIn(connectionStatus.getInputCount());
+ snapshot.setBytesIn(connectionStatus.getInputBytes());
+
+ snapshot.setFlowFilesOut(connectionStatus.getOutputCount());
+ snapshot.setBytesOut(connectionStatus.getOutputBytes());
+
+ if (connectionStatus.getBackPressureObjectThreshold() > 0) {
+ snapshot.setPercentUseCount(Math.min(100, StatusMerger.getUtilization(connectionStatus.getQueuedCount(), connectionStatus.getBackPressureObjectThreshold())));
+ }
+ if (connectionStatus.getBackPressureBytesThreshold() > 0) {
+ snapshot.setPercentUseBytes(Math.min(100, StatusMerger.getUtilization(connectionStatus.getQueuedBytes(), connectionStatus.getBackPressureBytesThreshold())));
+ }
+
+ StatusMerger.updatePrettyPrintedFields(snapshot);
+
+ return connectionStatusDto;
+ }
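+
+    // Example for createConnectionStatusDto above (assumed thresholds): with 500 FlowFiles
+    // queued against a back-pressure object threshold of 10000, the snapshot reports roughly
+    // 5 percent object utilization; both percentages are capped at 100 by the Math.min calls.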
+
+ public ProcessorStatusDTO createProcessorStatusDto(final ProcessorStatus procStatus) {
+ final ProcessorStatusDTO dto = new ProcessorStatusDTO();
+ dto.setId(procStatus.getId());
+ dto.setGroupId(procStatus.getGroupId());
+ dto.setName(procStatus.getName());
+ dto.setStatsLastRefreshed(new Date());
+ dto.setRunStatus(procStatus.getRunStatus().toString());
+
+ final ProcessorStatusSnapshotDTO snapshot = new ProcessorStatusSnapshotDTO();
+ dto.setAggregateSnapshot(snapshot);
+
+ snapshot.setId(procStatus.getId());
+ snapshot.setGroupId(procStatus.getGroupId());
+ snapshot.setName(procStatus.getName());
+
+ snapshot.setFlowFilesOut(procStatus.getOutputCount());
+ snapshot.setBytesOut(procStatus.getOutputBytes());
+
+ snapshot.setFlowFilesIn(procStatus.getInputCount());
+ snapshot.setBytesIn(procStatus.getInputBytes());
+
+ snapshot.setBytesRead(procStatus.getBytesRead());
+ snapshot.setBytesWritten(procStatus.getBytesWritten());
+
+ snapshot.setTaskCount(procStatus.getInvocations());
+ snapshot.setTasksDurationNanos(procStatus.getProcessingNanos());
+ snapshot.setTasksDuration(FormatUtils.formatHoursMinutesSeconds(procStatus.getProcessingNanos(), TimeUnit.NANOSECONDS));
+
+ // determine the run status
+ snapshot.setRunStatus(procStatus.getRunStatus().toString());
+ snapshot.setExecutionNode(procStatus.getExecutionNode().toString());
+
+ snapshot.setActiveThreadCount(procStatus.getActiveThreadCount());
+ snapshot.setTerminatedThreadCount(procStatus.getTerminatedThreadCount());
+ snapshot.setType(procStatus.getType());
+
+ StatusMerger.updatePrettyPrintedFields(snapshot);
+ return dto;
+ }
+
+ /**
+ * Creates a PortStatusDTO for the specified PortStatus.
+ *
+ * @param portStatus status
+ * @return dto
+ */
+ public PortStatusDTO createPortStatusDto(final PortStatus portStatus) {
+ final PortStatusDTO dto = new PortStatusDTO();
+ dto.setId(portStatus.getId());
+ dto.setGroupId(portStatus.getGroupId());
+ dto.setName(portStatus.getName());
+ dto.setRunStatus(portStatus.getRunStatus().toString());
+ dto.setTransmitting(portStatus.isTransmitting());
+ dto.setStatsLastRefreshed(new Date());
+
+ final PortStatusSnapshotDTO snapshot = new PortStatusSnapshotDTO();
+ dto.setAggregateSnapshot(snapshot);
+
+ snapshot.setId(portStatus.getId());
+ snapshot.setGroupId(portStatus.getGroupId());
+ snapshot.setName(portStatus.getName());
+ snapshot.setRunStatus(portStatus.getRunStatus().toString());
+
+ snapshot.setActiveThreadCount(portStatus.getActiveThreadCount());
+ snapshot.setFlowFilesOut(portStatus.getOutputCount());
+ snapshot.setBytesOut(portStatus.getOutputBytes());
+
+ snapshot.setFlowFilesIn(portStatus.getInputCount());
+ snapshot.setBytesIn(portStatus.getInputBytes());
+ StatusMerger.updatePrettyPrintedFields(snapshot);
+
+ return dto;
+ }
+
+ /**
+ * Copies the specified snippet.
+ *
+ * @param originalSnippet snippet
+ * @return dto
+ */
+ public FlowSnippetDTO copySnippetContents(final FlowSnippetDTO originalSnippet) {
+ final FlowSnippetDTO copySnippet = new FlowSnippetDTO();
+
+ if (originalSnippet.getConnections() != null) {
+ for (final ConnectionDTO connection : originalSnippet.getConnections()) {
+ copySnippet.getConnections().add(copy(connection));
+ }
+ }
+ if (originalSnippet.getInputPorts() != null) {
+ for (final PortDTO port : originalSnippet.getInputPorts()) {
+ copySnippet.getInputPorts().add(copy(port));
+ }
+ }
+ if (originalSnippet.getOutputPorts() != null) {
+ for (final PortDTO port : originalSnippet.getOutputPorts()) {
+ copySnippet.getOutputPorts().add(copy(port));
+ }
+ }
+ if (originalSnippet.getProcessGroups() != null) {
+ for (final ProcessGroupDTO processGroup : originalSnippet.getProcessGroups()) {
+ copySnippet.getProcessGroups().add(copy(processGroup, true));
+ }
+ }
+ if (originalSnippet.getProcessors() != null) {
+ for (final ProcessorDTO processor : originalSnippet.getProcessors()) {
+ copySnippet.getProcessors().add(copy(processor));
+ }
+ }
+ if (originalSnippet.getLabels() != null) {
+ for (final LabelDTO label : originalSnippet.getLabels()) {
+ copySnippet.getLabels().add(copy(label));
+ }
+ }
+ if (originalSnippet.getFunnels() != null) {
+ for (final FunnelDTO funnel : originalSnippet.getFunnels()) {
+ copySnippet.getFunnels().add(copy(funnel));
+ }
+ }
+ if (originalSnippet.getRemoteProcessGroups() != null) {
+ for (final RemoteProcessGroupDTO remoteGroup : originalSnippet.getRemoteProcessGroups()) {
+ copySnippet.getRemoteProcessGroups().add(copy(remoteGroup));
+ }
+ }
+ if (originalSnippet.getControllerServices() != null) {
+ for (final ControllerServiceDTO controllerService : originalSnippet.getControllerServices()) {
+ copySnippet.getControllerServices().add(copy(controllerService));
+ }
+ }
+
+ return copySnippet;
+ }
+
+ /**
+ * Creates a PortDTO from the specified Port.
+ *
+ * @param port port
+ * @return dto
+ */
+ public PortDTO createPortDto(final Port port) {
+ if (port == null) {
+ return null;
+ }
+
+ final PortDTO dto = new PortDTO();
+ dto.setId(port.getIdentifier());
+ dto.setPosition(createPositionDto(port.getPosition()));
+ dto.setName(port.getName());
+ dto.setComments(port.getComments());
+ dto.setConcurrentlySchedulableTaskCount(port.getMaxConcurrentTasks());
+ dto.setParentGroupId(port.getProcessGroup().getIdentifier());
+ dto.setState(port.getScheduledState().toString());
+ dto.setType(port.getConnectableType().name());
+ dto.setVersionedComponentId(port.getVersionedComponentId().orElse(null));
+
+ // if this port is on the root group, determine whether it is actually connected to another NiFi instance
+ if (port instanceof RootGroupPort) {
+ final RootGroupPort rootGroupPort = (RootGroupPort) port;
+ dto.setTransmitting(rootGroupPort.isTransmitting());
+ dto.setGroupAccessControl(rootGroupPort.getGroupAccessControl());
+ dto.setUserAccessControl(rootGroupPort.getUserAccessControl());
+ }
+
+ final Collection<ValidationResult> validationErrors = port.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
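+ /**
+ * Creates a ReportingTaskDTO for the specified ReportingTaskNode, masking sensitive property values.
+ *
+ * @param reportingTaskNode node
+ * @return dto
+ */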
+ public ReportingTaskDTO createReportingTaskDto(final ReportingTaskNode reportingTaskNode) {
+ final BundleCoordinate bundleCoordinate = reportingTaskNode.getBundleCoordinate();
+ final List<Bundle> compatibleBundles = extensionManager.getBundles(reportingTaskNode.getCanonicalClassName()).stream().filter(bundle -> {
+ final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
+ return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
+ }).collect(Collectors.toList());
+
+ final ReportingTaskDTO dto = new ReportingTaskDTO();
+ dto.setId(reportingTaskNode.getIdentifier());
+ dto.setName(reportingTaskNode.getName());
+ dto.setType(reportingTaskNode.getCanonicalClassName());
+ dto.setBundle(createBundleDto(bundleCoordinate));
+ dto.setSchedulingStrategy(reportingTaskNode.getSchedulingStrategy().name());
+ dto.setSchedulingPeriod(reportingTaskNode.getSchedulingPeriod());
+ dto.setState(reportingTaskNode.getScheduledState().name());
+ dto.setActiveThreadCount(reportingTaskNode.getActiveThreadCount());
+ dto.setAnnotationData(reportingTaskNode.getAnnotationData());
+ dto.setComments(reportingTaskNode.getComments());
+ dto.setPersistsState(reportingTaskNode.getReportingTask().getClass().isAnnotationPresent(Stateful.class));
+ dto.setRestricted(reportingTaskNode.isRestricted());
+ dto.setDeprecated(reportingTaskNode.isDeprecated());
+ dto.setExtensionMissing(reportingTaskNode.isExtensionMissing());
+ dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);
+
+ final Map<String, String> defaultSchedulingPeriod = new HashMap<>();
+ defaultSchedulingPeriod.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
+ defaultSchedulingPeriod.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());
+ dto.setDefaultSchedulingPeriod(defaultSchedulingPeriod);
+
+ // sort a copy of the properties
+ final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
+ @Override
+ public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
+ return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
+ }
+ });
+ sortedProperties.putAll(reportingTaskNode.getProperties());
+
+ // get the property order from the reporting task
+ final ReportingTask reportingTask = reportingTaskNode.getReportingTask();
+ final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
+ final List<PropertyDescriptor> descriptors = reportingTask.getPropertyDescriptors();
+ if (descriptors != null && !descriptors.isEmpty()) {
+ for (final PropertyDescriptor descriptor : descriptors) {
+ orderedProperties.put(descriptor, null);
+ }
+ }
+ orderedProperties.putAll(sortedProperties);
+
+ // build the descriptor and property dtos
+ dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
+ dto.setProperties(new LinkedHashMap<String, String>());
+ for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
+ final PropertyDescriptor descriptor = entry.getKey();
+
+ // store the property descriptor
+ dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, null));
+
+ // determine the property value - don't include sensitive properties
+ String propertyValue = entry.getValue();
+ if (propertyValue != null && descriptor.isSensitive()) {
+ propertyValue = SENSITIVE_VALUE_MASK;
+ }
+
+ // set the property value
+ dto.getProperties().put(descriptor.getName(), propertyValue);
+ }
+
+ final ValidationStatus validationStatus = reportingTaskNode.getValidationStatus(1, TimeUnit.MILLISECONDS);
+ dto.setValidationStatus(validationStatus.name());
+
+ // add the validation errors
+ final Collection<ValidationResult> validationErrors = reportingTaskNode.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
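+ /**
+ * Creates a ControllerServiceDTO for the specified ControllerServiceNode, masking sensitive property values.
+ *
+ * @param controllerServiceNode node
+ * @return dto
+ */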
+ public ControllerServiceDTO createControllerServiceDto(final ControllerServiceNode controllerServiceNode) {
+ final BundleCoordinate bundleCoordinate = controllerServiceNode.getBundleCoordinate();
+ final List<Bundle> compatibleBundles = extensionManager.getBundles(controllerServiceNode.getCanonicalClassName()).stream().filter(bundle -> {
+ final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
+ return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
+ }).collect(Collectors.toList());
+
+ final ControllerServiceDTO dto = new ControllerServiceDTO();
+ dto.setId(controllerServiceNode.getIdentifier());
+ dto.setParentGroupId(controllerServiceNode.getProcessGroup() == null ? null : controllerServiceNode.getProcessGroup().getIdentifier());
+ dto.setName(controllerServiceNode.getName());
+ dto.setType(controllerServiceNode.getCanonicalClassName());
+ dto.setBundle(createBundleDto(bundleCoordinate));
+ dto.setControllerServiceApis(createControllerServiceApiDto(controllerServiceNode.getControllerServiceImplementation().getClass()));
+ dto.setState(controllerServiceNode.getState().name());
+ dto.setAnnotationData(controllerServiceNode.getAnnotationData());
+ dto.setComments(controllerServiceNode.getComments());
+ dto.setPersistsState(controllerServiceNode.getControllerServiceImplementation().getClass().isAnnotationPresent(Stateful.class));
+ dto.setRestricted(controllerServiceNode.isRestricted());
+ dto.setDeprecated(controllerServiceNode.isDeprecated());
+ dto.setExtensionMissing(controllerServiceNode.isExtensionMissing());
+ dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);
+ dto.setVersionedComponentId(controllerServiceNode.getVersionedComponentId().orElse(null));
+
+ // sort a copy of the properties
+ final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
+ @Override
+ public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
+ return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
+ }
+ });
+ sortedProperties.putAll(controllerServiceNode.getProperties());
+
+ // get the property order from the controller service
+ final ControllerService controllerService = controllerServiceNode.getControllerServiceImplementation();
+ final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
+ final List<PropertyDescriptor> descriptors = controllerService.getPropertyDescriptors();
+ if (descriptors != null && !descriptors.isEmpty()) {
+ for (final PropertyDescriptor descriptor : descriptors) {
+ orderedProperties.put(descriptor, null);
+ }
+ }
+ orderedProperties.putAll(sortedProperties);
+
+ // build the descriptor and property dtos
+ dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
+ dto.setProperties(new LinkedHashMap<String, String>());
+ for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
+ final PropertyDescriptor descriptor = entry.getKey();
+
+ // store the property descriptor
+ final String groupId = controllerServiceNode.getProcessGroup() == null ? null : controllerServiceNode.getProcessGroup().getIdentifier();
+ dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, groupId));
+
+ // determine the property value - don't include sensitive properties
+ String propertyValue = entry.getValue();
+ if (propertyValue != null && descriptor.isSensitive()) {
+ propertyValue = SENSITIVE_VALUE_MASK;
+ }
+
+ // set the property value
+ dto.getProperties().put(descriptor.getName(), propertyValue);
+ }
+
+ dto.setValidationStatus(controllerServiceNode.getValidationStatus(1, TimeUnit.MILLISECONDS).name());
+
+ // add the validation errors
+ final Collection<ValidationResult> validationErrors = controllerServiceNode.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
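+ /**
+ * Creates a ControllerServiceReferencingComponentDTO for the specified referencing component
+ * (processor, controller service, or reporting task).
+ *
+ * @param component component
+ * @return dto
+ */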
+ public ControllerServiceReferencingComponentDTO createControllerServiceReferencingComponentDTO(final ComponentNode component) {
+ final ControllerServiceReferencingComponentDTO dto = new ControllerServiceReferencingComponentDTO();
+ dto.setId(component.getIdentifier());
+ dto.setName(component.getName());
+
+ String processGroupId = null;
+ List<PropertyDescriptor> propertyDescriptors = null;
+ Collection<ValidationResult> validationErrors = null;
+ if (component instanceof ProcessorNode) {
+ final ProcessorNode node = ((ProcessorNode) component);
+ dto.setGroupId(node.getProcessGroup().getIdentifier());
+ dto.setState(node.getScheduledState().name());
+ dto.setActiveThreadCount(node.getActiveThreadCount());
+ dto.setType(node.getComponentType());
+ dto.setReferenceType(Processor.class.getSimpleName());
+
+ propertyDescriptors = node.getProcessor().getPropertyDescriptors();
+ validationErrors = node.getValidationErrors();
+ processGroupId = node.getProcessGroup().getIdentifier();
+ } else if (component instanceof ControllerServiceNode) {
+ final ControllerServiceNode node = ((ControllerServiceNode) component);
+ dto.setState(node.getState().name());
+ dto.setType(node.getComponentType());
+ dto.setReferenceType(ControllerService.class.getSimpleName());
+
+ propertyDescriptors = node.getControllerServiceImplementation().getPropertyDescriptors();
+ validationErrors = node.getValidationErrors();
+ processGroupId = node.getProcessGroup() == null ? null : node.getProcessGroup().getIdentifier();
+ } else if (component instanceof ReportingTaskNode) {
+ final ReportingTaskNode node = ((ReportingTaskNode) component);
+ dto.setState(node.getScheduledState().name());
+ dto.setActiveThreadCount(node.getActiveThreadCount());
+ dto.setType(node.getComponentType());
+ dto.setReferenceType(ReportingTask.class.getSimpleName());
+
+ propertyDescriptors = node.getReportingTask().getPropertyDescriptors();
+ validationErrors = node.getValidationErrors();
+ processGroupId = null;
+ }
+
+ // ensure the descriptor list is non-null
+ if (propertyDescriptors == null) {
+ propertyDescriptors = new ArrayList<>();
+ }
+
+ // process properties unconditionally since dynamic properties are available here and not in getPropertyDescriptors
+ final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
+ @Override
+ public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
+ return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
+ }
+ });
+ sortedProperties.putAll(component.getProperties());
+
+ final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
+ for (final PropertyDescriptor descriptor : propertyDescriptors) {
+ orderedProperties.put(descriptor, null);
+ }
+ orderedProperties.putAll(sortedProperties);
+
+ // build the descriptor and property dtos
+ dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
+ dto.setProperties(new LinkedHashMap<String, String>());
+ for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
+ final PropertyDescriptor descriptor = entry.getKey();
+
+ // store the property descriptor
+ dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, processGroupId));
+
+ // determine the property value - don't include sensitive properties
+ String propertyValue = entry.getValue();
+ if (propertyValue != null && descriptor.isSensitive()) {
+ propertyValue = SENSITIVE_VALUE_MASK;
+ }
+
+ // set the property value
+ dto.getProperties().put(descriptor.getName(), propertyValue);
+ }
+
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
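+ /**
+ * Creates a RemoteProcessGroupPortDTO for the specified RemoteGroupPort.
+ *
+ * @param port port
+ * @return dto
+ */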
+ public RemoteProcessGroupPortDTO createRemoteProcessGroupPortDto(final RemoteGroupPort port) {
+ if (port == null) {
+ return null;
+ }
+
+ final RemoteProcessGroupPortDTO dto = new RemoteProcessGroupPortDTO();
+ dto.setId(port.getIdentifier());
+ dto.setGroupId(port.getRemoteProcessGroup().getIdentifier());
+ dto.setTargetId(port.getTargetIdentifier());
+ dto.setName(port.getName());
+ dto.setComments(port.getComments());
+ dto.setTransmitting(port.isRunning());
+ dto.setTargetRunning(port.isTargetRunning());
+ dto.setConcurrentlySchedulableTaskCount(port.getMaxConcurrentTasks());
+ dto.setUseCompression(port.isUseCompression());
+ dto.setExists(port.getTargetExists());
+ dto.setVersionedComponentId(port.getVersionedComponentId().orElse(null));
+
+ final BatchSettingsDTO batchDTO = new BatchSettingsDTO();
+ batchDTO.setCount(port.getBatchCount());
+ batchDTO.setSize(port.getBatchSize());
+ batchDTO.setDuration(port.getBatchDuration());
+ dto.setBatchSettings(batchDTO);
+
+ // determine if this port is currently connected to another component locally
+ if (ConnectableType.REMOTE_OUTPUT_PORT.equals(port.getConnectableType())) {
+ dto.setConnected(!port.getConnections().isEmpty());
+ } else {
+ dto.setConnected(port.hasIncomingConnection());
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a RemoteProcessGroupDTO from the specified RemoteProcessGroup.
+ *
+ * @param group group
+ * @return dto
+ */
+ public RemoteProcessGroupDTO createRemoteProcessGroupDto(final RemoteProcessGroup group) {
+ if (group == null) {
+ return null;
+ }
+
+ final Set<RemoteProcessGroupPortDTO> inputPorts = new HashSet<>();
+ final Set<RemoteProcessGroupPortDTO> outputPorts = new HashSet<>();
+
+ int activeRemoteInputPortCount = 0;
+ int inactiveRemoteInputPortCount = 0;
+ for (final Port port : group.getInputPorts()) {
+ inputPorts.add(createRemoteProcessGroupPortDto((RemoteGroupPort) port));
+
+ if (port.hasIncomingConnection()) {
+ if (port.isRunning()) {
+ activeRemoteInputPortCount++;
+ } else {
+ inactiveRemoteInputPortCount++;
+ }
+ }
+ }
+
+ int activeRemoteOutputPortCount = 0;
+ int inactiveRemoteOutputPortCount = 0;
+ for (final Port port : group.getOutputPorts()) {
+ outputPorts.add(createRemoteProcessGroupPortDto((RemoteGroupPort) port));
+
+ if (!port.getConnections().isEmpty()) {
+ if (port.isRunning()) {
+ activeRemoteOutputPortCount++;
+ } else {
+ inactiveRemoteOutputPortCount++;
+ }
+ }
+ }
+
+ final RemoteProcessGroupContentsDTO contents = new RemoteProcessGroupContentsDTO();
+ contents.setInputPorts(inputPorts);
+ contents.setOutputPorts(outputPorts);
+
+ final RemoteProcessGroupDTO dto = new RemoteProcessGroupDTO();
+ dto.setId(group.getIdentifier());
+ dto.setName(group.getName());
+ dto.setPosition(createPositionDto(group.getPosition()));
+ dto.setComments(group.getComments());
+ dto.setTransmitting(group.isTransmitting());
+ dto.setCommunicationsTimeout(group.getCommunicationsTimeout());
+ dto.setYieldDuration(group.getYieldDuration());
+ dto.setParentGroupId(group.getProcessGroup().getIdentifier());
+ dto.setTargetUris(group.getTargetUris());
+ dto.setFlowRefreshed(group.getLastRefreshTime());
+ dto.setContents(contents);
+ dto.setTransportProtocol(group.getTransportProtocol().name());
+ dto.setProxyHost(group.getProxyHost());
+ dto.setProxyPort(group.getProxyPort());
+ dto.setProxyUser(group.getProxyUser());
+ if (!StringUtils.isEmpty(group.getProxyPassword())) {
+ dto.setProxyPassword(SENSITIVE_VALUE_MASK);
+ }
+
+ // only specify the secure flag if we know the target system has site to site enabled
+ if (group.isSiteToSiteEnabled()) {
+ dto.setTargetSecure(group.getSecureFlag());
+ }
+
+ if (group.getAuthorizationIssue() != null) {
+ dto.setAuthorizationIssues(Arrays.asList(group.getAuthorizationIssue()));
+ }
+
+ final Collection<ValidationResult> validationErrors = group.validate();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ dto.setLocalNetworkInterface(group.getNetworkInterface());
+
+ dto.setActiveRemoteInputPortCount(activeRemoteInputPortCount);
+ dto.setInactiveRemoteInputPortCount(inactiveRemoteInputPortCount);
+ dto.setActiveRemoteOutputPortCount(activeRemoteOutputPortCount);
+ dto.setInactiveRemoteOutputPortCount(inactiveRemoteOutputPortCount);
+ dto.setVersionedComponentId(group.getVersionedComponentId().orElse(null));
+
+ final RemoteProcessGroupCounts counts = group.getCounts();
+ if (counts != null) {
+ dto.setInputPortCount(counts.getInputPortCount());
+ dto.setOutputPortCount(counts.getOutputPortCount());
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a FlowBreadcrumbEntity for the specified ProcessGroup, recursively including its parent breadcrumbs.
+ *
+ * @param group group
+ * @return dto
+ */
+ private FlowBreadcrumbEntity createBreadcrumbEntity(final ProcessGroup group) {
+ if (group == null) {
+ return null;
+ }
+
+ final FlowBreadcrumbDTO dto = createBreadcrumbDto(group);
+ final PermissionsDTO permissions = createPermissionsDto(group);
+ final FlowBreadcrumbEntity entity = entityFactory.createFlowBreadcrumbEntity(dto, permissions);
+
+ if (group.getParent() != null) {
+ entity.setParentBreadcrumb(createBreadcrumbEntity(group.getParent()));
+ }
+
+ return entity;
+ }
+
+ /**
+ * Creates a FlowBreadcrumbDTO for the specified ProcessGroup.
+ *
+ * @param group group
+ * @return dto
+ */
+ private FlowBreadcrumbDTO createBreadcrumbDto(final ProcessGroup group) {
+ if (group == null) {
+ return null;
+ }
+
+ final FlowBreadcrumbDTO dto = new FlowBreadcrumbDTO();
+ dto.setId(group.getIdentifier());
+ dto.setName(group.getName());
+
+ final VersionControlInformationDTO versionControlInformation = createVersionControlInformationDto(group);
+ dto.setVersionControlInformation(versionControlInformation);
+
+ return dto;
+ }
+
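+ /**
+ * Creates a ComponentReferenceDTO for the specified Authorizable, or null if it is not a ComponentAuthorizable.
+ *
+ * @param authorizable authorizable
+ * @return dto
+ */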
+ public ComponentReferenceDTO createComponentReferenceDto(final Authorizable authorizable) {
+ if (!(authorizable instanceof ComponentAuthorizable)) {
+ return null;
+ }
+
+ final ComponentAuthorizable componentAuthorizable = (ComponentAuthorizable) authorizable;
+ final ComponentReferenceDTO dto = new ComponentReferenceDTO();
+ dto.setId(componentAuthorizable.getIdentifier());
+ dto.setParentGroupId(componentAuthorizable.getProcessGroupIdentifier());
+ dto.setName(authorizable.getResource().getName());
+
+ return dto;
+ }
+
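+ /**
+ * Creates an AccessPolicySummaryDTO for the specified AccessPolicy.
+ *
+ * @param accessPolicy access policy
+ * @param componentReference component reference
+ * @return dto
+ */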
+ public AccessPolicySummaryDTO createAccessPolicySummaryDto(final AccessPolicy accessPolicy, final ComponentReferenceEntity componentReference) {
+ if (accessPolicy == null) {
+ return null;
+ }
+
+ final AccessPolicySummaryDTO dto = new AccessPolicySummaryDTO();
+ dto.setId(accessPolicy.getIdentifier());
+ dto.setResource(accessPolicy.getResource());
+ dto.setAction(accessPolicy.getAction().toString());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isAccessPolicyConfigurable(authorizer, accessPolicy));
+ dto.setComponentReference(componentReference);
+ return dto;
+ }
+
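+ /**
+ * Creates an AccessPolicyDTO for the specified AccessPolicy and its associated users and user groups.
+ *
+ * @param accessPolicy access policy
+ * @param userGroups user groups
+ * @param users users
+ * @param componentReference component reference
+ * @return dto
+ */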
+ public AccessPolicyDTO createAccessPolicyDto(final AccessPolicy accessPolicy, final Set<TenantEntity> userGroups,
+ final Set<TenantEntity> users, final ComponentReferenceEntity componentReference) {
+
+ if (accessPolicy == null) {
+ return null;
+ }
+
+ final AccessPolicyDTO dto = new AccessPolicyDTO();
+ dto.setUserGroups(userGroups);
+ dto.setUsers(users);
+ dto.setId(accessPolicy.getIdentifier());
+ dto.setResource(accessPolicy.getResource());
+ dto.setAction(accessPolicy.getAction().toString());
+ dto.setConfigurable(AuthorizerCapabilityDetection.isAccessPolicyConfigurable(authorizer, accessPolicy));
+ dto.setComponentReference(componentReference);
+ return dto;
+ }
+
+ /**
+ * Creates the PermissionsDTO based on the specified Authorizable.
+ *
+ * @param authorizable authorizable
+ * @return dto
+ */
+ public PermissionsDTO createPermissionsDto(final Authorizable authorizable) {
+ return createPermissionsDto(authorizable, NiFiUserUtils.getNiFiUser());
+ }
+
+ /**
+ * Creates the PermissionsDTO based on the specified Authorizable for the given user
+ *
+ * @param authorizable authorizable
+ * @param user the NiFi User for which the Permissions are being created
+ * @return dto
+ */
+ public PermissionsDTO createPermissionsDto(final Authorizable authorizable, final NiFiUser user) {
+ final PermissionsDTO dto = new PermissionsDTO();
+ dto.setCanRead(authorizable.isAuthorized(authorizer, RequestAction.READ, user));
+ dto.setCanWrite(authorizable.isAuthorized(authorizer, RequestAction.WRITE, user));
+ return dto;
+ }
+
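+ /**
+ * Creates an AffectedComponentEntity from the specified ProcessorEntity.
+ *
+ * @param processorEntity processor entity
+ * @return affected component entity
+ */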
+ public AffectedComponentEntity createAffectedComponentEntity(final ProcessorEntity processorEntity) {
+ if (processorEntity == null) {
+ return null;
+ }
+
+ final AffectedComponentEntity component = new AffectedComponentEntity();
+ component.setBulletins(processorEntity.getBulletins());
+ component.setId(processorEntity.getId());
+ component.setPermissions(processorEntity.getPermissions());
+ component.setPosition(processorEntity.getPosition());
+ component.setRevision(processorEntity.getRevision());
+ component.setUri(processorEntity.getUri());
+
+ final ProcessorDTO processorDto = processorEntity.getComponent();
+ final AffectedComponentDTO componentDto = new AffectedComponentDTO();
+ componentDto.setId(processorDto.getId());
+ componentDto.setName(processorDto.getName());
+ componentDto.setProcessGroupId(processorDto.getParentGroupId());
+ componentDto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_PROCESSOR);
+ componentDto.setState(processorDto.getState());
+ componentDto.setValidationErrors(processorDto.getValidationErrors());
+ component.setComponent(componentDto);
+
+ return component;
+ }
+
+ public AffectedComponentEntity createAffectedComponentEntity(final PortEntity portEntity, final String referenceType) {
+ if (portEntity == null) {
+ return null;
+ }
+
+ final AffectedComponentEntity component = new AffectedComponentEntity();
+ component.setBulletins(portEntity.getBulletins());
+ component.setId(portEntity.getId());
+ component.setPermissions(portEntity.getPermissions());
+ component.setPosition(portEntity.getPosition());
+ component.setRevision(portEntity.getRevision());
+ component.setUri(portEntity.getUri());
+
+ final PortDTO portDto = portEntity.getComponent();
+ final AffectedComponentDTO componentDto = new AffectedComponentDTO();
+ componentDto.setId(portDto.getId());
+ componentDto.setName(portDto.getName());
+ componentDto.setProcessGroupId(portDto.getParentGroupId());
+ componentDto.setReferenceType(referenceType);
+ componentDto.setState(portDto.getState());
+ componentDto.setValidationErrors(portDto.getValidationErrors());
+ component.setComponent(componentDto);
+
+ return component;
+ }
+
+ public AffectedComponentEntity createAffectedComponentEntity(final ControllerServiceEntity serviceEntity) {
+ if (serviceEntity == null) {
+ return null;
+ }
+
+ final AffectedComponentEntity component = new AffectedComponentEntity();
+ component.setBulletins(serviceEntity.getBulletins());
+ component.setId(serviceEntity.getId());
+ component.setPermissions(serviceEntity.getPermissions());
+ component.setPosition(serviceEntity.getPosition());
+ component.setRevision(serviceEntity.getRevision());
+ component.setUri(serviceEntity.getUri());
+
+ final ControllerServiceDTO serviceDto = serviceEntity.getComponent();
+ final AffectedComponentDTO componentDto = new AffectedComponentDTO();
+ componentDto.setId(serviceDto.getId());
+ componentDto.setName(serviceDto.getName());
+ componentDto.setProcessGroupId(serviceDto.getParentGroupId());
+ componentDto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
+ componentDto.setState(serviceDto.getState());
+ componentDto.setValidationErrors(serviceDto.getValidationErrors());
+ component.setComponent(componentDto);
+
+ return component;
+ }
+
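+ /**
+ * Creates an AffectedComponentEntity for the specified remote process group port, using the
+ * enclosing RemoteProcessGroupEntity for permissions and revision.
+ *
+ * @param remotePortDto remote port dto
+ * @param referenceType reference type
+ * @param rpgEntity remote process group entity
+ * @return affected component entity
+ */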
+ public AffectedComponentEntity createAffectedComponentEntity(final RemoteProcessGroupPortDTO remotePortDto, final String referenceType, final RemoteProcessGroupEntity rpgEntity) {
+ if (remotePortDto == null) {
+ return null;
+ }
+
+ final AffectedComponentEntity component = new AffectedComponentEntity();
+ component.setId(remotePortDto.getId());
+ component.setPermissions(rpgEntity.getPermissions());
+ component.setRevision(rpgEntity.getRevision());
+ component.setUri(rpgEntity.getUri());
+
+ final AffectedComponentDTO componentDto = new AffectedComponentDTO();
+ componentDto.setId(remotePortDto.getId());
+ componentDto.setName(remotePortDto.getName());
+ componentDto.setProcessGroupId(remotePortDto.getGroupId());
+ componentDto.setReferenceType(referenceType);
+ componentDto.setState(remotePortDto.isTransmitting() ? "Running" : "Stopped");
+ component.setComponent(componentDto);
+
+ return component;
+ }
+
+
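+ /**
+ * Creates an AffectedComponentDTO for the specified ComponentNode.
+ *
+ * @param component component
+ * @return dto
+ */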
+ public AffectedComponentDTO createAffectedComponentDto(final ComponentNode component) {
+ final AffectedComponentDTO dto = new AffectedComponentDTO();
+ dto.setId(component.getIdentifier());
+ dto.setName(component.getName());
+ dto.setProcessGroupId(component.getProcessGroupIdentifier());
+
+ if (component instanceof ProcessorNode) {
+ final ProcessorNode node = ((ProcessorNode) component);
+ dto.setState(node.getScheduledState().name());
+ dto.setActiveThreadCount(node.getActiveThreadCount());
+ dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_PROCESSOR);
+ } else if (component instanceof ControllerServiceNode) {
+ final ControllerServiceNode node = ((ControllerServiceNode) component);
+ dto.setState(node.getState().name());
+ dto.setReferenceType(AffectedComponentDTO.COMPONENT_TYPE_CONTROLLER_SERVICE);
+ }
+
+ final Collection<ValidationResult> validationErrors = component.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a ProcessGroupDTO from the specified ProcessGroup.
+ *
+ * @param group group
+ * @return dto
+ */
+ public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group) {
+ return createProcessGroupDto(group, false);
+ }
+
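+ /**
+ * Creates a ProcessGroupFlowDTO for the specified ProcessGroup, including its breadcrumb and flow contents.
+ *
+ * @param group group
+ * @param groupStatus group status
+ * @param revisionManager revision manager
+ * @param getProcessGroupBulletins bulletin supplier for child groups
+ * @return dto
+ */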
+ public ProcessGroupFlowDTO createProcessGroupFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final RevisionManager revisionManager,
+ final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) {
+
+ final ProcessGroupFlowDTO dto = new ProcessGroupFlowDTO();
+ dto.setId(group.getIdentifier());
+ dto.setLastRefreshed(new Date());
+ dto.setBreadcrumb(createBreadcrumbEntity(group));
+ dto.setFlow(createFlowDto(group, groupStatus, revisionManager, getProcessGroupBulletins));
+
+ final ProcessGroup parent = group.getParent();
+ if (parent != null) {
+ dto.setParentGroupId(parent.getIdentifier());
+ }
+
+ return dto;
+ }
+
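+ /**
+ * Creates a FlowDTO for the components referenced by the specified snippet, marshalling the
+ * live components from the given group since the snippet itself is pruned.
+ *
+ * @param group group
+ * @param groupStatus group status
+ * @param snippet snippet
+ * @param revisionManager revision manager
+ * @param getProcessGroupBulletins bulletin supplier for child groups
+ * @return dto
+ */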
+ public FlowDTO createFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final FlowSnippetDTO snippet, final RevisionManager revisionManager,
+ final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) {
+ if (snippet == null) {
+ return null;
+ }
+
+ final FlowDTO flow = new FlowDTO();
+
+ for (final ConnectionDTO snippetConnection : snippet.getConnections()) {
+ final Connection connection = group.getConnection(snippetConnection.getId());
+
+ // marshal the actual connection as the snippet is pruned
+ final ConnectionDTO dto = createConnectionDto(connection);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(connection.getIdentifier()));
+ final PermissionsDTO accessPolicy = createPermissionsDto(connection);
+ final ConnectionStatusDTO status = getComponentStatus(
+ () -> groupStatus.getConnectionStatus().stream().filter(connectionStatus -> connection.getIdentifier().equals(connectionStatus.getId())).findFirst().orElse(null),
+ connectionStatus -> createConnectionStatusDto(connectionStatus)
+ );
+ flow.getConnections().add(entityFactory.createConnectionEntity(dto, revision, accessPolicy, status));
+ }
+
+ for (final FunnelDTO snippetFunnel : snippet.getFunnels()) {
+ final Funnel funnel = group.getFunnel(snippetFunnel.getId());
+
+ // marshal the actual funnel as the snippet is pruned
+ final FunnelDTO dto = createFunnelDto(funnel);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(funnel.getIdentifier()));
+ final PermissionsDTO accessPolicy = createPermissionsDto(funnel);
+ flow.getFunnels().add(entityFactory.createFunnelEntity(dto, revision, accessPolicy));
+ }
+
+ for (final PortDTO snippetInputPort : snippet.getInputPorts()) {
+ final Port inputPort = group.getInputPort(snippetInputPort.getId());
+
+ // marshal the actual port as the snippet is pruned
+ final PortDTO dto = createPortDto(inputPort);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(inputPort.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(inputPort);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(inputPort));
+ final PortStatusDTO status = getComponentStatus(
+ () -> groupStatus.getInputPortStatus().stream().filter(inputPortStatus -> inputPort.getIdentifier().equals(inputPortStatus.getId())).findFirst().orElse(null),
+ inputPortStatus -> createPortStatusDto(inputPortStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(inputPort.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ flow.getInputPorts().add(entityFactory.createPortEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final PortDTO snippetOutputPort : snippet.getOutputPorts()) {
+ final Port outputPort = group.getOutputPort(snippetOutputPort.getId());
+
+ // marshal the actual port as the snippet is pruned
+ final PortDTO dto = createPortDto(outputPort);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(outputPort.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(outputPort);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(outputPort));
+ final PortStatusDTO status = getComponentStatus(
+ () -> groupStatus.getOutputPortStatus().stream().filter(outputPortStatus -> outputPort.getIdentifier().equals(outputPortStatus.getId())).findFirst().orElse(null),
+ outputPortStatus -> createPortStatusDto(outputPortStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(outputPort.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ flow.getOutputPorts().add(entityFactory.createPortEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final LabelDTO snippetLabel : snippet.getLabels()) {
+ final Label label = group.getLabel(snippetLabel.getId());
+
+ // marshal the actual label as the snippet is pruned
+ final LabelDTO dto = createLabelDto(label);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(label.getIdentifier()));
+ final PermissionsDTO accessPolicy = createPermissionsDto(label);
+ flow.getLabels().add(entityFactory.createLabelEntity(dto, revision, accessPolicy));
+ }
+
+ for (final ProcessGroupDTO snippetProcessGroup : snippet.getProcessGroups()) {
+ final ProcessGroup processGroup = group.getProcessGroup(snippetProcessGroup.getId());
+
+ // marshal the actual group as the snippet is pruned
+ final ProcessGroupDTO dto = createProcessGroupDto(processGroup);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(processGroup.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(processGroup);
+ final ProcessGroupStatusDTO status = getComponentStatus(
+ () -> groupStatus.getProcessGroupStatus().stream().filter(processGroupStatus -> processGroup.getIdentifier().equals(processGroupStatus.getId())).findFirst().orElse(null),
+ processGroupStatus -> createConciseProcessGroupStatusDto(processGroupStatus)
+ );
+ final List<BulletinEntity> bulletins = getProcessGroupBulletins.apply(processGroup);
+ flow.getProcessGroups().add(entityFactory.createProcessGroupEntity(dto, revision, permissions, status, bulletins));
+ }
+
+ for (final ProcessorDTO snippetProcessor : snippet.getProcessors()) {
+ final ProcessorNode processor = group.getProcessor(snippetProcessor.getId());
+
+ // marshal the actual processor as the snippet is pruned
+ final ProcessorDTO dto = createProcessorDto(processor);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(processor.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(processor);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(processor));
+ final ProcessorStatusDTO status = getComponentStatus(
+ () -> groupStatus.getProcessorStatus().stream().filter(processorStatus -> processor.getIdentifier().equals(processorStatus.getId())).findFirst().orElse(null),
+ processorStatus -> createProcessorStatusDto(processorStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(processor.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ flow.getProcessors().add(entityFactory.createProcessorEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final RemoteProcessGroupDTO snippetRemoteProcessGroup : snippet.getRemoteProcessGroups()) {
+ final RemoteProcessGroup remoteProcessGroup = group.getRemoteProcessGroup(snippetRemoteProcessGroup.getId());
+
+ // marshal the actual remote process group as the snippet is pruned
+ final RemoteProcessGroupDTO dto = createRemoteProcessGroupDto(remoteProcessGroup);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(remoteProcessGroup.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(remoteProcessGroup);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(remoteProcessGroup));
+ final RemoteProcessGroupStatusDTO status = getComponentStatus(
+ () -> groupStatus.getRemoteProcessGroupStatus().stream().filter(rpgStatus -> remoteProcessGroup.getIdentifier().equals(rpgStatus.getId())).findFirst().orElse(null),
+ remoteProcessGroupStatus -> createRemoteProcessGroupStatusDto(remoteProcessGroup, remoteProcessGroupStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(remoteProcessGroup.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ flow.getRemoteProcessGroups().add(entityFactory.createRemoteProcessGroupEntity(dto, revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ return flow;
+ }
+
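+ /**
+ * Resolves a component's status via the supplied supplier and converts it to a DTO,
+ * returning null when no status is available.
+ */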
+ private <T, S> T getComponentStatus(final Supplier<S> getComponentStatus, final Function<S, T> convertToDto) {
+ final T statusDTO;
+ final S status = getComponentStatus.get();
+ if (status != null) {
+ statusDTO = convertToDto.apply(status);
+ } else {
+ statusDTO = null;
+ }
+ return statusDTO;
+ }
+
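+ /**
+ * Creates a FlowDTO for all components directly contained in the specified ProcessGroup.
+ *
+ * @param group group
+ * @param groupStatus group status
+ * @param revisionManager revision manager
+ * @param getProcessGroupBulletins bulletin supplier for child groups
+ * @return dto
+ */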
+ public FlowDTO createFlowDto(final ProcessGroup group, final ProcessGroupStatus groupStatus, final RevisionManager revisionManager,
+ final Function<ProcessGroup, List<BulletinEntity>> getProcessGroupBulletins) {
+ final FlowDTO dto = new FlowDTO();
+
+ for (final ProcessorNode procNode : group.getProcessors()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(procNode.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(procNode);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(procNode));
+ final ProcessorStatusDTO status = getComponentStatus(
+ () -> groupStatus.getProcessorStatus().stream().filter(processorStatus -> procNode.getIdentifier().equals(processorStatus.getId())).findFirst().orElse(null),
+ processorStatus -> createProcessorStatusDto(processorStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(procNode.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ dto.getProcessors().add(entityFactory.createProcessorEntity(createProcessorDto(procNode), revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final Connection connNode : group.getConnections()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(connNode.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(connNode);
+ final ConnectionStatusDTO status = getComponentStatus(
+ () -> groupStatus.getConnectionStatus().stream().filter(connectionStatus -> connNode.getIdentifier().equals(connectionStatus.getId())).findFirst().orElse(null),
+ connectionStatus -> createConnectionStatusDto(connectionStatus)
+ );
+ dto.getConnections().add(entityFactory.createConnectionEntity(createConnectionDto(connNode), revision, permissions, status));
+ }
+
+ for (final Label label : group.getLabels()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(label.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(label);
+ dto.getLabels().add(entityFactory.createLabelEntity(createLabelDto(label), revision, permissions));
+ }
+
+ for (final Funnel funnel : group.getFunnels()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(funnel.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(funnel);
+ dto.getFunnels().add(entityFactory.createFunnelEntity(createFunnelDto(funnel), revision, permissions));
+ }
+
+ for (final ProcessGroup childGroup : group.getProcessGroups()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(childGroup.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(childGroup);
+ final ProcessGroupStatusDTO status = getComponentStatus(
+ () -> groupStatus.getProcessGroupStatus().stream().filter(processGroupStatus -> childGroup.getIdentifier().equals(processGroupStatus.getId())).findFirst().orElse(null),
+ processGroupStatus -> createConciseProcessGroupStatusDto(processGroupStatus)
+ );
+ final List<BulletinEntity> bulletins = getProcessGroupBulletins.apply(childGroup);
+ dto.getProcessGroups().add(entityFactory.createProcessGroupEntity(createProcessGroupDto(childGroup), revision, permissions, status, bulletins));
+ }
+
+ for (final RemoteProcessGroup rpg : group.getRemoteProcessGroups()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(rpg.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(rpg);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(rpg));
+ final RemoteProcessGroupStatusDTO status = getComponentStatus(
+ () -> groupStatus.getRemoteProcessGroupStatus().stream().filter(remoteProcessGroupStatus -> rpg.getIdentifier().equals(remoteProcessGroupStatus.getId())).findFirst().orElse(null),
+ remoteProcessGroupStatus -> createRemoteProcessGroupStatusDto(rpg, remoteProcessGroupStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(rpg.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ dto.getRemoteProcessGroups().add(entityFactory.createRemoteProcessGroupEntity(createRemoteProcessGroupDto(rpg), revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final Port inputPort : group.getInputPorts()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(inputPort.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(inputPort);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(inputPort));
+ final PortStatusDTO status = getComponentStatus(
+ () -> groupStatus.getInputPortStatus().stream().filter(inputPortStatus -> inputPort.getIdentifier().equals(inputPortStatus.getId())).findFirst().orElse(null),
+ inputPortStatus -> createPortStatusDto(inputPortStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(inputPort.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ dto.getInputPorts().add(entityFactory.createPortEntity(createPortDto(inputPort), revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ for (final Port outputPort : group.getOutputPorts()) {
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(outputPort.getIdentifier()));
+ final PermissionsDTO permissions = createPermissionsDto(outputPort);
+ final PermissionsDTO operatePermissions = createPermissionsDto(new OperationAuthorizable(outputPort));
+ final PortStatusDTO status = getComponentStatus(
+ () -> groupStatus.getOutputPortStatus().stream().filter(outputPortStatus -> outputPort.getIdentifier().equals(outputPortStatus.getId())).findFirst().orElse(null),
+ outputPortStatus -> createPortStatusDto(outputPortStatus)
+ );
+ final List<BulletinDTO> bulletins = createBulletinDtos(bulletinRepository.findBulletinsForSource(outputPort.getIdentifier()));
+ final List<BulletinEntity> bulletinEntities = bulletins.stream().map(bulletin -> entityFactory.createBulletinEntity(bulletin, permissions.getCanRead())).collect(Collectors.toList());
+ dto.getOutputPorts().add(entityFactory.createPortEntity(createPortDto(outputPort), revision, permissions, operatePermissions, status, bulletinEntities));
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a ProcessGroupDTO from the specified ProcessGroup.
+ *
+ * @param group group
+ * @param recurse recurse
+ * @return dto
+ */
+ public ProcessGroupDTO createProcessGroupDto(final ProcessGroup group, final boolean recurse) {
+ final ProcessGroupDTO dto = createConciseProcessGroupDto(group);
+ dto.setContents(createProcessGroupContentsDto(group, recurse));
+ return dto;
+ }
+
+ /**
+ * Creates a ProcessGroupDTO from the specified ProcessGroup.
+ *
+ * @param group group
+ * @return dto
+ */
+ private ProcessGroupDTO createConciseProcessGroupDto(final ProcessGroup group) {
+ if (group == null) {
+ return null;
+ }
+
+ final ProcessGroupDTO dto = new ProcessGroupDTO();
+ dto.setId(group.getIdentifier());
+ dto.setPosition(createPositionDto(group.getPosition()));
+ dto.setComments(group.getComments());
+ dto.setName(group.getName());
+ dto.setVersionedComponentId(group.getVersionedComponentId().orElse(null));
+ dto.setVersionControlInformation(createVersionControlInformationDto(group));
+
+ final Map<String, String> variables = group.getVariableRegistry().getVariableMap().entrySet().stream()
+ .collect(Collectors.toMap(entry -> entry.getKey().getName(), entry -> entry.getValue()));
+ dto.setVariables(variables);
+
+ final ProcessGroup parentGroup = group.getParent();
+ if (parentGroup != null) {
+ dto.setParentGroupId(parentGroup.getIdentifier());
+ }
+
+ final ProcessGroupCounts counts = group.getCounts();
+ dto.setRunningCount(counts.getRunningCount());
+ dto.setStoppedCount(counts.getStoppedCount());
+ dto.setInvalidCount(counts.getInvalidCount());
+ dto.setDisabledCount(counts.getDisabledCount());
+ dto.setInputPortCount(counts.getInputPortCount());
+ dto.setOutputPortCount(counts.getOutputPortCount());
+ dto.setActiveRemotePortCount(counts.getActiveRemotePortCount());
+ dto.setInactiveRemotePortCount(counts.getInactiveRemotePortCount());
+ dto.setUpToDateCount(counts.getUpToDateCount());
+ dto.setLocallyModifiedCount(counts.getLocallyModifiedCount());
+ dto.setStaleCount(counts.getStaleCount());
+ dto.setLocallyModifiedAndStaleCount(counts.getLocallyModifiedAndStaleCount());
+ dto.setSyncFailureCount(counts.getSyncFailureCount());
+
+ return dto;
+ }
+
+
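+ /**
+ * Creates ComponentDifferenceDTOs for the specified FlowComparison, grouping individual
+ * differences by component and filtering out differences that cannot be acted upon locally.
+ *
+ * @param comparison flow comparison
+ * @return component difference dtos
+ */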
+ public Set<ComponentDifferenceDTO> createComponentDifferenceDtos(final FlowComparison comparison) {
+ final Map<ComponentDifferenceDTO, List<DifferenceDTO>> differencesByComponent = new HashMap<>();
+
+ for (final FlowDifference difference : comparison.getDifferences()) {
+ // Ignore these as local differences for now because we cannot do anything with them
+ if (difference.getDifferenceType() == DifferenceType.BUNDLE_CHANGED) {
+ continue;
+ }
+
+ // Ignore differences for adding remote ports
+ if (FlowDifferenceFilters.isAddedOrRemovedRemotePort(difference)) {
+ continue;
+ }
+
+ if (FlowDifferenceFilters.isIgnorableVersionedFlowCoordinateChange(difference)) {
+ continue;
+ }
+
+ final ComponentDifferenceDTO componentDiff = createComponentDifference(difference);
+ final List<DifferenceDTO> differences = differencesByComponent.computeIfAbsent(componentDiff, key -> new ArrayList<>());
+
+ final DifferenceDTO dto = new DifferenceDTO();
+ dto.setDifferenceType(difference.getDifferenceType().getDescription());
+ dto.setDifference(difference.getDescription());
+
+ differences.add(dto);
+ }
+
+ for (final Map.Entry<ComponentDifferenceDTO, List<DifferenceDTO>> entry : differencesByComponent.entrySet()) {
+ entry.getKey().setDifferences(entry.getValue());
+ }
+
+ return differencesByComponent.keySet();
+ }
+
+ private ComponentDifferenceDTO createComponentDifference(final FlowDifference difference) {
+ VersionedComponent component = difference.getComponentA();
+ if (component == null || difference.getComponentB() instanceof InstantiatedVersionedComponent) {
+ component = difference.getComponentB();
+ }
+
+ final ComponentDifferenceDTO dto = new ComponentDifferenceDTO();
+ dto.setComponentName(component.getName());
+ dto.setComponentType(component.getComponentType().toString());
+
+ if (component instanceof InstantiatedVersionedComponent) {
+ final InstantiatedVersionedComponent instantiatedComponent = (InstantiatedVersionedComponent) component;
+ dto.setComponentId(instantiatedComponent.getInstanceId());
+ dto.setProcessGroupId(instantiatedComponent.getInstanceGroupId());
+ } else {
+ dto.setComponentId(component.getIdentifier());
+ // the original code assigned dto.getProcessGroupId() back to itself (a no-op); using the
+ // versioned component's own group identifier is assumed here to be the intended behavior
+ dto.setProcessGroupId(component.getGroupIdentifier());
+ }
+
+ return dto;
+ }
+
+
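+ /**
+ * Creates a VersionControlInformationDTO for the specified ProcessGroup, or null if the group
+ * is not under version control.
+ *
+ * @param group group
+ * @return dto
+ */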
+ public VersionControlInformationDTO createVersionControlInformationDto(final ProcessGroup group) {
+ if (group == null) {
+ return null;
+ }
+
+ final VersionControlInformation versionControlInfo = group.getVersionControlInformation();
+ if (versionControlInfo == null) {
+ return null;
+ }
+
+ final VersionControlInformationDTO dto = new VersionControlInformationDTO();
+ dto.setGroupId(group.getIdentifier());
+ dto.setRegistryId(versionControlInfo.getRegistryIdentifier());
+ dto.setRegistryName(versionControlInfo.getRegistryName());
+ dto.setBucketId(versionControlInfo.getBucketIdentifier());
+ dto.setBucketName(versionControlInfo.getBucketName());
+ dto.setFlowId(versionControlInfo.getFlowIdentifier());
+ dto.setFlowName(versionControlInfo.getFlowName());
+ dto.setFlowDescription(versionControlInfo.getFlowDescription());
+ dto.setVersion(versionControlInfo.getVersion());
+
+ final VersionedFlowStatus status = versionControlInfo.getStatus();
+ final VersionedFlowState state = status.getState();
+ dto.setState(state == null ? null : state.name());
+ dto.setStateExplanation(status.getStateExplanation());
+
+ return dto;
+ }
+
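+ /**
+ * Builds a mapping of versioned component instance ids to NiFi component identifiers for the
+ * specified instantiated process group, recursing into child groups.
+ *
+ * @param group instantiated group
+ * @return mapping of instance id to component id
+ */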
+ public Map<String, String> createVersionControlComponentMappingDto(final InstantiatedVersionedProcessGroup group) {
+ final Map<String, String> mapping = new HashMap<>();
+
+ mapping.put(group.getInstanceId(), group.getIdentifier());
+ group.getProcessors().stream()
+ .map(proc -> (InstantiatedVersionedProcessor) proc)
+ .forEach(proc -> mapping.put(proc.getInstanceId(), proc.getIdentifier()));
+ group.getFunnels().stream()
+ .map(funnel -> (InstantiatedVersionedFunnel) funnel)
+ .forEach(funnel -> mapping.put(funnel.getInstanceId(), funnel.getIdentifier()));
+ group.getInputPorts().stream()
+ .map(port -> (InstantiatedVersionedPort) port)
+ .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
+ group.getOutputPorts().stream()
+ .map(port -> (InstantiatedVersionedPort) port)
+ .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
+ group.getControllerServices().stream()
+ .map(service -> (InstantiatedVersionedControllerService) service)
+ .forEach(service -> mapping.put(service.getInstanceId(), service.getIdentifier()));
+ group.getLabels().stream()
+ .map(label -> (InstantiatedVersionedLabel) label)
+ .forEach(label -> mapping.put(label.getInstanceId(), label.getIdentifier()));
+ group.getConnections().stream()
+ .map(conn -> (InstantiatedVersionedConnection) conn)
+ .forEach(conn -> mapping.put(conn.getInstanceId(), conn.getIdentifier()));
+ group.getRemoteProcessGroups().stream()
+ .map(rpg -> (InstantiatedVersionedRemoteProcessGroup) rpg)
+ .forEach(rpg -> {
+ mapping.put(rpg.getInstanceId(), rpg.getIdentifier());
+
+ if (rpg.getInputPorts() != null) {
+ rpg.getInputPorts().stream()
+ .map(port -> (InstantiatedVersionedRemoteGroupPort) port)
+ .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
+ }
+
+ if (rpg.getOutputPorts() != null) {
+ rpg.getOutputPorts().stream()
+ .map(port -> (InstantiatedVersionedRemoteGroupPort) port)
+ .forEach(port -> mapping.put(port.getInstanceId(), port.getIdentifier()));
+ }
+ });
+
+ group.getProcessGroups().stream()
+ .map(child -> (InstantiatedVersionedProcessGroup) child)
+ .forEach(child -> {
+ final Map<String, String> childMapping = createVersionControlComponentMappingDto(child);
+ mapping.putAll(childMapping);
+ });
+
+ return mapping;
+ }
+
+
+ /**
+ * Creates a FlowSnippetDTO representing the contents of the specified ProcessGroup.
+ *
+ * @param group group
+ * @param recurse recurse
+ * @return dto
+ */
+ private FlowSnippetDTO createProcessGroupContentsDto(final ProcessGroup group, final boolean recurse) {
+ if (group == null) {
+ return null;
+ }
+
+ final FlowSnippetDTO dto = new FlowSnippetDTO();
+
+ for (final ProcessorNode procNode : group.getProcessors()) {
+ dto.getProcessors().add(createProcessorDto(procNode));
+ }
+
+ for (final Connection connNode : group.getConnections()) {
+ dto.getConnections().add(createConnectionDto(connNode));
+ }
+
+ for (final Label label : group.getLabels()) {
+ dto.getLabels().add(createLabelDto(label));
+ }
+
+ for (final Funnel funnel : group.getFunnels()) {
+ dto.getFunnels().add(createFunnelDto(funnel));
+ }
+
+ for (final ProcessGroup childGroup : group.getProcessGroups()) {
+ if (recurse) {
+ dto.getProcessGroups().add(createProcessGroupDto(childGroup, recurse));
+ } else {
+ dto.getProcessGroups().add(createConciseProcessGroupDto(childGroup));
+ }
+ }
+
+ for (final RemoteProcessGroup remoteProcessGroup : group.getRemoteProcessGroups()) {
+ dto.getRemoteProcessGroups().add(createRemoteProcessGroupDto(remoteProcessGroup));
+ }
+
+ for (final Port inputPort : group.getInputPorts()) {
+ dto.getInputPorts().add(createPortDto(inputPort));
+ }
+
+ for (final Port outputPort : group.getOutputPorts()) {
+ dto.getOutputPorts().add(createPortDto(outputPort));
+ }
+
+ return dto;
+ }
+
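+ /**
+ * Determines whether the specified extension class is annotated as restricted.
+ */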
+ private boolean isRestricted(final Class<?> cls) {
+ return cls.isAnnotationPresent(Restricted.class);
+ }
+
+ private String getUsageRestriction(final Class<?> cls) {
+ final Restricted restricted = cls.getAnnotation(Restricted.class);
+
+ if (restricted == null) {
+ return null;
+ }
+
+ if (StringUtils.isBlank(restricted.value())) {
+ return null;
+ }
+
+ return restricted.value();
+ }
+
+ private Set<ExplicitRestrictionDTO> getExplicitRestrictions(final Class<?> cls) {
+ final Restricted restricted = cls.getAnnotation(Restricted.class);
+
+ if (restricted == null) {
+ return null;
+ }
+
+ final Restriction[] restrictions = restricted.restrictions();
+
+ if (restrictions == null || restrictions.length == 0) {
+ return null;
+ }
+
+ return Arrays.stream(restrictions).map(restriction -> {
+ final RequiredPermissionDTO requiredPermission = new RequiredPermissionDTO();
+ requiredPermission.setId(restriction.requiredPermission().getPermissionIdentifier());
+ requiredPermission.setLabel(restriction.requiredPermission().getPermissionLabel());
+
+ final ExplicitRestrictionDTO usageRestriction = new ExplicitRestrictionDTO();
+ usageRestriction.setRequiredPermission(requiredPermission);
+ usageRestriction.setExplanation(restriction.explanation());
+ return usageRestriction;
+ }).collect(Collectors.toSet());
+ }
+
+ private String getDeprecationReason(final Class<?> cls) {
+ final DeprecationNotice deprecationNotice = cls.getAnnotation(DeprecationNotice.class);
+ return deprecationNotice == null ? null : deprecationNotice.reason();
+ }
+
+ public Set<AffectedComponentEntity> createAffectedComponentEntities(final Set<ComponentNode> affectedComponents, final RevisionManager revisionManager) {
+ return affectedComponents.stream()
+ .map(component -> {
+ final AffectedComponentDTO affectedComponent = createAffectedComponentDto(component);
+ final PermissionsDTO permissions = createPermissionsDto(component);
+ final RevisionDTO revision = createRevisionDTO(revisionManager.getRevision(component.getIdentifier()));
+ return entityFactory.createAffectedComponentEntity(affectedComponent, revision, permissions);
+ })
+ .collect(Collectors.toSet());
+ }
+
+ public VariableRegistryDTO createVariableRegistryDto(final ProcessGroup processGroup, final RevisionManager revisionManager) {
+ final ComponentVariableRegistry variableRegistry = processGroup.getVariableRegistry();
+
+ final List<String> variableNames = variableRegistry.getVariableMap().keySet().stream()
+ .map(descriptor -> descriptor.getName())
+ .collect(Collectors.toList());
+
+ final Set<VariableEntity> variableEntities = new LinkedHashSet<>();
+
+ for (final String variableName : variableNames) {
+ final VariableDTO variableDto = new VariableDTO();
+ variableDto.setName(variableName);
+ variableDto.setValue(variableRegistry.getVariableValue(variableName));
+ variableDto.setProcessGroupId(processGroup.getIdentifier());
+
+ final Set<AffectedComponentEntity> affectedComponentEntities = createAffectedComponentEntities(processGroup.getComponentsAffectedByVariable(variableName), revisionManager);
+
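+ // the variable is considered writable only if the user can both read and write every component it affects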
+ boolean canWrite = true;
+ for (final AffectedComponentEntity affectedComponent : affectedComponentEntities) {
+ final PermissionsDTO permissions = affectedComponent.getPermissions();
+ if (!permissions.getCanRead() || !permissions.getCanWrite()) {
+ canWrite = false;
+ break;
+ }
+ }
+
+ variableDto.setAffectedComponents(affectedComponentEntities);
+
+ final VariableEntity variableEntity = new VariableEntity();
+ variableEntity.setVariable(variableDto);
+ variableEntity.setCanWrite(canWrite);
+
+ variableEntities.add(variableEntity);
+ }
+
+ final VariableRegistryDTO registryDto = new VariableRegistryDTO();
+ registryDto.setProcessGroupId(processGroup.getIdentifier());
+ registryDto.setVariables(variableEntities);
+
+ return registryDto;
+ }
+
+ public VariableRegistryUpdateRequestDTO createVariableRegistryUpdateRequestDto(final VariableRegistryUpdateRequest request) {
+ final VariableRegistryUpdateRequestDTO dto = new VariableRegistryUpdateRequestDTO();
+ dto.setComplete(request.isComplete());
+ dto.setFailureReason(request.getFailureReason());
+ dto.setLastUpdated(request.getLastUpdated());
+ dto.setProcessGroupId(request.getProcessGroupId());
+ dto.setRequestId(request.getRequestId());
+ dto.setSubmissionTime(request.getSubmissionTime());
+
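+ // the individual steps of the update, from identifying affected components through restarting processors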
+ final List<VariableRegistryUpdateStepDTO> updateSteps = new ArrayList<>();
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getIdentifyRelevantComponentsStep()));
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getStopProcessorsStep()));
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getDisableServicesStep()));
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getApplyUpdatesStep()));
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getEnableServicesStep()));
+ updateSteps.add(createVariableRegistryUpdateStepDto(request.getStartProcessorsStep()));
+ dto.setUpdateSteps(updateSteps);
+
+ dto.setAffectedComponents(new HashSet<>(request.getAffectedComponents().values()));
+
+ return dto;
+ }
+
+ public VariableRegistryUpdateStepDTO createVariableRegistryUpdateStepDto(final VariableRegistryUpdateStep step) {
+ final VariableRegistryUpdateStepDTO dto = new VariableRegistryUpdateStepDTO();
+ dto.setComplete(step.isComplete());
+ dto.setDescription(step.getDescription());
+ dto.setFailureReason(step.getFailureReason());
+ return dto;
+ }
+
+
+ public VariableRegistryDTO populateAffectedComponents(final VariableRegistryDTO variableRegistry, final ProcessGroup group, final RevisionManager revisionManager) {
+ if (!group.getIdentifier().equals(variableRegistry.getProcessGroupId())) {
+ throw new IllegalArgumentException("Variable Registry does not have the same Group ID as the given Process Group");
+ }
+
+ final Set<VariableEntity> variableEntities = new LinkedHashSet<>();
+
+ if (variableRegistry.getVariables() != null) {
+ for (final VariableEntity inputEntity : variableRegistry.getVariables()) {
+ final VariableEntity entity = new VariableEntity();
+
+ final VariableDTO inputDto = inputEntity.getVariable();
+ final VariableDTO variableDto = new VariableDTO();
+ variableDto.setName(inputDto.getName());
+ variableDto.setValue(inputDto.getValue());
+ variableDto.setProcessGroupId(group.getIdentifier());
+
+ final Set<AffectedComponentEntity> affectedComponentEntities = createAffectedComponentEntities(group.getComponentsAffectedByVariable(variableDto.getName()), revisionManager);
+
+ boolean canWrite = true;
+ for (final AffectedComponentEntity affectedComponent : affectedComponentEntities) {
+ final PermissionsDTO permissions = affectedComponent.getPermissions();
+ if (!permissions.getCanRead() || !permissions.getCanWrite()) {
+ canWrite = false;
+ break;
+ }
+ }
+
+ variableDto.setAffectedComponents(affectedComponentEntities);
+
+ entity.setCanWrite(canWrite);
+ entity.setVariable(variableDto);
+
+ variableEntities.add(entity);
+ }
+ }
+
+ final VariableRegistryDTO registryDto = new VariableRegistryDTO();
+ registryDto.setProcessGroupId(group.getIdentifier());
+ registryDto.setVariables(variableEntities);
+
+ return registryDto;
+ }
+
+
+ /**
+ * Gets the capability description from the specified class.
+ */
+ private String getCapabilityDescription(final Class<?> cls) {
+ final CapabilityDescription capabilityDesc = cls.getAnnotation(CapabilityDescription.class);
+ return capabilityDesc == null ? null : capabilityDesc.value();
+ }
+
+ /**
+ * Gets the tags from the specified class.
+ */
+ private Set<String> getTags(final Class<?> cls) {
+ final Set<String> tags = new HashSet<>();
+ final Tags tagsAnnotation = cls.getAnnotation(Tags.class);
+ if (tagsAnnotation != null) {
+ for (final String tag : tagsAnnotation.value()) {
+ tags.add(tag);
+ }
+ }
+
+ if (cls.isAnnotationPresent(Restricted.class)) {
+ tags.add("restricted");
+ }
+
+ return tags;
+ }
+
+ /**
+ * Creates a bundle DTO from the specified class.
+ *
+ * @param coordinate bundle coordinates
+ * @return dto
+ */
+ public BundleDTO createBundleDto(final BundleCoordinate coordinate) {
+ final BundleDTO dto = new BundleDTO();
+ dto.setGroup(coordinate.getGroup());
+ dto.setArtifact(coordinate.getId());
+ dto.setVersion(coordinate.getVersion());
+ return dto;
+ }
+
+ private List<ControllerServiceApiDTO> createControllerServiceApiDto(final Class cls) {
+ final Set<Class> serviceApis = new HashSet<>();
+
+ // if this is a controller service
+ if (ControllerService.class.isAssignableFrom(cls)) {
+ // get all of its interfaces to determine the controller service APIs it implements
+ final List<Class<?>> interfaces = ClassUtils.getAllInterfaces(cls);
+ for (final Class i : interfaces) {
+ // add every controller service API other than ControllerService itself
+ if (ControllerService.class.isAssignableFrom(i) && !ControllerService.class.equals(i)) {
+ serviceApis.add(i);
+ }
+ }
+
+ final List<ControllerServiceApiDTO> dtos = new ArrayList<>();
+ for (final Class serviceApi : serviceApis) {
+ final Bundle bundle = extensionManager.getBundle(serviceApi.getClassLoader());
+ final BundleCoordinate bundleCoordinate = bundle.getBundleDetails().getCoordinate();
+
+ final ControllerServiceApiDTO dto = new ControllerServiceApiDTO();
+ dto.setType(serviceApi.getName());
+ dto.setBundle(createBundleDto(bundleCoordinate));
+ dtos.add(dto);
+ }
+ return dtos;
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Gets the DocumentedTypeDTOs from the specified classes.
+ *
+ * @param classes classes
+ * @param bundleGroupFilter if specified, must be member of bundle group
+ * @param bundleArtifactFilter if specified, must be member of bundle artifact
+ * @param typeFilter if specified, type must match
+ * @return dtos
+ */
+ public Set<DocumentedTypeDTO> fromDocumentedTypes(final Map<Class, Bundle> classes, final String bundleGroupFilter, final String bundleArtifactFilter, final String typeFilter) {
+ final Set<DocumentedTypeDTO> types = new LinkedHashSet<>();
+ final List<Class> sortedClasses = new ArrayList<>(classes.keySet());
+ Collections.sort(sortedClasses, CLASS_NAME_COMPARATOR);
+
+ for (final Class cls : sortedClasses) {
+ final Bundle bundle = classes.get(cls);
+ final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
+
+ // only include classes that meet the criteria if specified
+ if (bundleGroupFilter != null && !bundleGroupFilter.equals(coordinate.getGroup())) {
+ continue;
+ }
+ if (bundleArtifactFilter != null && !bundleArtifactFilter.equals(coordinate.getId())) {
+ continue;
+ }
+ if (typeFilter != null && !typeFilter.equals(cls.getName())) {
+ continue;
+ }
+
+ final DocumentedTypeDTO dto = new DocumentedTypeDTO();
+ dto.setType(cls.getName());
+ dto.setBundle(createBundleDto(coordinate));
+ dto.setControllerServiceApis(createControllerServiceApiDto(cls));
+ dto.setDescription(getCapabilityDescription(cls));
+ dto.setRestricted(isRestricted(cls));
+ dto.setUsageRestriction(getUsageRestriction(cls));
+ dto.setExplicitRestrictions(getExplicitRestrictions(cls));
+ dto.setDeprecationReason(getDeprecationReason(cls));
+ dto.setTags(getTags(cls));
+ types.add(dto);
+ }
+
+ return types;
+ }
+
+ /**
+ * Gets the DocumentedTypeDTOs from the specified classes.
+ *
+ * @param classes classes
+ * @param bundleGroupFilter if specified, must be member of bundle group
+ * @param bundleArtifactFilter if specified, must be member of bundle artifact
+ * @param typeFilter if specified, type must match
+ * @return dtos
+ */
+ public Set<DocumentedTypeDTO> fromDocumentedTypes(final Set<Class> classes, final String bundleGroupFilter, final String bundleArtifactFilter, final String typeFilter) {
+ final Map<Class, Bundle> classBundles = new HashMap<>();
+ for (final Class cls : classes) {
+ classBundles.put(cls, extensionManager.getBundle(cls.getClassLoader()));
+ }
+ return fromDocumentedTypes(classBundles, bundleGroupFilter, bundleArtifactFilter, typeFilter);
+ }
+
+ /**
+ * Creates a ProcessorDTO from the specified ProcessorNode.
+ *
+ * @param node node
+ * @return dto
+ */
+ public ProcessorDTO createProcessorDto(final ProcessorNode node) {
+ if (node == null) {
+ return null;
+ }
+
+ final BundleCoordinate bundleCoordinate = node.getBundleCoordinate();
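+ // find the bundles that share this processor's group and artifact so we can flag whether multiple versions are available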
+ final List<Bundle> compatibleBundles = extensionManager.getBundles(node.getCanonicalClassName()).stream().filter(bundle -> {
+ final BundleCoordinate coordinate = bundle.getBundleDetails().getCoordinate();
+ return bundleCoordinate.getGroup().equals(coordinate.getGroup()) && bundleCoordinate.getId().equals(coordinate.getId());
+ }).collect(Collectors.toList());
+
+ final ProcessorDTO dto = new ProcessorDTO();
+ dto.setId(node.getIdentifier());
+ dto.setPosition(createPositionDto(node.getPosition()));
+ dto.setStyle(node.getStyle());
+ dto.setParentGroupId(node.getProcessGroup().getIdentifier());
+ dto.setInputRequirement(node.getInputRequirement().name());
+ dto.setPersistsState(node.getProcessor().getClass().isAnnotationPresent(Stateful.class));
+ dto.setRestricted(node.isRestricted());
+ dto.setDeprecated(node.isDeprecated());
+ dto.setExecutionNodeRestricted(node.isExecutionNodeRestricted());
+ dto.setExtensionMissing(node.isExtensionMissing());
+ dto.setMultipleVersionsAvailable(compatibleBundles.size() > 1);
+ dto.setVersionedComponentId(node.getVersionedComponentId().orElse(null));
+
+ dto.setType(node.getCanonicalClassName());
+ dto.setBundle(createBundleDto(bundleCoordinate));
+ dto.setName(node.getName());
+ dto.setState(node.getScheduledState().toString());
+
+ // build the relationship dtos
+ final List<RelationshipDTO> relationships = new ArrayList<>();
+ for (final Relationship rel : node.getRelationships()) {
+ final RelationshipDTO relationshipDTO = new RelationshipDTO();
+ relationshipDTO.setDescription(rel.getDescription());
+ relationshipDTO.setName(rel.getName());
+ relationshipDTO.setAutoTerminate(node.isAutoTerminated(rel));
+ relationships.add(relationshipDTO);
+ }
+
+ // sort the relationships
+ Collections.sort(relationships, new Comparator<RelationshipDTO>() {
+ @Override
+ public int compare(final RelationshipDTO r1, final RelationshipDTO r2) {
+ return Collator.getInstance(Locale.US).compare(r1.getName(), r2.getName());
+ }
+ });
+
+ // set the relationships
+ dto.setRelationships(relationships);
+
+ dto.setDescription(getCapabilityDescription(node.getClass()));
+ dto.setSupportsParallelProcessing(!node.isTriggeredSerially());
+ dto.setSupportsEventDriven(node.isEventDrivenSupported());
+ dto.setSupportsBatching(node.isSessionBatchingSupported());
+ dto.setConfig(createProcessorConfigDto(node));
+
+ final ValidationStatus validationStatus = node.getValidationStatus(1, TimeUnit.MILLISECONDS);
+ dto.setValidationStatus(validationStatus.name());
+
+ final Collection<ValidationResult> validationErrors = node.getValidationErrors();
+ if (validationErrors != null && !validationErrors.isEmpty()) {
+ final List<String> errors = new ArrayList<>();
+ for (final ValidationResult validationResult : validationErrors) {
+ errors.add(validationResult.toString());
+ }
+
+ dto.setValidationErrors(errors);
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a BulletinBoardDTO for the specified bulletins.
+ *
+ * @param bulletins bulletins
+ * @return dto
+ */
+ public BulletinBoardDTO createBulletinBoardDto(final List<BulletinEntity> bulletins) {
+ // sort the bulletins
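+ // null bulletins and null timestamps sort last; otherwise bulletins are ordered by ascending timestamp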
+ Collections.sort(bulletins, new Comparator<BulletinEntity>() {
+ @Override
+ public int compare(final BulletinEntity bulletin1, final BulletinEntity bulletin2) {
+ if (bulletin1 == null && bulletin2 == null) {
+ return 0;
+ } else if (bulletin1 == null) {
+ return 1;
+ } else if (bulletin2 == null) {
+ return -1;
+ }
+
+ final Date timestamp1 = bulletin1.getTimestamp();
+ final Date timestamp2 = bulletin2.getTimestamp();
+ if (timestamp1 == null && timestamp2 == null) {
+ return 0;
+ } else if (timestamp1 == null) {
+ return 1;
+ } else if (timestamp2 == null) {
+ return -1;
+ } else {
+ return timestamp1.compareTo(timestamp2);
+ }
+ }
+ });
+
+ // create the bulletin board
+ final BulletinBoardDTO bulletinBoard = new BulletinBoardDTO();
+ bulletinBoard.setBulletins(bulletins);
+ bulletinBoard.setGenerated(new Date());
+ return bulletinBoard;
+ }
+
+ /**
+ * Creates BulletinDTOs for the specified Bulletins.
+ *
+ * @param bulletins bulletins
+ * @return dtos
+ */
+ public List<BulletinDTO> createBulletinDtos(final List<Bulletin> bulletins) {
+ final List<BulletinDTO> bulletinDtos = new ArrayList<>(bulletins.size());
+ for (final Bulletin bulletin : bulletins) {
+ bulletinDtos.add(createBulletinDto(bulletin));
+ }
+ return bulletinDtos;
+ }
+
+ /**
+ * Creates a BulletinDTO for the specified Bulletin.
+ *
+ * @param bulletin bulletin
+ * @return dto
+ */
+ public BulletinDTO createBulletinDto(final Bulletin bulletin) {
+ final BulletinDTO dto = new BulletinDTO();
+ dto.setId(bulletin.getId());
+ dto.setNodeAddress(bulletin.getNodeAddress());
+ dto.setTimestamp(bulletin.getTimestamp());
+ dto.setGroupId(bulletin.getGroupId());
+ dto.setSourceId(bulletin.getSourceId());
+ dto.setSourceName(bulletin.getSourceName());
+ dto.setCategory(bulletin.getCategory());
+ dto.setLevel(bulletin.getLevel());
+ dto.setMessage(bulletin.getMessage());
+ return dto;
+ }
+
+ /**
+ * Creates an event-type ProvenanceNodeDTO for the specified ProvenanceEventLineageNode.
+ *
+ * @param node node
+ * @return dto
+ */
+ public ProvenanceNodeDTO createProvenanceEventNodeDTO(final ProvenanceEventLineageNode node) {
+ final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
+ dto.setId(node.getIdentifier());
+ dto.setType("EVENT");
+ dto.setEventType(node.getEventType().toString());
+ dto.setTimestamp(new Date(node.getTimestamp()));
+ dto.setMillis(node.getTimestamp());
+ dto.setFlowFileUuid(node.getFlowFileUuid());
+ dto.setParentUuids(node.getParentUuids());
+ dto.setChildUuids(node.getChildUuids());
+ return dto;
+ }
+
+ /**
+ * Creates a FlowFile-type ProvenanceNodeDTO for the specified LineageNode.
+ *
+ * @param node node
+ * @return dto
+ */
+ public ProvenanceNodeDTO createFlowFileNodeDTO(final LineageNode node) {
+ final ProvenanceNodeDTO dto = new ProvenanceNodeDTO();
+ dto.setId(node.getIdentifier());
+ dto.setType("FLOWFILE");
+ dto.setTimestamp(new Date(node.getTimestamp()));
+ dto.setMillis(node.getTimestamp());
+ dto.setFlowFileUuid(node.getFlowFileUuid());
+ return dto;
+ }
+
+ /**
+ * Creates a ProvenanceLinkDTO for the specified LineageEdge.
+ *
+ * @param edge edge
+ * @return dto
+ */
+ public ProvenanceLinkDTO createProvenanceLinkDTO(final LineageEdge edge) {
+ final LineageNode source = edge.getSource();
+ final LineageNode target = edge.getDestination();
+
+ final ProvenanceLinkDTO dto = new ProvenanceLinkDTO();
+ dto.setTimestamp(new Date(target.getTimestamp()));
+ dto.setMillis(target.getTimestamp());
+ dto.setFlowFileUuid(edge.getUuid());
+ dto.setSourceId(source.getIdentifier());
+ dto.setTargetId(target.getIdentifier());
+ return dto;
+ }
+
+ /**
+ * Creates a LineageDTO for the specified Lineage.
+ *
+ * @param computeLineageSubmission submission
+ * @return dto
+ */
+ public LineageDTO createLineageDto(final ComputeLineageSubmission computeLineageSubmission) {
+ // build the lineage dto
+ final LineageDTO dto = new LineageDTO();
+ final LineageRequestDTO requestDto = new LineageRequestDTO();
+ final LineageResultsDTO resultsDto = new LineageResultsDTO();
+
+ // include the original request and results
+ dto.setRequest(requestDto);
+ dto.setResults(resultsDto);
+
+ // rebuild the request from the submission object
+ switch (computeLineageSubmission.getLineageComputationType()) {
+ case EXPAND_CHILDREN:
+ requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
+ requestDto.setLineageRequestType(LineageRequestType.CHILDREN);
+ break;
+ case EXPAND_PARENTS:
+ requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
+ requestDto.setLineageRequestType(LineageRequestType.PARENTS);
+ break;
+ case FLOWFILE_LINEAGE:
+ final Collection<String> uuids = computeLineageSubmission.getLineageFlowFileUuids();
+ if (uuids.size() == 1) {
+ requestDto.setUuid(uuids.iterator().next());
+ }
+ requestDto.setEventId(computeLineageSubmission.getExpandedEventId());
+ requestDto.setLineageRequestType(LineageRequestType.FLOWFILE);
+ break;
+ }
+
+ // include lineage details
+ dto.setId(computeLineageSubmission.getLineageIdentifier());
+ dto.setSubmissionTime(computeLineageSubmission.getSubmissionTime());
+
+ // create the results dto
+ final ComputeLineageResult results = computeLineageSubmission.getResult();
+ dto.setFinished(results.isFinished());
+ dto.setPercentCompleted(results.getPercentComplete());
+ dto.setExpiration(results.getExpiration());
+
+ final List<LineageNode> nodes = results.getNodes();
+ final List<LineageEdge> edges = results.getEdges();
+
+ final List<ProvenanceNodeDTO> nodeDtos = new ArrayList<>();
+ if (results.isFinished()) {
+ // create the node dto's
+ for (final LineageNode node : nodes) {
+ switch (node.getNodeType()) {
+ case FLOWFILE_NODE:
+ nodeDtos.add(createFlowFileNodeDTO(node));
+ break;
+ case PROVENANCE_EVENT_NODE:
+ nodeDtos.add(createProvenanceEventNodeDTO((ProvenanceEventLineageNode) node));
+ break;
+ }
+ }
+ }
+ resultsDto.setNodes(nodeDtos);
+
+ // include any errors
+ if (results.getError() != null) {
+ final Set<String> errors = new HashSet<>();
+ errors.add(results.getError());
+ resultsDto.setErrors(errors);
+ }
+
+ // create the link dto's
+ final List<ProvenanceLinkDTO> linkDtos = new ArrayList<>();
+ for (final LineageEdge edge : edges) {
+ linkDtos.add(createProvenanceLinkDTO(edge));
+ }
+ resultsDto.setLinks(linkDtos);
+
+ return dto;
+ }
+
+ /**
+ * Creates a SystemDiagnosticsDTO for the specified SystemDiagnostics.
+ *
+ * @param sysDiagnostics diags
+ * @return dto
+ */
+ public SystemDiagnosticsDTO createSystemDiagnosticsDto(final SystemDiagnostics sysDiagnostics) {
+
+ final SystemDiagnosticsDTO dto = new SystemDiagnosticsDTO();
+ final SystemDiagnosticsSnapshotDTO snapshot = new SystemDiagnosticsSnapshotDTO();
+ dto.setAggregateSnapshot(snapshot);
+
+ snapshot.setStatsLastRefreshed(new Date(sysDiagnostics.getCreationTimestamp()));
+
+ // processors
+ snapshot.setAvailableProcessors(sysDiagnostics.getAvailableProcessors());
+ snapshot.setProcessorLoadAverage(sysDiagnostics.getProcessorLoadAverage());
+
+ // threads
+ snapshot.setDaemonThreads(sysDiagnostics.getDaemonThreads());
+ snapshot.setTotalThreads(sysDiagnostics.getTotalThreads());
+
+ // heap
+ snapshot.setMaxHeap(FormatUtils.formatDataSize(sysDiagnostics.getMaxHeap()));
+ snapshot.setMaxHeapBytes(sysDiagnostics.getMaxHeap());
+ snapshot.setTotalHeap(FormatUtils.formatDataSize(sysDiagnostics.getTotalHeap()));
+ snapshot.setTotalHeapBytes(sysDiagnostics.getTotalHeap());
+ snapshot.setUsedHeap(FormatUtils.formatDataSize(sysDiagnostics.getUsedHeap()));
+ snapshot.setUsedHeapBytes(sysDiagnostics.getUsedHeap());
+ snapshot.setFreeHeap(FormatUtils.formatDataSize(sysDiagnostics.getFreeHeap()));
+ snapshot.setFreeHeapBytes(sysDiagnostics.getFreeHeap());
+ if (sysDiagnostics.getHeapUtilization() != -1) {
+ snapshot.setHeapUtilization(FormatUtils.formatUtilization(sysDiagnostics.getHeapUtilization()));
+ }
+
+ // non heap
+ snapshot.setMaxNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getMaxNonHeap()));
+ snapshot.setMaxNonHeapBytes(sysDiagnostics.getMaxNonHeap());
+ snapshot.setTotalNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getTotalNonHeap()));
+ snapshot.setTotalNonHeapBytes(sysDiagnostics.getTotalNonHeap());
+ snapshot.setUsedNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getUsedNonHeap()));
+ snapshot.setUsedNonHeapBytes(sysDiagnostics.getUsedNonHeap());
+ snapshot.setFreeNonHeap(FormatUtils.formatDataSize(sysDiagnostics.getFreeNonHeap()));
+ snapshot.setFreeNonHeapBytes(sysDiagnostics.getFreeNonHeap());
+ if (sysDiagnostics.getNonHeapUtilization() != -1) {
+ snapshot.setNonHeapUtilization(FormatUtils.formatUtilization(sysDiagnostics.getNonHeapUtilization()));
+ }
+
+ // flow file disk usage
+ final SystemDiagnosticsSnapshotDTO.StorageUsageDTO flowFileRepositoryStorageUsageDto = createStorageUsageDTO(null, sysDiagnostics.getFlowFileRepositoryStorageUsage());
+ snapshot.setFlowFileRepositoryStorageUsage(flowFileRepositoryStorageUsageDto);
+
+ // content disk usage
+ final Set<SystemDiagnosticsSnapshotDTO.StorageUsageDTO> contentRepositoryStorageUsageDtos = new LinkedHashSet<>();
+ snapshot.setContentRepositoryStorageUsage(contentRepositoryStorageUsageDtos);
+ for (final Map.Entry<String, StorageUsage> entry : sysDiagnostics.getContentRepositoryStorageUsage().entrySet()) {
+ contentRepositoryStorageUsageDtos.add(createStorageUsageDTO(entry.getKey(), entry.getValue()));
+ }
+
+ // provenance disk usage
+ final Set<SystemDiagnosticsSnapshotDTO.StorageUsageDTO> provenanceRepositoryStorageUsageDtos = new LinkedHashSet<>();
+ snapshot.setProvenanceRepositoryStorageUsage(provenanceRepositoryStorageUsageDtos);
+ for (final Map.Entry<String, StorageUsage> entry : sysDiagnostics.getProvenanceRepositoryStorageUsage().entrySet()) {
+ provenanceRepositoryStorageUsageDtos.add(createStorageUsageDTO(entry.getKey(), entry.getValue()));
+ }
+
+ // garbage collection
+ final Set<SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO> garbageCollectionDtos = new LinkedHashSet<>();
+ snapshot.setGarbageCollection(garbageCollectionDtos);
+ for (final Map.Entry<String, GarbageCollection> entry : sysDiagnostics.getGarbageCollection().entrySet()) {
+ garbageCollectionDtos.add(createGarbageCollectionDTO(entry.getKey(), entry.getValue()));
+ }
+
+ // version info
+ final SystemDiagnosticsSnapshotDTO.VersionInfoDTO versionInfoDto = createVersionInfoDTO();
+ snapshot.setVersionInfo(versionInfoDto);
+
+ // uptime
+ snapshot.setUptime(FormatUtils.formatHoursMinutesSeconds(sysDiagnostics.getUptime(), TimeUnit.MILLISECONDS));
+
+ return dto;
+ }
+
+ /**
+ * Creates a StorageUsageDTO from the specified StorageUsage.
+ *
+ * @param identifier id
+ * @param storageUsage usage
+ * @return dto
+ */
+ public SystemDiagnosticsSnapshotDTO.StorageUsageDTO createStorageUsageDTO(final String identifier, final StorageUsage storageUsage) {
+ final SystemDiagnosticsSnapshotDTO.StorageUsageDTO dto = new SystemDiagnosticsSnapshotDTO.StorageUsageDTO();
+ dto.setIdentifier(identifier);
+ dto.setFreeSpace(FormatUtils.formatDataSize(storageUsage.getFreeSpace()));
+ dto.setTotalSpace(FormatUtils.formatDataSize(storageUsage.getTotalSpace()));
+ dto.setUsedSpace(FormatUtils.formatDataSize(storageUsage.getUsedSpace()));
+ dto.setFreeSpaceBytes(storageUsage.getFreeSpace());
+ dto.setTotalSpaceBytes(storageUsage.getTotalSpace());
+ dto.setUsedSpaceBytes(storageUsage.getUsedSpace());
+ dto.setUtilization(FormatUtils.formatUtilization(storageUsage.getDiskUtilization()));
+ return dto;
+ }
+
+ /**
+ * Creates a GarbageCollectionDTO from the specified GarbageCollection.
+ *
+ * @param name name
+ * @param garbageCollection gc
+ * @return dto
+ */
+ public SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO createGarbageCollectionDTO(final String name, final GarbageCollection garbageCollection) {
+ final SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO dto = new SystemDiagnosticsSnapshotDTO.GarbageCollectionDTO();
+ dto.setName(name);
+ dto.setCollectionCount(garbageCollection.getCollectionCount());
+ dto.setCollectionTime(FormatUtils.formatHoursMinutesSeconds(garbageCollection.getCollectionTime(), TimeUnit.MILLISECONDS));
+ dto.setCollectionMillis(garbageCollection.getCollectionTime());
+ return dto;
+ }
+
+ public SystemDiagnosticsSnapshotDTO.VersionInfoDTO createVersionInfoDTO() {
+ final SystemDiagnosticsSnapshotDTO.VersionInfoDTO dto = new SystemDiagnosticsSnapshotDTO.VersionInfoDTO();
+ dto.setJavaVendor(System.getProperty("java.vendor"));
+ dto.setJavaVersion(System.getProperty("java.version"));
+ dto.setOsName(System.getProperty("os.name"));
+ dto.setOsVersion(System.getProperty("os.version"));
+ dto.setOsArchitecture(System.getProperty("os.arch"));
+
+ final Bundle frameworkBundle = NarClassLoadersHolder.getInstance().getFrameworkBundle();
+ if (frameworkBundle != null) {
+ final BundleDetails frameworkDetails = frameworkBundle.getBundleDetails();
+
+ dto.setNiFiVersion(frameworkDetails.getCoordinate().getVersion());
+
+ // Get build info
+ dto.setBuildTag(frameworkDetails.getBuildTag());
+ dto.setBuildRevision(frameworkDetails.getBuildRevision());
+ dto.setBuildBranch(frameworkDetails.getBuildBranch());
+ dto.setBuildTimestamp(frameworkDetails.getBuildTimestampDate());
+ }
+
+ return dto;
+ }
+
+ /**
+ * Creates a ResourceDTO from the specified Resource.
+ *
+ * @param resource resource
+ * @return dto
+ */
+ public ResourceDTO createResourceDto(final Resource resource) {
+ final ResourceDTO dto = new ResourceDTO();
+ dto.setIdentifier(resource.getIdentifier());
+ dto.setName(resource.getName());
+ return dto;
+ }
+
+ /**
+ * Creates a ProcessorDiagnosticsDTO from the given Processor and status information, along with additional supporting information.
+ *
+ * @param procNode the processor to create diagnostics for
+ * @param procStatus the status of the given processor
+ * @param bulletinRepo the bulletin repository
+ * @param flowController flowController
+ * @param serviceEntityFactory function for creating a ControllerServiceEntity from a given ID
+ * @return ProcessorDiagnosticsDTO for the given Processor
+ */
+ public ProcessorDiagnosticsDTO createProcessorDiagnosticsDto(final ProcessorNode procNode, final ProcessorStatus procStatus, final BulletinRepository bulletinRepo,
+ final FlowController flowController, final Function<String, ControllerServiceEntity> serviceEntityFactory) {
+
+ final ProcessorDiagnosticsDTO procDiagnostics = new ProcessorDiagnosticsDTO();
+
+ procDiagnostics.setClassLoaderDiagnostics(createClassLoaderDiagnosticsDto(procNode));
+ procDiagnostics.setIncomingConnections(procNode.getIncomingConnections().stream()
+ .map(this::createConnectionDiagnosticsDto)
+ .collect(Collectors.toSet()));
+ procDiagnostics.setOutgoingConnections(procNode.getConnections().stream()
+ .map(this::createConnectionDiagnosticsDto)
+ .collect(Collectors.toSet()));
+ procDiagnostics.setJvmDiagnostics(createJvmDiagnosticsDto(flowController));
+ procDiagnostics.setProcessor(createProcessorDto(procNode));
+ procDiagnostics.setProcessorStatus(createProcessorStatusDto(procStatus));
+ procDiagnostics.setThreadDumps(createThreadDumpDtos(procNode));
+
+ final Set<ControllerServiceDiagnosticsDTO> referencedServiceDiagnostics = createReferencedServiceDiagnostics(procNode.getProperties(),
+ flowController.getControllerServiceProvider(), serviceEntityFactory);
+ procDiagnostics.setReferencedControllerServices(referencedServiceDiagnostics);
+
+ return procDiagnostics;
+ }
+
+ private Set<ControllerServiceDiagnosticsDTO> createReferencedServiceDiagnostics(final Map<PropertyDescriptor, String> properties, final ControllerServiceProvider serviceProvider,
+ final Function<String, ControllerServiceEntity> serviceEntityFactory) {
+
+ final Set<ControllerServiceDiagnosticsDTO> referencedServiceDiagnostics = new HashSet<>();
+ for (final Map.Entry<PropertyDescriptor, String> entry : properties.entrySet()) {
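+ // only properties that identify a controller service, and that actually reference one, contribute diagnostics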
+ final PropertyDescriptor descriptor = entry.getKey();
+ if (descriptor.getControllerServiceDefinition() == null) {
+ continue;
+ }
+
+ final String serviceId = entry.getValue();
+ if (serviceId == null) {
+ continue;
+ }
+
+ final ControllerServiceNode serviceNode = serviceProvider.getControllerServiceNode(serviceId);
+ if (serviceNode == null) {
+ continue;
+ }
+
+ final ControllerServiceDiagnosticsDTO serviceDiagnostics = createControllerServiceDiagnosticsDto(serviceNode, serviceEntityFactory, serviceProvider);
+ if (serviceDiagnostics != null) {
+ referencedServiceDiagnostics.add(serviceDiagnostics);
+ }
+ }
+
+ return referencedServiceDiagnostics;
+ }
+
+ /**
+ * Creates a ControllerServiceDiagnosticsDTO from the given Controller Service, along with additional supporting information.
+ *
+ * @param serviceNode the controller service to create diagnostics for
+ * @param serviceEntityFactory a function to convert a controller service id to a controller service entity
+ * @param serviceProvider the controller service provider
+ * @return ControllerServiceDiagnosticsDTO for the given Controller Service
+ */
+ public ControllerServiceDiagnosticsDTO createControllerServiceDiagnosticsDto(final ControllerServiceNode serviceNode, final Function<String, ControllerServiceEntity> serviceEntityFactory,
+ final ControllerServiceProvider serviceProvider) {
+
+ final ControllerServiceDiagnosticsDTO serviceDiagnostics = new ControllerServiceDiagnosticsDTO();
+ final ControllerServiceEntity serviceEntity = serviceEntityFactory.apply(serviceNode.getIdentifier());
+ serviceDiagnostics.setControllerService(serviceEntity);
+
+ serviceDiagnostics.setClassLoaderDiagnostics(createClassLoaderDiagnosticsDto(serviceNode));
+ return serviceDiagnostics;
+ }
+
+
+ private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ControllerServiceNode serviceNode) {
+ ClassLoader componentClassLoader = extensionManager.getInstanceClassLoader(serviceNode.getIdentifier());
+ if (componentClassLoader == null) {
+ componentClassLoader = serviceNode.getControllerServiceImplementation().getClass().getClassLoader();
+ }
+
+ return createClassLoaderDiagnosticsDto(componentClassLoader);
+ }
+
+
+ private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ProcessorNode procNode) {
+ ClassLoader componentClassLoader = extensionManager.getInstanceClassLoader(procNode.getIdentifier());
+ if (componentClassLoader == null) {
+ componentClassLoader = procNode.getProcessor().getClass().getClassLoader();
+ }
+
+ return createClassLoaderDiagnosticsDto(componentClassLoader);
+ }
+
+ private ClassLoaderDiagnosticsDTO createClassLoaderDiagnosticsDto(final ClassLoader classLoader) {
+ final ClassLoaderDiagnosticsDTO dto = new ClassLoaderDiagnosticsDTO();
+
+ final Bundle bundle = extensionManager.getBundle(classLoader);
+ if (bundle != null) {
+ dto.setBundle(createBundleDto(bundle.getBundleDetails().getCoordinate()));
+ }
+
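+ // walk up the class loader hierarchy so the DTO captures the full chain of parent bundles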
+ final ClassLoader parentClassLoader = classLoader.getParent();
+ if (parentClassLoader != null) {
+ dto.setParentClassLoader(createClassLoaderDiagnosticsDto(parentClassLoader));
+ }
+
+ return dto;
+ }
+
+
+ private ConnectionDiagnosticsDTO createConnectionDiagnosticsDto(final Connection connection) {
+ final ConnectionDiagnosticsDTO dto = new ConnectionDiagnosticsDTO();
+ dto.setConnection(createConnectionDto(connection));
+ dto.setAggregateSnapshot(createConnectionDiagnosticsSnapshotDto(connection));
+ return dto;
+ }
+
+ private ConnectionDiagnosticsSnapshotDTO createConnectionDiagnosticsSnapshotDto(final Connection connection) {
+ final ConnectionDiagnosticsSnapshotDTO dto = new ConnectionDiagnosticsSnapshotDTO();
+
+ final QueueDiagnostics queueDiagnostics = connection.getFlowFileQueue().getQueueDiagnostics();
+
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+ final QueueSize totalSize = queue.size();
+ dto.setTotalByteCount(totalSize.getByteCount());
+ dto.setTotalFlowFileCount(totalSize.getObjectCount());
+
+ final LocalQueuePartitionDiagnostics localDiagnostics = queueDiagnostics.getLocalQueuePartitionDiagnostics();
+ dto.setLocalQueuePartition(createLocalQueuePartitionDto(localDiagnostics));
+
+ final List<RemoteQueuePartitionDiagnostics> remoteDiagnostics = queueDiagnostics.getRemoteQueuePartitionDiagnostics();
+ if (remoteDiagnostics != null) {
+ final List<RemoteQueuePartitionDTO> remoteDiagnosticsDtos = remoteDiagnostics.stream()
+ .map(this::createRemoteQueuePartitionDto)
+ .collect(Collectors.toList());
+
+ dto.setRemoteQueuePartitions(remoteDiagnosticsDtos);
+ }
+
+ return dto;
+ }
+
+ private LocalQueuePartitionDTO createLocalQueuePartitionDto(final LocalQueuePartitionDiagnostics queueDiagnostics) {
+ final LocalQueuePartitionDTO dto = new LocalQueuePartitionDTO();
+
+ final QueueSize activeSize = queueDiagnostics.getActiveQueueSize();
+ dto.setActiveQueueByteCount(activeSize.getByteCount());
+ dto.setActiveQueueFlowFileCount(activeSize.getObjectCount());
+
+ final QueueSize inFlightSize = queueDiagnostics.getUnacknowledgedQueueSize();
+ dto.setInFlightByteCount(inFlightSize.getByteCount());
+ dto.setInFlightFlowFileCount(inFlightSize.getObjectCount());
+
+ final QueueSize swapSize = queueDiagnostics.getSwapQueueSize();
+ dto.setSwapByteCount(swapSize.getByteCount());
+ dto.setSwapFlowFileCount(swapSize.getObjectCount());
+ dto.setSwapFiles(queueDiagnostics.getSwapFileCount());
+
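+ // the partition totals are the sum of the active, in-flight (unacknowledged) and swapped-out FlowFiles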
+ dto.setTotalByteCount(activeSize.getByteCount() + inFlightSize.getByteCount() + swapSize.getByteCount());
+ dto.setTotalFlowFileCount(activeSize.getObjectCount() + inFlightSize.getObjectCount() + swapSize.getObjectCount());
+
+ dto.setAllActiveQueueFlowFilesPenalized(queueDiagnostics.isAllActiveFlowFilesPenalized());
+ dto.setAnyActiveQueueFlowFilesPenalized(queueDiagnostics.isAnyActiveFlowFilePenalized());
+
+ return dto;
+ }
+
+ private RemoteQueuePartitionDTO createRemoteQueuePartitionDto(final RemoteQueuePartitionDiagnostics queueDiagnostics) {
+ final RemoteQueuePartitionDTO dto = new RemoteQueuePartitionDTO();
+
+ dto.setNodeIdentifier(queueDiagnostics.getNodeIdentifier());
+
+ final QueueSize activeSize = queueDiagnostics.getActiveQueueSize();
+ dto.setActiveQueueByteCount(activeSize.getByteCount());
+ dto.setActiveQueueFlowFileCount(activeSize.getObjectCount());
+
+ final QueueSize inFlightSize = queueDiagnostics.getUnacknowledgedQueueSize();
+ dto.setInFlightByteCount(inFlightSize.getByteCount());
+ dto.setInFlightFlowFileCount(inFlightSize.getObjectCount());
+
+ final QueueSize swapSize = queueDiagnostics.getSwapQueueSize();
+ dto.setSwapByteCount(swapSize.getByteCount());
+ dto.setSwapFlowFileCount(swapSize.getObjectCount());
+ dto.setSwapFiles(queueDiagnostics.getSwapFileCount());
+
+ dto.setTotalByteCount(activeSize.getByteCount() + inFlightSize.getByteCount() + swapSize.getByteCount());
+ dto.setTotalFlowFileCount(activeSize.getObjectCount() + inFlightSize.getObjectCount() + swapSize.getObjectCount());
+
+ return dto;
+ }
+
+ private JVMDiagnosticsDTO createJvmDiagnosticsDto(final FlowController flowController) {
+ final JVMDiagnosticsDTO dto = new JVMDiagnosticsDTO();
+ dto.setAggregateSnapshot(createJvmDiagnosticsSnapshotDto(flowController));
+ dto.setClustered(flowController.isClustered());
+ dto.setConnected(flowController.isConnected());
+ return dto;
+ }
+
+ private JVMDiagnosticsSnapshotDTO createJvmDiagnosticsSnapshotDto(final FlowController flowController) {
+ final JVMDiagnosticsSnapshotDTO dto = new JVMDiagnosticsSnapshotDTO();
+
+ final JVMControllerDiagnosticsSnapshotDTO controllerDiagnosticsDto = new JVMControllerDiagnosticsSnapshotDTO();
+ final JVMFlowDiagnosticsSnapshotDTO flowDiagnosticsDto = new JVMFlowDiagnosticsSnapshotDTO();
+ final JVMSystemDiagnosticsSnapshotDTO systemDiagnosticsDto = new JVMSystemDiagnosticsSnapshotDTO();
+
+ dto.setControllerDiagnostics(controllerDiagnosticsDto);
+ dto.setFlowDiagnosticsDto(flowDiagnosticsDto);
+ dto.setSystemDiagnosticsDto(systemDiagnosticsDto);
+
+ final SystemDiagnostics systemDiagnostics = flowController.getSystemDiagnostics();
+
+ // flow-related information
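+ // every loaded bundle, sorted by coordinate, is reported so the diagnostics reflect the extensions available to the flow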
+ final Set<BundleDTO> bundlesLoaded = extensionManager.getAllBundles().stream()
+ .map(bundle -> bundle.getBundleDetails().getCoordinate())
+ .sorted((a, b) -> a.getCoordinate().compareTo(b.getCoordinate()))
+ .map(this::createBundleDto)
+ .collect(Collectors.toCollection(LinkedHashSet::new));
+
+ flowDiagnosticsDto.setActiveEventDrivenThreads(flowController.getActiveEventDrivenThreadCount());
+ flowDiagnosticsDto.setActiveTimerDrivenThreads(flowController.getActiveTimerDrivenThreadCount());
+ flowDiagnosticsDto.setBundlesLoaded(bundlesLoaded);
+ flowDiagnosticsDto.setTimeZone(System.getProperty("user.timezone"));
+ flowDiagnosticsDto.setUptime(FormatUtils.formatHoursMinutesSeconds(systemDiagnostics.getUptime(), TimeUnit.MILLISECONDS));
+
+ // controller-related information
+ controllerDiagnosticsDto.setClusterCoordinator(flowController.isClusterCoordinator());
+ controllerDiagnosticsDto.setPrimaryNode(flowController.isPrimary());
+ controllerDiagnosticsDto.setMaxEventDrivenThreads(flowController.getMaxEventDrivenThreadCount());
+ controllerDiagnosticsDto.setMaxTimerDrivenThreads(flowController.getMaxTimerDrivenThreadCount());
+
+ // system-related information
+ systemDiagnosticsDto.setMaxOpenFileDescriptors(systemDiagnostics.getMaxOpenFileHandles());
+ systemDiagnosticsDto.setOpenFileDescriptors(systemDiagnostics.getOpenFileHandles());
+ systemDiagnosticsDto.setPhysicalMemoryBytes(systemDiagnostics.getTotalPhysicalMemory());
+ systemDiagnosticsDto.setPhysicalMemory(FormatUtils.formatDataSize(systemDiagnostics.getTotalPhysicalMemory()));
+
+ final NumberFormat percentageFormat = NumberFormat.getPercentInstance();
+ percentageFormat.setMaximumFractionDigits(2);
+
+ final Set<RepositoryUsageDTO> contentRepoUsage = new HashSet<>();
+ for (final Map.Entry<String, StorageUsage> entry : systemDiagnostics.getContentRepositoryStorageUsage().entrySet()) {
+ final String repoName = entry.getKey();
+ final StorageUsage usage = entry.getValue();
+
+ final RepositoryUsageDTO usageDto = new RepositoryUsageDTO();
+ usageDto.setName(repoName);
+
+ usageDto.setFileStoreHash(DigestUtils.sha256Hex(flowController.getContentRepoFileStoreName(repoName)));
+ usageDto.setFreeSpace(FormatUtils.formatDataSize(usage.getFreeSpace()));
+ usageDto.setFreeSpaceBytes(usage.getFreeSpace());
+ usageDto.setTotalSpace(FormatUtils.formatDataSize(usage.getTotalSpace()));
+ usageDto.setTotalSpaceBytes(usage.getTotalSpace());
+
+ final double usedPercentage = (usage.getTotalSpace() - usage.getFreeSpace()) / (double) usage.getTotalSpace();
+ final String utilization = percentageFormat.format(usedPercentage);
+ usageDto.setUtilization(utilization);
+ contentRepoUsage.add(usageDto);
+ }
+
+ final Set<RepositoryUsageDTO> provRepoUsage = new HashSet<>();
+ for (final Map.Entry<String, StorageUsage> entry : systemDiagnostics.getProvenanceRepositoryStorageUsage().entrySet()) {
+ final String repoName = entry.getKey();
+ final StorageUsage usage = entry.getValue();
+
+ final RepositoryUsageDTO usageDto = new RepositoryUsageDTO();
+ usageDto.setName(repoName);
+
+ usageDto.setFileStoreHash(DigestUtils.sha256Hex(flowController.getProvenanceRepoFileStoreName(repoName)));
+ usageDto.setFreeSpace(FormatUtils.formatDataSize(usage.getFreeSpace()));
+ usageDto.setFreeSpaceBytes(usage.getFreeSpace());
+ usageDto.setTotalSpace(FormatUtils.formatDataSize(usage.getTotalSpace()));
+ usageDto.setTotalSpaceBytes(usage.getTotalSpace());
+
+ final double usedPercentage = (usage.getTotalSpace() - usage.getFreeSpace()) / (double) usage.getTotalSpace();
+ final String utilization = percentageFormat.format(usedPercentage);
+ usageDto.setUtilization(utilization);
+ provRepoUsage.add(usageDto);
+ }
+
+ // flow file repository usage (a single repository, reported directly from the system diagnostics)
+ final StorageUsage flowFileRepoStorageUsage = systemDiagnostics.getFlowFileRepositoryStorageUsage();
+ final RepositoryUsageDTO flowFileRepoUsage = new RepositoryUsageDTO();
+ flowFileRepoUsage.setName(flowFileRepoStorageUsage.getIdentifier());
+ flowFileRepoUsage.setFileStoreHash(DigestUtils.sha256Hex(flowController.getFlowRepoFileStoreName()));
+ flowFileRepoUsage.setFreeSpace(FormatUtils.formatDataSize(flowFileRepoStorageUsage.getFreeSpace()));
+ flowFileRepoUsage.setFreeSpaceBytes(flowFileRepoStorageUsage.getFreeSpace());
+ flowFileRepoUsage.setTotalSpace(FormatUtils.formatDataSize(flowFileRepoStorageUsage.getTotalSpace()));
+ flowFileRepoUsage.setTotalSpaceBytes(flowFileRepoStorageUsage.getTotalSpace());
+
+ final double flowFileRepoUsedPercentage = (flowFileRepoStorageUsage.getTotalSpace() - flowFileRepoStorageUsage.getFreeSpace()) / (double) flowFileRepoStorageUsage.getTotalSpace();
+ flowFileRepoUsage.setUtilization(percentageFormat.format(flowFileRepoUsedPercentage));
+
+ systemDiagnosticsDto.setContentRepositoryStorageUsage(contentRepoUsage);
+ systemDiagnosticsDto.setCpuCores(systemDiagnostics.getAvailableProcessors());
+ systemDiagnosticsDto.setCpuLoadAverage(systemDiagnostics.getProcessorLoadAverage());
+ systemDiagnosticsDto.setFlowFileRepositoryStorageUsage(flowFileRepoUsage);
+ systemDiagnosticsDto.setMaxHeapBytes(systemDiagnostics.getMaxHeap());
+ systemDiagnosticsDto.setMaxHeap(FormatUtils.formatDataSize(systemDiagnostics.getMaxHeap()));
+ systemDiagnosticsDto.setProvenanceRepositoryStorageUsage(provRepoUsage);
+
+ // Create the Garbage Collection History info
+ final GarbageCollectionHistory gcHistory = flowController.getGarbageCollectionHistory();
+ final List<GarbageCollectionDiagnosticsDTO> gcDiagnostics = new ArrayList<>();
+ for (final String memoryManager : gcHistory.getMemoryManagerNames()) {
+ final List<GarbageCollectionStatus> statuses = gcHistory.getGarbageCollectionStatuses(memoryManager);
+
+ final List<GCDiagnosticsSnapshotDTO> gcSnapshots = new ArrayList<>();
+ for (final GarbageCollectionStatus status : statuses) {
+ final GCDiagnosticsSnapshotDTO snapshotDto = new GCDiagnosticsSnapshotDTO();
+ snapshotDto.setTimestamp(status.getTimestamp());
+ snapshotDto.setCollectionCount(status.getCollectionCount());
+ snapshotDto.setCollectionMillis(status.getCollectionMillis());
+ gcSnapshots.add(snapshotDto);
+ }
+
+ gcSnapshots.sort(Comparator.comparing(GCDiagnosticsSnapshotDTO::getTimestamp).reversed());
+
+ final GarbageCollectionDiagnosticsDTO gcDto = new GarbageCollectionDiagnosticsDTO();
+ gcDto.setMemoryManagerName(memoryManager);
+ gcDto.setSnapshots(gcSnapshots);
+ gcDiagnostics.add(gcDto);
+ }
+
+ systemDiagnosticsDto.setGarbageCollectionDiagnostics(gcDiagnostics);
+
+ return dto;
+ }
+
+ private List<ThreadDumpDTO> createThreadDumpDtos(final ProcessorNode procNode) {
+ final List<ThreadDumpDTO> threadDumps = new ArrayList<>();
+
+ final List<ActiveThreadInfo> activeThreads = procNode.getActiveThreads();
+ for (final ActiveThreadInfo threadInfo : activeThreads) {
+ final ThreadDumpDTO dto = new ThreadDumpDTO();
+ dto.setStackTrace(threadInfo.getStackTrace());
+ dto.setThreadActiveMillis(threadInfo.getActiveMillis());
+ dto.setThreadName(threadInfo.getThreadName());
+ dto.setTaskTerminated(threadInfo.isTerminated());
+ threadDumps.add(dto);
+ }
+
+ return threadDumps;
+ }
+
+ /**
+ * Creates a ProcessorConfigDTO from the specified ProcessorNode.
+ *
+ * @param procNode node
+ * @return dto
+ */
+ public ProcessorConfigDTO createProcessorConfigDto(final ProcessorNode procNode) {
+ if (procNode == null) {
+ return null;
+ }
+
+ final ProcessorConfigDTO dto = new ProcessorConfigDTO();
+
+ // sort a copy of the properties
+ final Map<PropertyDescriptor, String> sortedProperties = new TreeMap<>(new Comparator<PropertyDescriptor>() {
+ @Override
+ public int compare(final PropertyDescriptor o1, final PropertyDescriptor o2) {
+ return Collator.getInstance(Locale.US).compare(o1.getName(), o2.getName());
+ }
+ });
+ sortedProperties.putAll(procNode.getProperties());
+
+ // get the property order from the processor
+ final Processor processor = procNode.getProcessor();
+ final Map<PropertyDescriptor, String> orderedProperties = new LinkedHashMap<>();
+ final List<PropertyDescriptor> descriptors = processor.getPropertyDescriptors();
+ if (descriptors != null && !descriptors.isEmpty()) {
+ for (final PropertyDescriptor descriptor : descriptors) {
+ orderedProperties.put(descriptor, null);
+ }
+ }
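+ // overlay the configured values (sorted by name) so supported properties keep the processor's declared order and dynamic properties follow alphabetically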
+ orderedProperties.putAll(sortedProperties);
+
+ // build the descriptor and property dtos
+ dto.setDescriptors(new LinkedHashMap<String, PropertyDescriptorDTO>());
+ dto.setProperties(new LinkedHashMap<String, String>());
+ for (final Map.Entry<PropertyDescriptor, String> entry : orderedProperties.entrySet()) {
+ final PropertyDescriptor descriptor = entry.getKey();
+
+ // store the property descriptor
+ dto.getDescriptors().put(descriptor.getName(), createPropertyDescriptorDto(descriptor, procNode.getProcessGroup().getIdentifier()));
+
+ // determine the property value - don't include sensitive properties
+ String propertyValue = entry.getValue();
+ if (propertyValue != null && descriptor.isSensitive()) {
+ propertyValue = SENSITIVE_VALUE_MASK;
+ } else if (propertyValue == null && descriptor.getDefaultValue() != null) {
+ propertyValue = descriptor.getDefaultValue();
+ }
+
+ // set the property value
+ dto.getProperties().put(descriptor.getName(), propertyValue);
+ }
+
+ dto.setSchedulingPeriod(procNode.getSchedulingPeriod());
+ dto.setPenaltyDuration(procNode.getPenalizationPeriod());
+ dto.setYieldDuration(procNode.getYieldPeriod());
+ dto.setRunDurationMillis(procNode.getRunDuration(TimeUnit.MILLISECONDS));
+ dto.setConcurrentlySchedulableTaskCount(procNode.getMaxConcurrentTasks());
+ dto.setLossTolerant(procNode.isLossTolerant());
+ dto.setComments(procNode.getComments());
+ dto.setBulletinLevel(procNode.getBulletinLevel().name());
+ dto.setSchedulingStrategy(procNode.getSchedulingStrategy().name());
+ dto.setExecutionNode(procNode.getExecutionNode().name());
+ dto.setAnnotationData(procNode.getAnnotationData());
+
+ // set up the default values for concurrent tasks and scheduling period
+ final Map<String, String> defaultConcurrentTasks = new HashMap<>();
+ defaultConcurrentTasks.put(SchedulingStrategy.TIMER_DRIVEN.name(), String.valueOf(SchedulingStrategy.TIMER_DRIVEN.getDefaultConcurrentTasks()));
+ defaultConcurrentTasks.put(SchedulingStrategy.EVENT_DRIVEN.name(), String.valueOf(SchedulingStrategy.EVENT_DRIVEN.getDefaultConcurrentTasks()));
+ defaultConcurrentTasks.put(SchedulingStrategy.CRON_DRIVEN.name(), String.valueOf(SchedulingStrategy.CRON_DRIVEN.getDefaultConcurrentTasks()));
+ dto.setDefaultConcurrentTasks(defaultConcurrentTasks);
+
+ final Map<String, String> defaultSchedulingPeriod = new HashMap<>();
+ defaultSchedulingPeriod.put(SchedulingStrategy.TIMER_DRIVEN.name(), SchedulingStrategy.TIMER_DRIVEN.getDefaultSchedulingPeriod());
+ defaultSchedulingPeriod.put(SchedulingStrategy.CRON_DRIVEN.name(), SchedulingStrategy.CRON_DRIVEN.getDefaultSchedulingPeriod());
+ dto.setDefaultSchedulingPeriod(defaultSchedulingPeriod);
+
+ return dto;
+ }
+
+ /**
+ * Creates a PropertyDescriptorDTO from the specified PropertyDescriptor.
+ *
+ * @param propertyDescriptor descriptor
+ * @param groupId the Identifier of the Process Group that the component belongs to
+ * @return dto
+ */
+ public PropertyDescriptorDTO createPropertyDescriptorDto(final PropertyDescriptor propertyDescriptor, final String groupId) {
+ if (propertyDescriptor == null) {
+ return null;
+ }
+
+ final PropertyDescriptorDTO dto = new PropertyDescriptorDTO();
+
+ dto.setName(propertyDescriptor.getName());
+ dto.setDisplayName(propertyDescriptor.getDisplayName());
+ dto.setRequired(propertyDescriptor.isRequired());
+ dto.setSensitive(propertyDescriptor.isSensitive());
+ dto.setDynamic(propertyDescriptor.isDynamic());
+ dto.setDescription(propertyDescriptor.getDescription());
+ dto.setDefaultValue(propertyDescriptor.getDefaultValue());
+ dto.setSupportsEl(propertyDescriptor.isExpressionLanguageSupported());
+
+ // to support legacy/deprecated method .expressionLanguageSupported(true)
+ String description = propertyDescriptor.isExpressionLanguageSupported()
+ && propertyDescriptor.getExpressionLanguageScope().equals(ExpressionLanguageScope.NONE)
+ ? "true (undefined scope)" : propertyDescriptor.getExpressionLanguageScope().getDescription();
+ dto.setExpressionLanguageScope(description);
+
+ // set the controller service that this property identifies, if applicable
+ if (propertyDescriptor.getControllerServiceDefinition() != null) {
+ final Class serviceClass = propertyDescriptor.getControllerServiceDefinition();
+ final Bundle serviceBundle = extensionManager.getBundle(serviceClass.getClassLoader());
+
+ dto.setIdentifiesControllerService(serviceClass.getName());
+ dto.setIdentifiesControllerServiceBundle(createBundleDto(serviceBundle.getBundleDetails().getCoordinate()));
+ }
+
+ final Class<? extends ControllerService> serviceDefinition = propertyDescriptor.getControllerServiceDefinition();
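+ // when no explicit allowable values are declared but the property identifies a controller service, offer the service instances visible to this group as the allowable values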
+ if (propertyDescriptor.getAllowableValues() == null) {
+ if (serviceDefinition == null) {
+ dto.setAllowableValues(null);
+ } else {
+ final List<AllowableValueEntity> allowableValues = new ArrayList<>();
+ final List<String> controllerServiceIdentifiers = new ArrayList<>(controllerServiceProvider.getControllerServiceIdentifiers(serviceDefinition, groupId));
+ Collections.sort(controllerServiceIdentifiers, Collator.getInstance(Locale.US));
+ for (final String serviceIdentifier : controllerServiceIdentifiers) {
+ final ControllerServiceNode service = controllerServiceProvider.getControllerServiceNode(serviceIdentifier);
+ final boolean isServiceAuthorized = service.isAuthorized(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser());
+ final String displayName = isServiceAuthorized ? service.getName() : serviceIdentifier;
+
+ final AllowableValueDTO allowableValue = new AllowableValueDTO();
+ allowableValue.setDisplayName(displayName);
+ allowableValue.setValue(serviceIdentifier);
+ allowableValues.add(entityFactory.createAllowableValueEntity(allowableValue, isServiceAuthorized));
+ }
+ dto.setAllowableValues(allowableValues);
+ }
+ } else {
+ final List<AllowableValueEntity> allowableValues = new ArrayList<>();
+ for (final AllowableValue allowableValue : propertyDescriptor.getAllowableValues()) {
+ final AllowableValueDTO allowableValueDto = new AllowableValueDTO();
+ allowableValueDto.setDisplayName(allowableValue.getDisplayName());
+ allowableValueDto.setValue(allowableValue.getValue());
+ allowableValueDto.setDescription(allowableValue.getDescription());
+ allowableValues.add(entityFactory.createAllowableValueEntity(allowableValueDto, true));
+ }
+
+ dto.setAllowableValues(allowableValues);
+ }
+
+ return dto;
+ }
+
+ // Copy methods
+ public LabelDTO copy(final LabelDTO original) {
+ final LabelDTO copy = new LabelDTO();
+ copy.setId(original.getId());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setLabel(original.getLabel());
+ copy.setStyle(copy(original.getStyle()));
+ copy.setPosition(original.getPosition());
+ copy.setWidth(original.getWidth());
+ copy.setHeight(original.getHeight());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ return copy;
+ }
+
+ public ControllerServiceDTO copy(final ControllerServiceDTO original) {
+ final ControllerServiceDTO copy = new ControllerServiceDTO();
+ copy.setAnnotationData(original.getAnnotationData());
+ copy.setControllerServiceApis(original.getControllerServiceApis());
+ copy.setComments(original.getComments());
+ copy.setCustomUiUrl(original.getCustomUiUrl());
+ copy.setDescriptors(copy(original.getDescriptors()));
+ copy.setId(original.getId());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setName(original.getName());
+ copy.setProperties(copy(original.getProperties()));
+ copy.setReferencingComponents(copy(original.getReferencingComponents()));
+ copy.setState(original.getState());
+ copy.setType(original.getType());
+ copy.setBundle(copy(original.getBundle()));
+ copy.setExtensionMissing(original.getExtensionMissing());
+ copy.setMultipleVersionsAvailable(original.getMultipleVersionsAvailable());
+ copy.setPersistsState(original.getPersistsState());
+ copy.setValidationErrors(copy(original.getValidationErrors()));
+ copy.setValidationStatus(original.getValidationStatus());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+ return copy;
+ }
+
+ public FunnelDTO copy(final FunnelDTO original) {
+ final FunnelDTO copy = new FunnelDTO();
+ copy.setId(original.getId());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setPosition(original.getPosition());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ return copy;
+ }
+
+ private <T> List<T> copy(final List<T> original) {
+ if (original == null) {
+ return null;
+ } else {
+ return new ArrayList<>(original);
+ }
+ }
+
+ private <T> List<T> copy(final Collection<T> original) {
+ if (original == null) {
+ return null;
+ } else {
+ return new ArrayList<>(original);
+ }
+ }
+
+ private <T> Set<T> copy(final Set<T> original) {
+ if (original == null) {
+ return null;
+ } else {
+ return new LinkedHashSet<>(original);
+ }
+ }
+
+ private <S, T> Map<S, T> copy(final Map<S, T> original) {
+ if (original == null) {
+ return null;
+ } else {
+ return new LinkedHashMap<>(original);
+ }
+ }
+
+ public BundleDTO copy(final BundleDTO original) {
+ if (original == null) {
+ return null;
+ }
+
+ final BundleDTO copy = new BundleDTO();
+ copy.setGroup(original.getGroup());
+ copy.setArtifact(original.getArtifact());
+ copy.setVersion(original.getVersion());
+ return copy;
+ }
+
+ public ProcessorDTO copy(final ProcessorDTO original) {
+ final ProcessorDTO copy = new ProcessorDTO();
+ copy.setConfig(copy(original.getConfig()));
+ copy.setPosition(original.getPosition());
+ copy.setId(original.getId());
+ copy.setName(original.getName());
+ copy.setDescription(original.getDescription());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setRelationships(copy(original.getRelationships()));
+ copy.setState(original.getState());
+ copy.setStyle(copy(original.getStyle()));
+ copy.setType(original.getType());
+ copy.setBundle(copy(original.getBundle()));
+ copy.setSupportsParallelProcessing(original.getSupportsParallelProcessing());
+ copy.setSupportsEventDriven(original.getSupportsEventDriven());
+ copy.setSupportsBatching(original.getSupportsBatching());
+ copy.setPersistsState(original.getPersistsState());
+ copy.setExecutionNodeRestricted(original.isExecutionNodeRestricted());
+ copy.setExtensionMissing(original.getExtensionMissing());
+ copy.setMultipleVersionsAvailable(original.getMultipleVersionsAvailable());
+ copy.setValidationErrors(copy(original.getValidationErrors()));
+ copy.setValidationStatus(original.getValidationStatus());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ return copy;
+ }
+
+ private ProcessorConfigDTO copy(final ProcessorConfigDTO original) {
+ final ProcessorConfigDTO copy = new ProcessorConfigDTO();
+ copy.setAnnotationData(original.getAnnotationData());
+ copy.setAutoTerminatedRelationships(copy(original.getAutoTerminatedRelationships()));
+ copy.setComments(original.getComments());
+ copy.setSchedulingStrategy(original.getSchedulingStrategy());
+ copy.setExecutionNode(original.getExecutionNode());
+ copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
+ copy.setCustomUiUrl(original.getCustomUiUrl());
+ copy.setDescriptors(copy(original.getDescriptors()));
+ copy.setProperties(copy(original.getProperties()));
+ copy.setSchedulingPeriod(original.getSchedulingPeriod());
+ copy.setPenaltyDuration(original.getPenaltyDuration());
+ copy.setYieldDuration(original.getYieldDuration());
+ copy.setRunDurationMillis(original.getRunDurationMillis());
+ copy.setBulletinLevel(original.getBulletinLevel());
+ copy.setDefaultConcurrentTasks(original.getDefaultConcurrentTasks());
+ copy.setDefaultSchedulingPeriod(original.getDefaultSchedulingPeriod());
+ copy.setLossTolerant(original.isLossTolerant());
+
+ return copy;
+ }
+
+ public ConnectionDTO copy(final ConnectionDTO original) {
+ final ConnectionDTO copy = new ConnectionDTO();
+ copy.setAvailableRelationships(copy(original.getAvailableRelationships()));
+ copy.setDestination(original.getDestination());
+ copy.setPosition(original.getPosition());
+ copy.setId(original.getId());
+ copy.setName(original.getName());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setSelectedRelationships(copy(original.getSelectedRelationships()));
+ copy.setFlowFileExpiration(original.getFlowFileExpiration());
+ copy.setBackPressureObjectThreshold(original.getBackPressureObjectThreshold());
+ copy.setBackPressureDataSizeThreshold(original.getBackPressureDataSizeThreshold());
+ copy.setPrioritizers(copy(original.getPrioritizers()));
+ copy.setSource(original.getSource());
+ copy.setzIndex(original.getzIndex());
+ copy.setLabelIndex(original.getLabelIndex());
+ copy.setBends(copy(original.getBends()));
+ copy.setLoadBalancePartitionAttribute(original.getLoadBalancePartitionAttribute());
+ copy.setLoadBalanceStrategy(original.getLoadBalanceStrategy());
+ copy.setLoadBalanceCompression(original.getLoadBalanceCompression());
+ copy.setLoadBalanceStatus(original.getLoadBalanceStatus());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ return copy;
+ }
+
+ public BulletinDTO copy(final BulletinDTO original) {
+ final BulletinDTO copy = new BulletinDTO();
+ copy.setId(original.getId());
+ copy.setTimestamp(original.getTimestamp());
+ copy.setGroupId(original.getGroupId());
+ copy.setSourceId(original.getSourceId());
+ copy.setSourceName(original.getSourceName());
+ copy.setCategory(original.getCategory());
+ copy.setLevel(original.getLevel());
+ copy.setMessage(original.getMessage());
+ copy.setNodeAddress(original.getNodeAddress());
+ return copy;
+ }
+
+ public PortDTO copy(final PortDTO original) {
+ final PortDTO copy = new PortDTO();
+ copy.setPosition(original.getPosition());
+ copy.setId(original.getId());
+ copy.setName(original.getName());
+ copy.setComments(original.getComments());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setState(original.getState());
+ copy.setType(original.getType());
+ copy.setTransmitting(original.isTransmitting());
+ copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
+ copy.setUserAccessControl(copy(original.getUserAccessControl()));
+ copy.setGroupAccessControl(copy(original.getGroupAccessControl()));
+ copy.setValidationErrors(copy(original.getValidationErrors()));
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+ return copy;
+ }
+
+ public RemoteProcessGroupPortDTO copy(final RemoteProcessGroupPortDTO original) {
+ final RemoteProcessGroupPortDTO copy = new RemoteProcessGroupPortDTO();
+ copy.setId(original.getId());
+ copy.setTargetId(original.getTargetId());
+ copy.setGroupId(original.getGroupId());
+ copy.setName(original.getName());
+ copy.setComments(original.getComments());
+ copy.setConnected(original.isConnected());
+ copy.setTargetRunning(original.isTargetRunning());
+ copy.setTransmitting(original.isTransmitting());
+ copy.setConcurrentlySchedulableTaskCount(original.getConcurrentlySchedulableTaskCount());
+ copy.setUseCompression(original.getUseCompression());
+ copy.setExists(original.getExists());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ final BatchSettingsDTO batchOrg = original.getBatchSettings();
+ if (batchOrg != null) {
+ final BatchSettingsDTO batchCopy = new BatchSettingsDTO();
+ batchCopy.setCount(batchOrg.getCount());
+ batchCopy.setSize(batchOrg.getSize());
+ batchCopy.setDuration(batchOrg.getDuration());
+ copy.setBatchSettings(batchCopy);
+ }
+ return copy;
+ }
+
+ public ProcessGroupDTO copy(final ProcessGroupDTO original, final boolean deep) {
+ final ProcessGroupDTO copy = new ProcessGroupDTO();
+ copy.setComments(original.getComments());
+ copy.setContents(copy(original.getContents(), deep));
+ copy.setPosition(original.getPosition());
+ copy.setId(original.getId());
+ copy.setInputPortCount(original.getInputPortCount());
+ copy.setInvalidCount(original.getInvalidCount());
+ copy.setName(original.getName());
+ copy.setVersionControlInformation(copy(original.getVersionControlInformation()));
+ copy.setOutputPortCount(original.getOutputPortCount());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ copy.setRunningCount(original.getRunningCount());
+ copy.setStoppedCount(original.getStoppedCount());
+ copy.setDisabledCount(original.getDisabledCount());
+ copy.setActiveRemotePortCount(original.getActiveRemotePortCount());
+ copy.setInactiveRemotePortCount(original.getInactiveRemotePortCount());
+
+ copy.setUpToDateCount(original.getUpToDateCount());
+ copy.setLocallyModifiedCount(original.getLocallyModifiedCount());
+ copy.setStaleCount(original.getStaleCount());
+ copy.setLocallyModifiedAndStaleCount(original.getLocallyModifiedAndStaleCount());
+ copy.setSyncFailureCount(original.getSyncFailureCount());
+
+ if (original.getVariables() != null) {
+ copy.setVariables(new HashMap<>(original.getVariables()));
+ }
+
+ return copy;
+ }
+
+ public VersionControlInformationDTO copy(final VersionControlInformationDTO original) {
+ if (original == null) {
+ return null;
+ }
+
+ final VersionControlInformationDTO copy = new VersionControlInformationDTO();
+ copy.setRegistryId(original.getRegistryId());
+ copy.setRegistryName(original.getRegistryName());
+ copy.setBucketId(original.getBucketId());
+ copy.setBucketName(original.getBucketName());
+ copy.setFlowId(original.getFlowId());
+ copy.setFlowName(original.getFlowName());
+ copy.setFlowDescription(original.getFlowDescription());
+ copy.setVersion(original.getVersion());
+ copy.setState(original.getState());
+ copy.setStateExplanation(original.getStateExplanation());
+ return copy;
+ }
+
+ public RemoteProcessGroupDTO copy(final RemoteProcessGroupDTO original) {
+ final RemoteProcessGroupContentsDTO originalContents = original.getContents();
+ final RemoteProcessGroupContentsDTO copyContents = new RemoteProcessGroupContentsDTO();
+
+ if (originalContents.getInputPorts() != null) {
+ final Set<RemoteProcessGroupPortDTO> inputPorts = new HashSet<>();
+ for (final RemoteProcessGroupPortDTO port : originalContents.getInputPorts()) {
+ inputPorts.add(copy(port));
+ }
+ copyContents.setInputPorts(inputPorts);
+ }
+
+ if (originalContents.getOutputPorts() != null) {
+ final Set<RemoteProcessGroupPortDTO> outputPorts = new HashSet<>();
+ for (final RemoteProcessGroupPortDTO port : originalContents.getOutputPorts()) {
+ outputPorts.add(copy(port));
+ }
+ copyContents.setOutputPorts(outputPorts);
+ }
+
+ final RemoteProcessGroupDTO copy = new RemoteProcessGroupDTO();
+ copy.setComments(original.getComments());
+ copy.setPosition(original.getPosition());
+ copy.setId(original.getId());
+ copy.setCommunicationsTimeout(original.getCommunicationsTimeout());
+ copy.setYieldDuration(original.getYieldDuration());
+ copy.setName(original.getName());
+ copy.setInputPortCount(original.getInputPortCount());
+ copy.setOutputPortCount(original.getOutputPortCount());
+ copy.setActiveRemoteInputPortCount(original.getActiveRemoteInputPortCount());
+ copy.setInactiveRemoteInputPortCount(original.getInactiveRemoteInputPortCount());
+ copy.setActiveRemoteOutputPortCount(original.getActiveRemoteOutputPortCount());
+ copy.setInactiveRemoteOutputPortCount(original.getInactiveRemoteOutputPortCount());
+ copy.setParentGroupId(original.getParentGroupId());
+ copy.setTargetUris(original.getTargetUris());
+ copy.setTransportProtocol(original.getTransportProtocol());
+ copy.setProxyHost(original.getProxyHost());
+ copy.setProxyPort(original.getProxyPort());
+ copy.setProxyUser(original.getProxyUser());
+ copy.setProxyPassword(original.getProxyPassword());
+ copy.setLocalNetworkInterface(original.getLocalNetworkInterface());
+ copy.setVersionedComponentId(original.getVersionedComponentId());
+
+ copy.setContents(copyContents);
+
+ return copy;
+ }
+
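+ // The createConnectableDto(...) overloads below adapt component DTOs (ports, processors,
+ // funnels, remote group ports) into the ConnectableDTO form used for connection endpoints.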
+ public ConnectableDTO createConnectableDto(final PortDTO port, final ConnectableType type) {
+ final ConnectableDTO connectable = new ConnectableDTO();
+ connectable.setGroupId(port.getParentGroupId());
+ connectable.setId(port.getId());
+ connectable.setName(port.getName());
+ connectable.setType(type.name());
+ connectable.setVersionedComponentId(port.getVersionedComponentId());
+ return connectable;
+ }
+
+ public ConnectableDTO createConnectableDto(final ProcessorDTO processor) {
+ final ConnectableDTO connectable = new ConnectableDTO();
+ connectable.setGroupId(processor.getParentGroupId());
+ connectable.setId(processor.getId());
+ connectable.setName(processor.getName());
+ connectable.setType(ConnectableType.PROCESSOR.name());
+ connectable.setVersionedComponentId(processor.getVersionedComponentId());
+ return connectable;
+ }
+
+ public ConnectableDTO createConnectableDto(final FunnelDTO funnel) {
+ final ConnectableDTO connectable = new ConnectableDTO();
+ connectable.setGroupId(funnel.getParentGroupId());
+ connectable.setId(funnel.getId());
+ connectable.setType(ConnectableType.FUNNEL.name());
+ connectable.setVersionedComponentId(funnel.getVersionedComponentId());
+ return connectable;
+ }
+
+ public ConnectableDTO createConnectableDto(final RemoteProcessGroupPortDTO remoteGroupPort, final ConnectableType type) {
+ final ConnectableDTO connectable = new ConnectableDTO();
+ connectable.setGroupId(remoteGroupPort.getGroupId());
+ connectable.setId(remoteGroupPort.getId());
+ connectable.setName(remoteGroupPort.getName());
+ connectable.setType(type.name());
+ connectable.setVersionedComponentId(remoteGroupPort.getVersionedComponentId());
+ return connectable;
+ }
+
+ /**
+ *
+ * @param original orig
+ * @param deep if <code>true</code>, all Connections, ProcessGroups, Ports, Processors, etc. will be copied. If <code>false</code>, the copy will have links to the same objects referenced by
+ * <code>original</code>. (An illustrative sketch follows this method.)
+ *
+ * @return dto
+ */
+ private FlowSnippetDTO copy(final FlowSnippetDTO original, final boolean deep) {
+ final FlowSnippetDTO copy = new FlowSnippetDTO();
+
+ final Set<ConnectionDTO> connections = new LinkedHashSet<>();
+ final Set<ProcessGroupDTO> groups = new LinkedHashSet<>();
+ final Set<PortDTO> inputPorts = new LinkedHashSet<>();
+ final Set<PortDTO> outputPorts = new LinkedHashSet<>();
+ final Set<LabelDTO> labels = new LinkedHashSet<>();
+ final Set<ProcessorDTO> processors = new LinkedHashSet<>();
+ final Set<RemoteProcessGroupDTO> remoteProcessGroups = new LinkedHashSet<>();
+ final Set<FunnelDTO> funnels = new LinkedHashSet<>();
+ final Set<ControllerServiceDTO> controllerServices = new LinkedHashSet<>();
+
+ if (deep) {
+ for (final ProcessGroupDTO group : original.getProcessGroups()) {
+ groups.add(copy(group, deep));
+ }
+
+ for (final PortDTO port : original.getInputPorts()) {
+ inputPorts.add(copy(port));
+ }
+
+ for (final PortDTO port : original.getOutputPorts()) {
+ outputPorts.add(copy(port));
+ }
+
+ for (final LabelDTO label : original.getLabels()) {
+ labels.add(copy(label));
+ }
+
+ for (final ProcessorDTO processor : original.getProcessors()) {
+ processors.add(copy(processor));
+ }
+
+ for (final RemoteProcessGroupDTO remoteGroup : original.getRemoteProcessGroups()) {
+ remoteProcessGroups.add(copy(remoteGroup));
+ }
+
+ for (final FunnelDTO funnel : original.getFunnels()) {
+ funnels.add(copy(funnel));
+ }
+
+ for (final ConnectionDTO connection : original.getConnections()) {
+ connections.add(copy(connection));
+ }
+
+ for (final ControllerServiceDTO controllerService : original.getControllerServices()) {
+ controllerServices.add(copy(controllerService));
+ }
+ } else {
+ if (original.getConnections() != null) {
+ connections.addAll(copy(original.getConnections()));
+ }
+ if (original.getProcessGroups() != null) {
+ groups.addAll(copy(original.getProcessGroups()));
+ }
+ if (original.getInputPorts() != null) {
+ inputPorts.addAll(copy(original.getInputPorts()));
+ }
+ if (original.getOutputPorts() != null) {
+ outputPorts.addAll(copy(original.getOutputPorts()));
+ }
+ if (original.getLabels() != null) {
+ labels.addAll(copy(original.getLabels()));
+ }
+ if (original.getProcessors() != null) {
+ processors.addAll(copy(original.getProcessors()));
+ }
+ if (original.getRemoteProcessGroups() != null) {
+ remoteProcessGroups.addAll(copy(original.getRemoteProcessGroups()));
+ }
+ if (original.getFunnels() != null) {
+ funnels.addAll(copy(original.getFunnels()));
+ }
+ if (original.getControllerServices() != null) {
+ controllerServices.addAll(copy(original.getControllerServices()));
+ }
+ }
+
+ copy.setConnections(connections);
+ copy.setProcessGroups(groups);
+ copy.setInputPorts(inputPorts);
+ copy.setLabels(labels);
+ copy.setOutputPorts(outputPorts);
+ copy.setProcessors(processors);
+ copy.setRemoteProcessGroups(remoteProcessGroups);
+ copy.setFunnels(funnels);
+ copy.setControllerServices(controllerServices);
+
+ return copy;
+ }
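+
+ // Illustrative sketch of the deep/shallow behavior above (hypothetical local variable names):
+ //   FlowSnippetDTO deepCopy = copy(snippet, true);    // nested component DTOs are copied too
+ //   FlowSnippetDTO shallowCopy = copy(snippet, false); // new collections, same DTO instances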
+
+ /**
+ * Factory method for creating a new RevisionDTO based on this controller.
+ *
+ * @param lastMod mod
+ * @return dto
+ */
+ public RevisionDTO createRevisionDTO(final FlowModification lastMod) {
+ final Revision revision = lastMod.getRevision();
+
+ // create the dto
+ final RevisionDTO revisionDTO = new RevisionDTO();
+ revisionDTO.setVersion(revision.getVersion());
+ revisionDTO.setClientId(revision.getClientId());
+ revisionDTO.setLastModifier(lastMod.getLastModifier());
+
+ return revisionDTO;
+ }
+
+ public RevisionDTO createRevisionDTO(final Revision revision) {
+ final RevisionDTO dto = new RevisionDTO();
+ dto.setVersion(revision.getVersion());
+ dto.setClientId(revision.getClientId());
+ return dto;
+ }
+
+ public NodeDTO createNodeDTO(final NodeIdentifier nodeId, final NodeConnectionStatus status, final NodeHeartbeat nodeHeartbeat, final List<NodeEvent> events, final Set<String> roles) {
+ final NodeDTO nodeDto = new NodeDTO();
+
+ // populate node dto
+ nodeDto.setNodeId(nodeId.getId());
+ nodeDto.setAddress(nodeId.getApiAddress());
+ nodeDto.setApiPort(nodeId.getApiPort());
+ nodeDto.setStatus(status.getState().name());
+ nodeDto.setRoles(roles);
+ if (status.getConnectionRequestTime() != null) {
+ final Date connectionRequested = new Date(status.getConnectionRequestTime());
+ nodeDto.setConnectionRequested(connectionRequested);
+ }
+
+ // only connected nodes have heartbeats
+ if (nodeHeartbeat != null) {
+ final Date heartbeat = new Date(nodeHeartbeat.getTimestamp());
+ nodeDto.setHeartbeat(heartbeat);
+ nodeDto.setNodeStartTime(new Date(nodeHeartbeat.getSystemStartTime()));
+ nodeDto.setActiveThreadCount(nodeHeartbeat.getActiveThreadCount());
+ nodeDto.setQueued(FormatUtils.formatCount(nodeHeartbeat.getFlowFileCount()) + " / " + FormatUtils.formatDataSize(nodeHeartbeat.getFlowFileBytes()));
+ }
+
+ // populate node events
+ final List<NodeEvent> nodeEvents = new ArrayList<>(events);
+ // sort the events in reverse chronological order (most recent first)
+ nodeEvents.sort(Comparator.comparingLong(NodeEvent::getTimestamp).reversed());
+
+ // create the node event dtos
+ final List<NodeEventDTO> nodeEventDtos = new ArrayList<>();
+ for (final NodeEvent event : nodeEvents) {
+ // create node event dto
+ final NodeEventDTO nodeEventDto = new NodeEventDTO();
+ nodeEventDtos.add(nodeEventDto);
+
+ // populate node event dto
+ nodeEventDto.setMessage(event.getMessage());
+ nodeEventDto.setCategory(event.getSeverity().name());
+ nodeEventDto.setTimestamp(new Date(event.getTimestamp()));
+ }
+ nodeDto.setEvents(nodeEventDtos);
+
+ return nodeDto;
+ }
+
+ public RegistryDTO createRegistryDto(FlowRegistry registry) {
+ final RegistryDTO dto = new RegistryDTO();
+ dto.setDescription(registry.getDescription());
+ dto.setId(registry.getIdentifier());
+ dto.setName(registry.getName());
+ dto.setUri(registry.getURL());
+ return dto;
+ }
+
+
+ /* setters */
+ public void setControllerServiceProvider(final ControllerServiceProvider controllerServiceProvider) {
+ this.controllerServiceProvider = controllerServiceProvider;
+ }
+
+ public void setAuthorizer(final Authorizer authorizer) {
+ this.authorizer = authorizer;
+ }
+
+ public void setEntityFactory(final EntityFactory entityFactory) {
+ this.entityFactory = entityFactory;
+ }
+
+ public void setBulletinRepository(BulletinRepository bulletinRepository) {
+ this.bulletinRepository = bulletinRepository;
+ }
+
+ public void setExtensionManager(ExtensionManager extensionManager) {
+ this.extensionManager = extensionManager;
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/FlowConfigurationDTO.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/FlowConfigurationDTO.java
new file mode 100644
index 0000000..2dd91ad
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/api/dto/FlowConfigurationDTO.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.web.api.dto;
+
+import io.swagger.annotations.ApiModelProperty;
+import org.apache.nifi.web.api.dto.util.TimeAdapter;
+
+import javax.xml.bind.annotation.XmlType;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+import java.util.Date;
+
+/**
+ * Details for the controller configuration.
+ */
+@XmlType(name = "flowConfiguration")
+public class FlowConfigurationDTO {
+
+ private Boolean supportsManagedAuthorizer;
+ private Boolean supportsConfigurableAuthorizer;
+ private Boolean supportsConfigurableUsersAndGroups;
+ private Long autoRefreshIntervalSeconds;
+
+ private Date currentTime;
+ private Integer timeOffset;
+
+ private Long defaultBackPressureObjectThreshold;
+ private String defaultBackPressureDataSizeThreshold;
+
+ private String dcaeDistributorApiHostname;
+
+ /**
+ * @author Renu
+ * @return the DCAE distributor API hostname. This value is read only
+ */
+ @ApiModelProperty(
+ value = "The DCAE distributor API hostname.",
+ readOnly = true
+ )
+ public String getDcaeDistributorApiHostname() {
+ return dcaeDistributorApiHostname;
+ }
+
+ public void setDcaeDistributorApiHostname(String dcaeDistributorApiHostname) {
+ this.dcaeDistributorApiHostname = dcaeDistributorApiHostname;
+ }
+
+ /**
+ * @return interval in seconds between the automatic NiFi refresh requests. This value is read only
+ */
+ @ApiModelProperty(
+ value = "The interval in seconds between the automatic NiFi refresh requests.",
+ readOnly = true
+ )
+ public Long getAutoRefreshIntervalSeconds() {
+ return autoRefreshIntervalSeconds;
+ }
+
+ public void setAutoRefreshIntervalSeconds(Long autoRefreshIntervalSeconds) {
+ this.autoRefreshIntervalSeconds = autoRefreshIntervalSeconds;
+ }
+
+ /**
+ * @return whether this NiFi supports a managed authorizer. Managed authorizers can visualize users, groups,
+ * and policies in the UI. This value is read only
+ */
+ @ApiModelProperty(
+ value = "Whether this NiFi supports a managed authorizer. Managed authorizers can visualize users, groups, and policies in the UI.",
+ readOnly = true
+ )
+ public Boolean getSupportsManagedAuthorizer() {
+ return supportsManagedAuthorizer;
+ }
+
+ public void setSupportsManagedAuthorizer(Boolean supportsManagedAuthorizer) {
+ this.supportsManagedAuthorizer = supportsManagedAuthorizer;
+ }
+
+ /**
+ * @return whether this NiFi supports configurable users and groups. This value is read only
+ */
+ @ApiModelProperty(
+ value = "Whether this NiFi supports configurable users and groups.",
+ readOnly = true
+ )
+ public Boolean getSupportsConfigurableUsersAndGroups() {
+ return supportsConfigurableUsersAndGroups;
+ }
+
+ public void setSupportsConfigurableUsersAndGroups(Boolean supportsConfigurableUsersAndGroups) {
+ this.supportsConfigurableUsersAndGroups = supportsConfigurableUsersAndGroups;
+ }
+
+ /**
+ * @return whether this NiFi supports a configurable authorizer. This value is read only
+ */
+ @ApiModelProperty(
+ value = "Whether this NiFi supports a configurable authorizer.",
+ readOnly = true
+ )
+ public Boolean getSupportsConfigurableAuthorizer() {
+ return supportsConfigurableAuthorizer;
+ }
+
+ public void setSupportsConfigurableAuthorizer(Boolean supportsConfigurableAuthorizer) {
+ this.supportsConfigurableAuthorizer = supportsConfigurableAuthorizer;
+ }
+
+ /**
+ * @return current time on the server
+ */
+ @XmlJavaTypeAdapter(TimeAdapter.class)
+ @ApiModelProperty(
+ value = "The current time on the system.",
+ dataType = "string"
+ )
+ public Date getCurrentTime() {
+ return currentTime;
+ }
+
+ public void setCurrentTime(Date currentTime) {
+ this.currentTime = currentTime;
+ }
+
+ /**
+ * @return time offset of the server
+ */
+ @ApiModelProperty(
+ value = "The time offset of the system."
+ )
+ public Integer getTimeOffset() {
+ return timeOffset;
+ }
+
+ public void setTimeOffset(Integer timeOffset) {
+ this.timeOffset = timeOffset;
+ }
+
+ /**
+ * @return the default back pressure object threshold
+ */
+ @ApiModelProperty(
+ value = "The default back pressure object threshold."
+ )
+ public Long getDefaultBackPressureObjectThreshold() {
+ return defaultBackPressureObjectThreshold;
+ }
+
+ public void setDefaultBackPressureObjectThreshold(Long backPressureObjectThreshold) {
+ this.defaultBackPressureObjectThreshold = backPressureObjectThreshold;
+ }
+
+ /**
+ * @return the default back pressure data size threshold
+ */
+ @ApiModelProperty(
+ value = "The default back pressure data size threshold."
+ )
+ public String getDefaultBackPressureDataSizeThreshold() {
+ return defaultBackPressureDataSizeThreshold;
+ }
+
+ public void setDefaultBackPressureDataSizeThreshold(String backPressureDataSizeThreshold) {
+ this.defaultBackPressureDataSizeThreshold = backPressureDataSizeThreshold;
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
new file mode 100644
index 0000000..1343400
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/dao/impl/StandardConnectionDAO.java
@@ -0,0 +1,700 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.web.dao.impl;
+
+import org.apache.nifi.authorization.Authorizer;
+import org.apache.nifi.authorization.RequestAction;
+import org.apache.nifi.authorization.resource.Authorizable;
+import org.apache.nifi.authorization.resource.DataAuthorizable;
+import org.apache.nifi.authorization.user.NiFiUser;
+import org.apache.nifi.authorization.user.NiFiUserUtils;
+import org.apache.nifi.connectable.Connectable;
+import org.apache.nifi.connectable.ConnectableType;
+import org.apache.nifi.connectable.Connection;
+import org.apache.nifi.controller.queue.LoadBalanceCompression;
+import org.apache.nifi.controller.queue.LoadBalanceStrategy;
+import org.apache.nifi.connectable.Position;
+import org.apache.nifi.controller.FlowController;
+import org.apache.nifi.controller.exception.ValidationException;
+import org.apache.nifi.controller.queue.DropFlowFileStatus;
+import org.apache.nifi.controller.queue.FlowFileQueue;
+import org.apache.nifi.controller.queue.ListFlowFileStatus;
+import org.apache.nifi.controller.repository.ContentNotFoundException;
+import org.apache.nifi.controller.repository.FlowFileRecord;
+import org.apache.nifi.flowfile.FlowFilePrioritizer;
+import org.apache.nifi.flowfile.attributes.CoreAttributes;
+import org.apache.nifi.groups.ProcessGroup;
+import org.apache.nifi.groups.RemoteProcessGroup;
+import org.apache.nifi.processor.Relationship;
+import org.apache.nifi.remote.RemoteGroupPort;
+import org.apache.nifi.util.FormatUtils;
+import org.apache.nifi.web.DownloadableContent;
+import org.apache.nifi.web.ResourceNotFoundException;
+import org.apache.nifi.web.api.dto.ConnectableDTO;
+import org.apache.nifi.web.api.dto.ConnectionDTO;
+import org.apache.nifi.web.api.dto.PositionDTO;
+import org.apache.nifi.web.dao.ConnectionDAO;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.WebApplicationException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+
+public class StandardConnectionDAO extends ComponentDAO implements ConnectionDAO {
+
+ private static final Logger logger = LoggerFactory.getLogger(StandardConnectionDAO.class);
+
+ private FlowController flowController;
+ private Authorizer authorizer;
+
+ private Connection locateConnection(final String connectionId) {
+ final ProcessGroup rootGroup = flowController.getFlowManager().getRootGroup();
+ final Connection connection = rootGroup.findConnection(connectionId);
+
+ if (connection == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find connection with id '%s'.", connectionId));
+ } else {
+ return connection;
+ }
+ }
+
+ @Override
+ public boolean hasConnection(String id) {
+ final ProcessGroup rootGroup = flowController.getFlowManager().getRootGroup();
+ return rootGroup.findConnection(id) != null;
+ }
+
+ @Override
+ public Connection getConnection(final String id) {
+ return locateConnection(id);
+ }
+
+ @Override
+ public Set<Connection> getConnections(final String groupId) {
+ final ProcessGroup group = locateProcessGroup(flowController, groupId);
+ return group.getConnections();
+ }
+
+ @Override
+ public DropFlowFileStatus getFlowFileDropRequest(String connectionId, String dropRequestId) {
+ final Connection connection = locateConnection(connectionId);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ final DropFlowFileStatus dropRequest = queue.getDropFlowFileStatus(dropRequestId);
+ if (dropRequest == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find drop request with id '%s'.", dropRequestId));
+ }
+
+ return dropRequest;
+ }
+
+ @Override
+ public ListFlowFileStatus getFlowFileListingRequest(String connectionId, String listingRequestId) {
+ final Connection connection = locateConnection(connectionId);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ final ListFlowFileStatus listRequest = queue.getListFlowFileStatus(listingRequestId);
+ if (listRequest == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find listing request with id '%s'.", listingRequestId));
+ }
+
+ return listRequest;
+ }
+
+ @Override
+ public FlowFileRecord getFlowFile(String id, String flowFileUuid) {
+ try {
+ final Connection connection = locateConnection(id);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+ final FlowFileRecord flowFile = queue.getFlowFile(flowFileUuid);
+
+ if (flowFile == null) {
+ throw new ResourceNotFoundException(String.format("The FlowFile with UUID %s is no longer in the active queue.", flowFileUuid));
+ }
+
+ // get the attributes and ensure appropriate access
+ final Map<String, String> attributes = flowFile.getAttributes();
+ final Authorizable dataAuthorizable = new DataAuthorizable(connection.getSourceAuthorizable());
+ dataAuthorizable.authorize(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser(), attributes);
+
+ return flowFile;
+ } catch (final IOException ioe) {
+ logger.error(String.format("Unable to get the flowfile (%s) at this time.", flowFileUuid), ioe);
+ throw new IllegalStateException("Unable to get the FlowFile at this time.");
+ }
+ }
+
+ /**
+ * Configures the specified connection using the specified dto. Only the non-null fields of the dto are applied, so partial updates are supported.
+ */
+ private void configureConnection(Connection connection, ConnectionDTO connectionDTO) {
+ // validate flow file comparators/prioritizers
+ List<FlowFilePrioritizer> newPrioritizers = null;
+ final List<String> prioritizers = connectionDTO.getPrioritizers();
+ if (isNotNull(prioritizers)) {
+ final List<String> newPrioritizersClasses = new ArrayList<>(prioritizers);
+ newPrioritizers = new ArrayList<>();
+ for (final String className : newPrioritizersClasses) {
+ try {
+ newPrioritizers.add(flowController.getFlowManager().createPrioritizer(className));
+ } catch (final ClassNotFoundException | InstantiationException | IllegalAccessException e) {
+ throw new IllegalArgumentException("Unable to set prioritizer " + className + ": " + e);
+ }
+ }
+ }
+
+ // update connection queue
+ if (isNotNull(connectionDTO.getFlowFileExpiration())) {
+ connection.getFlowFileQueue().setFlowFileExpiration(connectionDTO.getFlowFileExpiration());
+ }
+ if (isNotNull(connectionDTO.getBackPressureObjectThreshold())) {
+ connection.getFlowFileQueue().setBackPressureObjectThreshold(connectionDTO.getBackPressureObjectThreshold());
+ }
+ if (isNotNull(connectionDTO.getBackPressureDataSizeThreshold())) {
+ connection.getFlowFileQueue().setBackPressureDataSizeThreshold(connectionDTO.getBackPressureDataSizeThreshold());
+ }
+ if (isNotNull(newPrioritizers)) {
+ connection.getFlowFileQueue().setPriorities(newPrioritizers);
+ }
+
+ final String loadBalanceStrategyName = connectionDTO.getLoadBalanceStrategy();
+ final String loadBalancePartitionAttribute = connectionDTO.getLoadBalancePartitionAttribute();
+ if (isNotNull(loadBalanceStrategyName)) {
+ final LoadBalanceStrategy loadBalanceStrategy = LoadBalanceStrategy.valueOf(loadBalanceStrategyName);
+ connection.getFlowFileQueue().setLoadBalanceStrategy(loadBalanceStrategy, loadBalancePartitionAttribute);
+ }
+
+ final String loadBalanceCompressionName = connectionDTO.getLoadBalanceCompression();
+ if (isNotNull(loadBalanceCompressionName)) {
+ connection.getFlowFileQueue().setLoadBalanceCompression(LoadBalanceCompression.valueOf(loadBalanceCompressionName));
+ }
+
+ // update the connection state
+ if (isNotNull(connectionDTO.getBends())) {
+ final List<Position> bendPoints = new ArrayList<>();
+ for (final PositionDTO bend : connectionDTO.getBends()) {
+ if (bend != null) {
+ bendPoints.add(new Position(bend.getX(), bend.getY()));
+ }
+ }
+ connection.setBendPoints(bendPoints);
+ }
+ if (isNotNull(connectionDTO.getName())) {
+ connection.setName(connectionDTO.getName());
+ }
+ if (isNotNull(connectionDTO.getLabelIndex())) {
+ connection.setLabelIndex(connectionDTO.getLabelIndex());
+ }
+ if (isNotNull(connectionDTO.getzIndex())) {
+ connection.setZIndex(connectionDTO.getzIndex());
+ }
+ }
+
+ /**
+ * Validates the proposed connection configuration.
+ */
+ private List<String> validateProposedConfiguration(final String groupId, final ConnectionDTO connectionDTO) {
+ List<String> validationErrors = new ArrayList<>();
+
+ if (isNotNull(connectionDTO.getBackPressureObjectThreshold()) && connectionDTO.getBackPressureObjectThreshold() < 0) {
+ validationErrors.add("Max queue size must be a non-negative integer");
+ }
+ if (isNotNull(connectionDTO.getFlowFileExpiration())) {
+ Matcher expirationMatcher = FormatUtils.TIME_DURATION_PATTERN.matcher(connectionDTO.getFlowFileExpiration());
+ if (!expirationMatcher.matches()) {
+ validationErrors.add("Flow file expiration is not a valid time duration (ie 30 sec, 5 min)");
+ }
+ }
+ if (isNotNull(connectionDTO.getLabelIndex())) {
+ if (connectionDTO.getLabelIndex() < 0) {
+ validationErrors.add("The label index must be positive.");
+ }
+ }
+
+ // validation is required when connecting to a remote process group since each node in a
+ // cluster may or may not be authorized
+ final ConnectableDTO proposedDestination = connectionDTO.getDestination();
+ if (proposedDestination != null && ConnectableType.REMOTE_INPUT_PORT.name().equals(proposedDestination.getType())) {
+ // the group id must be specified
+ if (proposedDestination.getGroupId() == null) {
+ validationErrors.add("When the destination is a remote input port its group id is required.");
+ return validationErrors;
+ }
+
+ // attempt to locate the proposed destination
+ final ProcessGroup destinationParentGroup = locateProcessGroup(flowController, groupId);
+ final RemoteProcessGroup remoteProcessGroup = destinationParentGroup.getRemoteProcessGroup(proposedDestination.getGroupId());
+ if (remoteProcessGroup == null) {
+ validationErrors.add("Unable to find the specified remote process group.");
+ return validationErrors;
+ }
+
+ // ensure the new destination was found
+ final RemoteGroupPort remoteInputPort = remoteProcessGroup.getInputPort(proposedDestination.getId());
+ if (remoteInputPort == null) {
+ validationErrors.add("Unable to find the specified destination.");
+ return validationErrors;
+ }
+ }
+
+ return validationErrors;
+ }
+
+ @Override
+ public Connection createConnection(final String groupId, final ConnectionDTO connectionDTO) {
+ final ProcessGroup group = locateProcessGroup(flowController, groupId);
+
+ if (isNotNull(connectionDTO.getParentGroupId()) && !flowController.getFlowManager().areGroupsSame(connectionDTO.getParentGroupId(), groupId)) {
+ throw new IllegalStateException("Cannot specify a different Parent Group ID than the Group to which the Connection is being added");
+ }
+
+ // get the source and destination connectables
+ final ConnectableDTO sourceConnectableDTO = connectionDTO.getSource();
+ final ConnectableDTO destinationConnectableDTO = connectionDTO.getDestination();
+
+ // ensure both are specified
+ if (sourceConnectableDTO == null || destinationConnectableDTO == null) {
+ throw new IllegalArgumentException("Both source and destinations must be specified.");
+ }
+
+ // if the source/destination connectable's group id has not been set, it is inferred to be the current group
+ if (sourceConnectableDTO.getGroupId() == null) {
+ sourceConnectableDTO.setGroupId(groupId);
+ }
+ if (destinationConnectableDTO.getGroupId() == null) {
+ destinationConnectableDTO.setGroupId(groupId);
+ }
+
+ // validate the proposed configuration
+ final List<String> validationErrors = validateProposedConfiguration(groupId, connectionDTO);
+
+ // ensure there were no validation errors
+ if (!validationErrors.isEmpty()) {
+ throw new ValidationException(validationErrors);
+ }
+
+ // find the source
+ final Connectable source;
+ if (ConnectableType.REMOTE_OUTPUT_PORT.name().equals(sourceConnectableDTO.getType())) {
+ final ProcessGroup sourceParentGroup = locateProcessGroup(flowController, groupId);
+ final RemoteProcessGroup remoteProcessGroup = sourceParentGroup.getRemoteProcessGroup(sourceConnectableDTO.getGroupId());
+ source = remoteProcessGroup.getOutputPort(sourceConnectableDTO.getId());
+ } else {
+ final ProcessGroup sourceGroup = locateProcessGroup(flowController, sourceConnectableDTO.getGroupId());
+ source = sourceGroup.getConnectable(sourceConnectableDTO.getId());
+ }
+
+ // find the destination
+ final Connectable destination;
+ if (ConnectableType.REMOTE_INPUT_PORT.name().equals(destinationConnectableDTO.getType())) {
+ final ProcessGroup destinationParentGroup = locateProcessGroup(flowController, groupId);
+ final RemoteProcessGroup remoteProcessGroup = destinationParentGroup.getRemoteProcessGroup(destinationConnectableDTO.getGroupId());
+ destination = remoteProcessGroup.getInputPort(destinationConnectableDTO.getId());
+ } else {
+ final ProcessGroup destinationGroup = locateProcessGroup(flowController, destinationConnectableDTO.getGroupId());
+ destination = destinationGroup.getConnectable(destinationConnectableDTO.getId());
+ }
+
+ // determine the relationships
+ final Set<String> relationships = new HashSet<>();
+ if (isNotNull(connectionDTO.getSelectedRelationships())) {
+ relationships.addAll(connectionDTO.getSelectedRelationships());
+ }
+
+ // create the connection
+ final Connection connection = flowController.createConnection(connectionDTO.getId(), connectionDTO.getName(), source, destination, relationships);
+
+ // configure the connection
+ configureConnection(connection, connectionDTO);
+
+ // add the connection to the group
+ group.addConnection(connection);
+ return connection;
+ }
+
+ @Override
+ public DropFlowFileStatus createFlowFileDropRequest(String id, String dropRequestId) {
+ final Connection connection = locateConnection(id);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+ if (user == null) {
+ throw new WebApplicationException(new Throwable("Unable to access details for current user."));
+ }
+
+ return queue.dropFlowFiles(dropRequestId, user.getIdentity());
+ }
+
+ @Override
+ public ListFlowFileStatus createFlowFileListingRequest(String id, String listingRequestId) {
+ final Connection connection = locateConnection(id);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ // ensure we can list
+ verifyList(queue);
+
+ return queue.listFlowFiles(listingRequestId, 100); // list at most 100 FlowFiles per request
+ }
+
+ @Override
+ public void verifyCreate(String groupId, ConnectionDTO connectionDTO) {
+ // validate the incoming request
+ final List<String> validationErrors = validateProposedConfiguration(groupId, connectionDTO);
+
+ // ensure there were no validation errors
+ if (!validationErrors.isEmpty()) {
+ throw new ValidationException(validationErrors);
+ }
+
+ // Ensure that both the source and the destination for the connection exist.
+ // In the case that the source or destination is a port in a Remote Process Group,
+ // this is necessary because the ports can change in the background. It may still be
+ // possible for a port to disappear between the 'verify' stage and the creation stage,
+ // but this prevents the case where some nodes already know about the port while other
+ // nodes in the cluster do not. This is a more common case, as users may try to connect
+ // to the port as soon as the port is created.
+ final ConnectableDTO sourceDto = connectionDTO.getSource();
+ if (sourceDto == null || sourceDto.getId() == null) {
+ throw new IllegalArgumentException("Cannot create connection without specifying source");
+ }
+
+ final ConnectableDTO destinationDto = connectionDTO.getDestination();
+ if (destinationDto == null || destinationDto.getId() == null) {
+ throw new IllegalArgumentException("Cannot create connection without specifying destination");
+ }
+
+ if (ConnectableType.REMOTE_OUTPUT_PORT.name().equals(sourceDto.getType())) {
+ final ProcessGroup sourceParentGroup = locateProcessGroup(flowController, groupId);
+
+ final RemoteProcessGroup remoteProcessGroup = sourceParentGroup.getRemoteProcessGroup(sourceDto.getGroupId());
+ if (remoteProcessGroup == null) {
+ throw new IllegalArgumentException("Unable to find the specified remote process group.");
+ }
+
+ final RemoteGroupPort sourceConnectable = remoteProcessGroup.getOutputPort(sourceDto.getId());
+ if (sourceConnectable == null) {
+ throw new IllegalArgumentException("The specified source for the connection does not exist");
+ } else if (!sourceConnectable.getTargetExists()) {
+ throw new IllegalArgumentException("The specified remote output port does not exist.");
+ }
+ } else {
+ final ProcessGroup sourceGroup = locateProcessGroup(flowController, sourceDto.getGroupId());
+ final Connectable sourceConnectable = sourceGroup.getConnectable(sourceDto.getId());
+ if (sourceConnectable == null) {
+ throw new IllegalArgumentException("The specified source for the connection does not exist");
+ }
+ }
+
+ if (ConnectableType.REMOTE_INPUT_PORT.name().equals(destinationDto.getType())) {
+ final ProcessGroup destinationParentGroup = locateProcessGroup(flowController, groupId);
+
+ final RemoteProcessGroup remoteProcessGroup = destinationParentGroup.getRemoteProcessGroup(destinationDto.getGroupId());
+ if (remoteProcessGroup == null) {
+ throw new IllegalArgumentException("Unable to find the specified remote process group.");
+ }
+
+ final RemoteGroupPort destinationConnectable = remoteProcessGroup.getInputPort(destinationDto.getId());
+ if (destinationConnectable == null) {
+ throw new IllegalArgumentException("The specified destination for the connection does not exist");
+ } else if (!destinationConnectable.getTargetExists()) {
+ throw new IllegalArgumentException("The specified remote input port does not exist.");
+ }
+ } else {
+ final ProcessGroup destinationGroup = locateProcessGroup(flowController, destinationDto.getGroupId());
+ final Connectable destinationConnectable = destinationGroup.getConnectable(destinationDto.getId());
+ if (destinationConnectable == null) {
+ throw new IllegalArgumentException("The specified destination for the connection does not exist");
+ }
+ }
+ }
+
+ private void verifyList(final FlowFileQueue queue) {
+ queue.verifyCanList();
+ }
+
+ @Override
+ public void verifyList(String id) {
+ final Connection connection = locateConnection(id);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+ verifyList(queue);
+ }
+
+ @Override
+ public void verifyUpdate(ConnectionDTO connectionDTO) {
+ verifyUpdate(locateConnection(connectionDTO.getId()), connectionDTO);
+ }
+
+ private void verifyUpdate(final Connection connection, final ConnectionDTO connectionDTO) {
+ // determine what the request is attempting
+ if (isAnyNotNull(connectionDTO.getBackPressureDataSizeThreshold(),
+ connectionDTO.getBackPressureObjectThreshold(),
+ connectionDTO.getDestination(),
+ connectionDTO.getFlowFileExpiration(),
+ connectionDTO.getName(),
+ connectionDTO.getPosition(),
+ connectionDTO.getPrioritizers(),
+ connectionDTO.getSelectedRelationships())) {
+
+ // validate the incoming request
+ final List<String> validationErrors = validateProposedConfiguration(connection.getProcessGroup().getIdentifier(), connectionDTO);
+
+ // ensure there were no validation errors
+ if (!validationErrors.isEmpty()) {
+ throw new ValidationException(validationErrors);
+ }
+
+ // If the destination is changing, ensure that the current destination is not running. This check is done here, rather
+ // than in the Connection object itself, because the Connection does not know which updates are about to occur, and we
+ // don't want to prevent updates such as the connection name or back pressure settings just because the destination is running.
+ final Connectable destination = connection.getDestination();
+ if (destination != null && destination.isRunning() && destination.getConnectableType() != ConnectableType.FUNNEL && destination.getConnectableType() != ConnectableType.INPUT_PORT) {
+ throw new ValidationException(Collections.singletonList("Cannot change the destination of connection because the current destination is running"));
+ }
+
+ // verify that this connection supports modification
+ connection.verifyCanUpdate();
+ }
+ }
+
+ @Override
+ public Connection updateConnection(final ConnectionDTO connectionDTO) {
+ final Connection connection = locateConnection(connectionDTO.getId());
+ final ProcessGroup group = connection.getProcessGroup();
+
+ // ensure we can update
+ verifyUpdate(connection, connectionDTO);
+
+ final Collection<Relationship> newProcessorRelationships = new ArrayList<>();
+ Connectable newDestination = null;
+
+ // ensure that the source ID is correct, if specified.
+ final Connectable existingSource = connection.getSource();
+ if (isNotNull(connectionDTO.getSource()) && !existingSource.getIdentifier().equals(connectionDTO.getSource().getId())) {
+ throw new IllegalStateException("Connection with ID " + connectionDTO.getId() + " has conflicting Source ID");
+ }
+
+ // determine if the destination changed
+ final ConnectableDTO proposedDestination = connectionDTO.getDestination();
+ if (proposedDestination != null) {
+ final Connectable currentDestination = connection.getDestination();
+
+ // handle remote input port differently
+ if (ConnectableType.REMOTE_INPUT_PORT.name().equals(proposedDestination.getType())) {
+ // the group id must be specified
+ if (proposedDestination.getGroupId() == null) {
+ throw new IllegalArgumentException("When the destination is a remote input port its group id is required.");
+ }
+
+ // if the current destination is a remote input port
+ boolean isDifferentRemoteProcessGroup = false;
+ if (currentDestination.getConnectableType() == ConnectableType.REMOTE_INPUT_PORT) {
+ RemoteGroupPort remotePort = (RemoteGroupPort) currentDestination;
+ if (!proposedDestination.getGroupId().equals(remotePort.getRemoteProcessGroup().getIdentifier())) {
+ isDifferentRemoteProcessGroup = true;
+ }
+ }
+
+ // if the destination is changing or the previous destination was a different remote process group
+ if (!proposedDestination.getId().equals(currentDestination.getIdentifier()) || isDifferentRemoteProcessGroup) {
+ final ProcessGroup destinationParentGroup = locateProcessGroup(flowController, group.getIdentifier());
+ final RemoteProcessGroup remoteProcessGroup = destinationParentGroup.getRemoteProcessGroup(proposedDestination.getGroupId());
+
+ // ensure the remote process group was found
+ if (remoteProcessGroup == null) {
+ throw new IllegalArgumentException("Unable to find the specified remote process group.");
+ }
+
+ final RemoteGroupPort remoteInputPort = remoteProcessGroup.getInputPort(proposedDestination.getId());
+
+ // ensure the new destination was found
+ if (remoteInputPort == null) {
+ throw new IllegalArgumentException("Unable to find the specified destination.");
+ }
+
+ // ensure the remote port actually exists
+ if (!remoteInputPort.getTargetExists()) {
+ throw new IllegalArgumentException("The specified remote input port does not exist.");
+ } else {
+ newDestination = remoteInputPort;
+ }
+ }
+ } else {
+ // if there is a different destination id
+ if (!proposedDestination.getId().equals(currentDestination.getIdentifier())) {
+ // if the destination connectable's group id has not been set, it is inferred to be the current group
+ if (proposedDestination.getGroupId() == null) {
+ proposedDestination.setGroupId(group.getIdentifier());
+ }
+
+ final ProcessGroup destinationGroup = locateProcessGroup(flowController, proposedDestination.getGroupId());
+ newDestination = destinationGroup.getConnectable(proposedDestination.getId());
+
+ // ensure the new destination was found
+ if (newDestination == null) {
+ throw new IllegalArgumentException("Unable to find the specified destination.");
+ }
+ }
+ }
+ }
+
+ // determine any new relationships
+ final Set<String> relationships = connectionDTO.getSelectedRelationships();
+ if (isNotNull(relationships)) {
+ if (relationships.isEmpty()) {
+ throw new IllegalArgumentException("Cannot remove all relationships from Connection with ID " + connection.getIdentifier() + " -- remove the Connection instead");
+ }
+ if (existingSource == null) {
+ throw new IllegalArgumentException("Cannot specify new relationships without including the source.");
+ }
+
+ final Connectable destination = newDestination == null ? connection.getDestination() : newDestination;
+
+ for (final String relationship : relationships) {
+ int prevSize = newProcessorRelationships.size();
+
+ final Relationship processorRelationshipSource = existingSource.getRelationship(relationship);
+
+ if (processorRelationshipSource != null) {
+ newProcessorRelationships.add(processorRelationshipSource);
+ }
+
+ final Relationship processorRelationshipDest = destination.getRelationship(relationship);
+
+ if (processorRelationshipDest != null) {
+ newProcessorRelationships.add(processorRelationshipDest);
+ }
+
+ if (newProcessorRelationships.size() == prevSize) {
+ throw new IllegalArgumentException("Unable to locate " + relationship + " relationship.");
+ }
+ }
+ }
+
+ // configure the connection
+ configureConnection(connection, connectionDTO);
+ group.onComponentModified();
+
+ // update the relationships if necessary
+ if (!newProcessorRelationships.isEmpty()) {
+ connection.setRelationships(newProcessorRelationships);
+ }
+
+ // update the destination if necessary
+ if (isNotNull(newDestination)) {
+ connection.setDestination(newDestination);
+ }
+
+ return connection;
+ }
+
+ @Override
+ public void verifyDelete(String id) {
+ final Connection connection = locateConnection(id);
+ connection.verifyCanDelete();
+ }
+
+ @Override
+ public void deleteConnection(final String id) {
+ final Connection connection = locateConnection(id);
+ connection.getProcessGroup().removeConnection(connection);
+ }
+
+ @Override
+ public DropFlowFileStatus deleteFlowFileDropRequest(String connectionId, String dropRequestId) {
+ final Connection connection = locateConnection(connectionId);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ final DropFlowFileStatus dropFlowFileStatus = queue.cancelDropFlowFileRequest(dropRequestId);
+ if (dropFlowFileStatus == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find drop request with id '%s'.", dropRequestId));
+ }
+
+ return dropFlowFileStatus;
+ }
+
+ @Override
+ public ListFlowFileStatus deleteFlowFileListingRequest(String connectionId, String listingRequestId) {
+ final Connection connection = locateConnection(connectionId);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+
+ final ListFlowFileStatus listFlowFileStatus = queue.cancelListFlowFileRequest(listingRequestId);
+ if (listFlowFileStatus == null) {
+ throw new ResourceNotFoundException(String.format("Unable to find listing request with id '%s'.", listingRequestId));
+ }
+
+ return listFlowFileStatus;
+ }
+
+ @Override
+ public DownloadableContent getContent(String id, String flowFileUuid, String requestUri) {
+ try {
+ final NiFiUser user = NiFiUserUtils.getNiFiUser();
+
+ final Connection connection = locateConnection(id);
+ final FlowFileQueue queue = connection.getFlowFileQueue();
+ final FlowFileRecord flowFile = queue.getFlowFile(flowFileUuid);
+
+ if (flowFile == null) {
+ throw new ResourceNotFoundException(String.format("The FlowFile with UUID %s is no longer in the active queue.", flowFileUuid));
+ }
+
+ // get the attributes and ensure appropriate access
+ final Map<String, String> attributes = flowFile.getAttributes();
+ final Authorizable dataAuthorizable = new DataAuthorizable(connection.getSourceAuthorizable());
+ dataAuthorizable.authorize(authorizer, RequestAction.READ, user, attributes);
+
+ // get the filename and fall back to the identifier (should never happen)
+ String filename = attributes.get(CoreAttributes.FILENAME.key());
+ if (filename == null) {
+ filename = flowFileUuid;
+ }
+
+ // get the mime-type
+ final String type = attributes.get(CoreAttributes.MIME_TYPE.key());
+
+ // get the content
+ final InputStream content = flowController.getContent(flowFile, user.getIdentity(), requestUri);
+ return new DownloadableContent(filename, type, content);
+ } catch (final ContentNotFoundException cnfe) {
+ throw new ResourceNotFoundException("Unable to find the specified content.");
+ } catch (final IOException ioe) {
+ logger.error(String.format("Unable to get the content for flowfile (%s) at this time.", flowFileUuid), ioe);
+ throw new IllegalStateException("Unable to get the content at this time.");
+ }
+ }
+
+ /* setters */
+ public void setFlowController(final FlowController flowController) {
+ this.flowController = flowController;
+ }
+
+ public void setAuthorizer(Authorizer authorizer) {
+ this.authorizer = authorizer;
+ }
+}
diff --git a/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/server/JettyServer.java b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/server/JettyServer.java
new file mode 100644
index 0000000..a3a1840
--- /dev/null
+++ b/mod/designtool/designtool-web/src/main/java/org/apache/nifi/web/server/JettyServer.java
@@ -0,0 +1,1226 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Modifications to the original nifi code for the ONAP project are made
+ * available under the Apache License, Version 2.0
+ */
+package org.apache.nifi.web.server;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.URI;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.stream.Collectors;
+import javax.servlet.DispatcherType;
+import javax.servlet.Filter;
+import javax.servlet.ServletContext;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.NiFiServer;
+import org.apache.nifi.bundle.Bundle;
+import org.apache.nifi.bundle.BundleDetails;
+import org.apache.nifi.controller.UninheritableFlowException;
+import org.apache.nifi.controller.serialization.FlowSerializationException;
+import org.apache.nifi.controller.serialization.FlowSynchronizationException;
+import org.apache.nifi.documentation.DocGenerator;
+import org.apache.nifi.lifecycle.LifeCycleStartException;
+import org.apache.nifi.nar.ExtensionDiscoveringManager;
+import org.apache.nifi.nar.ExtensionManagerHolder;
+import org.apache.nifi.nar.ExtensionMapping;
+import org.apache.nifi.nar.ExtensionUiLoader;
+import org.apache.nifi.nar.NarAutoLoader;
+import org.apache.nifi.nar.DCAEAutoLoader;
+import org.apache.nifi.nar.NarClassLoadersHolder;
+import org.apache.nifi.nar.NarLoader;
+import org.apache.nifi.nar.StandardExtensionDiscoveringManager;
+import org.apache.nifi.nar.StandardNarLoader;
+import org.apache.nifi.processor.DataUnit;
+import org.apache.nifi.security.util.KeyStoreUtils;
+import org.apache.nifi.services.FlowService;
+import org.apache.nifi.ui.extension.UiExtension;
+import org.apache.nifi.ui.extension.UiExtensionMapping;
+import org.apache.nifi.util.FormatUtils;
+import org.apache.nifi.util.NiFiProperties;
+import org.apache.nifi.web.ContentAccess;
+import org.apache.nifi.web.NiFiWebConfigurationContext;
+import org.apache.nifi.web.UiExtensionType;
+import org.apache.nifi.web.security.headers.ContentSecurityPolicyFilter;
+import org.apache.nifi.web.security.headers.StrictTransportSecurityFilter;
+import org.apache.nifi.web.security.headers.XFrameOptionsFilter;
+import org.apache.nifi.web.security.headers.XSSProtectionFilter;
+import org.eclipse.jetty.annotations.AnnotationConfiguration;
+import org.eclipse.jetty.deploy.App;
+import org.eclipse.jetty.deploy.DeploymentManager;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerList;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.Configuration;
+import org.eclipse.jetty.webapp.JettyWebXmlConfiguration;
+import org.eclipse.jetty.webapp.WebAppClassLoader;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.web.context.WebApplicationContext;
+import org.springframework.web.context.support.WebApplicationContextUtils;
+
+/**
+ * Encapsulates the Jetty instance.
+ */
+public class JettyServer implements NiFiServer, ExtensionUiLoader {
+
+ private static final Logger logger = LoggerFactory.getLogger(JettyServer.class);
+ private static final String WEB_DEFAULTS_XML = "org/apache/nifi/web/webdefault.xml";
+
+ private static final String CONTAINER_INCLUDE_PATTERN_KEY = "org.eclipse.jetty.server.webapp.ContainerIncludeJarPattern";
+    private static final String CONTAINER_INCLUDE_PATTERN_VALUE = ".*/[^/]*servlet-api-[^/]*\\.jar$|.*/javax.servlet.jsp.jstl-.*\\.jar$|.*/[^/]*taglibs.*\\.jar$";
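+    // Note: the include pattern above limits Jetty's container classpath scanning (for TLDs,
+    // web-fragments and annotations) to servlet-api, JSTL and taglib jars, which keeps webapp
+    // start-up scanning inexpensive.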
+
+ private static final FileFilter WAR_FILTER = new FileFilter() {
+ @Override
+ public boolean accept(File pathname) {
+ final String nameToTest = pathname.getName().toLowerCase();
+ return nameToTest.endsWith(".war") && pathname.isFile();
+ }
+ };
+
+ private final Server server;
+ private final NiFiProperties props;
+
+ private Bundle systemBundle;
+ private Set<Bundle> bundles;
+ private ExtensionMapping extensionMapping;
+ private NarAutoLoader narAutoLoader;
+ private DCAEAutoLoader dcaeAutoLoader;
+
+ private WebAppContext webApiContext;
+ private WebAppContext webDocsContext;
+
+ // content viewer and mime type specific extensions
+ private WebAppContext webContentViewerContext;
+ private Collection<WebAppContext> contentViewerWebContexts;
+
+ // component (processor, controller service, reporting task) ui extensions
+ private UiExtensionMapping componentUiExtensions;
+ private Collection<WebAppContext> componentUiExtensionWebContexts;
+
+ private DeploymentManager deploymentManager;
+
+ public JettyServer(final NiFiProperties props, final Set<Bundle> bundles) {
+ final QueuedThreadPool threadPool = new QueuedThreadPool(props.getWebThreads());
+ threadPool.setName("NiFi Web Server");
+
+ // create the server
+ this.server = new Server(threadPool);
+ this.props = props;
+
+ // enable the annotation based configuration to ensure the jsp container is initialized properly
+ final Configuration.ClassList classlist = Configuration.ClassList.setServerDefault(server);
+ classlist.addBefore(JettyWebXmlConfiguration.class.getName(), AnnotationConfiguration.class.getName());
+
+ // configure server
+ configureConnectors(server);
+
+ // load wars from the bundle
+ final Handler warHandlers = loadInitialWars(bundles);
+
+ final HandlerList allHandlers = new HandlerList();
+
+ // Only restrict the host header if running in HTTPS mode
+ if (props.isHTTPSConfigured()) {
+ // Create a handler for the host header and add it to the server
+ HostHeaderHandler hostHeaderHandler = new HostHeaderHandler(props);
+ logger.info("Created HostHeaderHandler [" + hostHeaderHandler.toString() + "]");
+
+ // Add this before the WAR handlers
+ allHandlers.addHandler(hostHeaderHandler);
+ } else {
+ logger.info("Running in HTTP mode; host headers not restricted");
+ }
+
+
+ final ContextHandlerCollection contextHandlers = new ContextHandlerCollection();
+ contextHandlers.addHandler(warHandlers);
+ allHandlers.addHandler(contextHandlers);
+ server.setHandler(allHandlers);
+
+ deploymentManager = new DeploymentManager();
+ deploymentManager.setContextAttribute(CONTAINER_INCLUDE_PATTERN_KEY, CONTAINER_INCLUDE_PATTERN_VALUE);
+ deploymentManager.setContexts(contextHandlers);
+ server.addBean(deploymentManager);
+ }
+
+ /**
+ * Instantiates this object but does not perform any configuration. Used for unit testing.
+ */
+ JettyServer(Server server, NiFiProperties properties) {
+ this.server = server;
+ this.props = properties;
+ }
+
+ private Handler loadInitialWars(final Set<Bundle> bundles) {
+
+ // load WARs
+ final Map<File, Bundle> warToBundleLookup = findWars(bundles);
+
+ // locate each war being deployed
+ File webUiWar = null;
+ File webApiWar = null;
+ File webErrorWar = null;
+ File webDocsWar = null;
+ File webContentViewerWar = null;
+ Map<File, Bundle> otherWars = new HashMap<>();
+ for (Map.Entry<File,Bundle> warBundleEntry : warToBundleLookup.entrySet()) {
+ final File war = warBundleEntry.getKey();
+ final Bundle warBundle = warBundleEntry.getValue();
+
+ if (war.getName().toLowerCase().startsWith("nifi-web-api")) {
+ webApiWar = war;
+ } else if (war.getName().toLowerCase().startsWith("nifi-web-error")) {
+ webErrorWar = war;
+ } else if (war.getName().toLowerCase().startsWith("nifi-web-docs")) {
+ webDocsWar = war;
+ } else if (war.getName().toLowerCase().startsWith("nifi-web-content-viewer")) {
+ webContentViewerWar = war;
+ } else if (war.getName().toLowerCase().startsWith("nifi-web")) {
+ webUiWar = war;
+ } else {
+ otherWars.put(war, warBundle);
+ }
+ }
+
+ // ensure the required wars were found
+ if (webUiWar == null) {
+ throw new RuntimeException("Unable to load nifi-web WAR");
+ } else if (webApiWar == null) {
+ throw new RuntimeException("Unable to load nifi-web-api WAR");
+ } else if (webDocsWar == null) {
+ throw new RuntimeException("Unable to load nifi-web-docs WAR");
+ } else if (webErrorWar == null) {
+ throw new RuntimeException("Unable to load nifi-web-error WAR");
+ } else if (webContentViewerWar == null) {
+ throw new RuntimeException("Unable to load nifi-web-content-viewer WAR");
+ }
+
+ // handlers for each war and init params for the web api
+ final ExtensionUiInfo extensionUiInfo = loadWars(otherWars);
+ componentUiExtensionWebContexts = new ArrayList<>(extensionUiInfo.getComponentUiExtensionWebContexts());
+ contentViewerWebContexts = new ArrayList<>(extensionUiInfo.getContentViewerWebContexts());
+ componentUiExtensions = new UiExtensionMapping(extensionUiInfo.getComponentUiExtensionsByType());
+
+ final HandlerCollection webAppContextHandlers = new HandlerCollection();
+ final Collection<WebAppContext> extensionUiContexts = extensionUiInfo.getWebAppContexts();
+        extensionUiContexts.forEach(webAppContextHandlers::addHandler);
+
+ final ClassLoader frameworkClassLoader = getClass().getClassLoader();
+
+ // load the web ui app
+ final WebAppContext webUiContext = loadWar(webUiWar, "/nifi", frameworkClassLoader);
+ webUiContext.getInitParams().put("oidc-supported", String.valueOf(props.isOidcEnabled()));
+ webUiContext.getInitParams().put("knox-supported", String.valueOf(props.isKnoxSsoEnabled()));
+ webUiContext.getInitParams().put("whitelistedContextPaths", props.getWhitelistedContextPaths());
+ webAppContextHandlers.addHandler(webUiContext);
+
+ // load the web api app
+ webApiContext = loadWar(webApiWar, "/nifi-api", frameworkClassLoader);
+ webAppContextHandlers.addHandler(webApiContext);
+
+ // load the content viewer app
+ webContentViewerContext = loadWar(webContentViewerWar, "/nifi-content-viewer", frameworkClassLoader);
+ webContentViewerContext.getInitParams().putAll(extensionUiInfo.getMimeMappings());
+ webAppContextHandlers.addHandler(webContentViewerContext);
+
+ // create a web app for the docs
+ final String docsContextPath = "/nifi-docs";
+
+ // load the documentation war
+ webDocsContext = loadWar(webDocsWar, docsContextPath, frameworkClassLoader);
+
+ // add the servlets which serve the HTML documentation within the documentation web app
+ addDocsServlets(webDocsContext);
+
+ webAppContextHandlers.addHandler(webDocsContext);
+
+ // load the web error app
+ final WebAppContext webErrorContext = loadWar(webErrorWar, "/", frameworkClassLoader);
+ webErrorContext.getInitParams().put("whitelistedContextPaths", props.getWhitelistedContextPaths());
+ webAppContextHandlers.addHandler(webErrorContext);
+
+ // deploy the web apps
+ return gzip(webAppContextHandlers);
+ }
+
+ @Override
+ public void loadExtensionUis(final Set<Bundle> bundles) {
+ // Find and load any WARs contained within the set of bundles...
+ final Map<File, Bundle> warToBundleLookup = findWars(bundles);
+ final ExtensionUiInfo extensionUiInfo = loadWars(warToBundleLookup);
+
+ final Collection<WebAppContext> webAppContexts = extensionUiInfo.getWebAppContexts();
+ if (CollectionUtils.isEmpty(webAppContexts)) {
+ logger.debug("No webapp contexts were loaded, returning...");
+ return;
+ }
+
+ // Deploy each WAR that was loaded...
+ for (final WebAppContext webAppContext : webAppContexts) {
+ final App extensionUiApp = new App(deploymentManager, null, "nifi-jetty-server", webAppContext);
+ deploymentManager.addApp(extensionUiApp);
+ }
+
+ final Collection<WebAppContext> componentUiExtensionWebContexts = extensionUiInfo.getComponentUiExtensionWebContexts();
+ final Collection<WebAppContext> contentViewerWebContexts = extensionUiInfo.getContentViewerWebContexts();
+
+ // Inject the configuration context and security filter into contexts that need it
+ final ServletContext webApiServletContext = webApiContext.getServletHandler().getServletContext();
+ final WebApplicationContext webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(webApiServletContext);
+ final NiFiWebConfigurationContext configurationContext = webApplicationContext.getBean("nifiWebConfigurationContext", NiFiWebConfigurationContext.class);
+ final FilterHolder securityFilter = webApiContext.getServletHandler().getFilter("springSecurityFilterChain");
+
+ performInjectionForComponentUis(componentUiExtensionWebContexts, configurationContext, securityFilter);
+ performInjectionForContentViewerUis(contentViewerWebContexts, securityFilter);
+
+ // Merge results of current loading into previously loaded results...
+ this.componentUiExtensionWebContexts.addAll(componentUiExtensionWebContexts);
+ this.contentViewerWebContexts.addAll(contentViewerWebContexts);
+ this.componentUiExtensions.addUiExtensions(extensionUiInfo.getComponentUiExtensionsByType());
+
+ for (final WebAppContext webAppContext : webAppContexts) {
+ final Throwable t = webAppContext.getUnavailableException();
+ if (t != null) {
+ logger.error("Unable to start context due to " + t.getMessage(), t);
+ }
+ }
+ }
+
+ private ExtensionUiInfo loadWars(final Map<File, Bundle> warToBundleLookup) {
+ // handlers for each war and init params for the web api
+ final List<WebAppContext> webAppContexts = new ArrayList<>();
+ final Map<String, String> mimeMappings = new HashMap<>();
+ final Collection<WebAppContext> componentUiExtensionWebContexts = new ArrayList<>();
+ final Collection<WebAppContext> contentViewerWebContexts = new ArrayList<>();
+ final Map<String, List<UiExtension>> componentUiExtensionsByType = new HashMap<>();
+
+ final ClassLoader frameworkClassLoader = getClass().getClassLoader();
+ final ClassLoader jettyClassLoader = frameworkClassLoader.getParent();
+
+ // deploy the other wars
+ if (!warToBundleLookup.isEmpty()) {
+ // ui extension organized by component type
+ for (Map.Entry<File,Bundle> warBundleEntry : warToBundleLookup.entrySet()) {
+ final File war = warBundleEntry.getKey();
+ final Bundle warBundle = warBundleEntry.getValue();
+
+ // identify all known extension types in the war
+ final Map<UiExtensionType, List<String>> uiExtensionInWar = new HashMap<>();
+ identifyUiExtensionsForComponents(uiExtensionInWar, war);
+
+                // only include wars that provide custom processor UIs
+ if (!uiExtensionInWar.isEmpty()) {
+ // get the context path
+ String warName = StringUtils.substringBeforeLast(war.getName(), ".");
+ String warContextPath = String.format("/%s", warName);
+
+ // get the classloader for this war
+ ClassLoader narClassLoaderForWar = warBundle.getClassLoader();
+
+ // this should never be null
+ if (narClassLoaderForWar == null) {
+ narClassLoaderForWar = jettyClassLoader;
+ }
+
+ // create the extension web app context
+ WebAppContext extensionUiContext = loadWar(war, warContextPath, narClassLoaderForWar);
+
+ // create the ui extensions
+ for (final Map.Entry<UiExtensionType, List<String>> entry : uiExtensionInWar.entrySet()) {
+ final UiExtensionType extensionType = entry.getKey();
+ final List<String> types = entry.getValue();
+
+ if (UiExtensionType.ContentViewer.equals(extensionType)) {
+ // consider each content type identified
+ for (final String contentType : types) {
+ // map the content type to the context path
+ mimeMappings.put(contentType, warContextPath);
+ }
+
+ // this ui extension provides a content viewer
+ contentViewerWebContexts.add(extensionUiContext);
+ } else {
+ // consider each component type identified
+ for (final String componentTypeCoordinates : types) {
+ logger.info(String.format("Loading UI extension [%s, %s] for %s", extensionType, warContextPath, componentTypeCoordinates));
+
+ // record the extension definition
+ final UiExtension uiExtension = new UiExtension(extensionType, warContextPath);
+
+ // create if this is the first extension for this component type
+ List<UiExtension> componentUiExtensionsForType = componentUiExtensionsByType.get(componentTypeCoordinates);
+ if (componentUiExtensionsForType == null) {
+ componentUiExtensionsForType = new ArrayList<>();
+ componentUiExtensionsByType.put(componentTypeCoordinates, componentUiExtensionsForType);
+ }
+
+                                // see if there is already a ui extension of this same type
+ if (containsUiExtensionType(componentUiExtensionsForType, extensionType)) {
+ throw new IllegalStateException(String.format("Encountered duplicate UI for %s", componentTypeCoordinates));
+ }
+
+ // record this extension
+ componentUiExtensionsForType.add(uiExtension);
+ }
+
+ // this ui extension provides a component custom ui
+ componentUiExtensionWebContexts.add(extensionUiContext);
+ }
+ }
+
+ // include custom ui web context in the handlers
+ webAppContexts.add(extensionUiContext);
+ }
+ }
+ }
+
+ return new ExtensionUiInfo(webAppContexts, mimeMappings, componentUiExtensionWebContexts, contentViewerWebContexts, componentUiExtensionsByType);
+ }
+
+ /**
+     * Returns whether the specified list of UI extensions already contains an extension of the specified type.
+     *
+     * @param componentUiExtensionsForType ui extensions for the type
+     * @param extensionType type of ui extension
+     * @return whether the specified list of UI extensions already contains an extension of the specified type
+ */
+ private boolean containsUiExtensionType(final List<UiExtension> componentUiExtensionsForType, final UiExtensionType extensionType) {
+ for (final UiExtension uiExtension : componentUiExtensionsForType) {
+ if (extensionType.equals(uiExtension.getExtensionType())) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Enables compression for the specified handler.
+ *
+ * @param handler handler to enable compression for
+ * @return compression enabled handler
+ */
+ private Handler gzip(final Handler handler) {
+ final GzipHandler gzip = new GzipHandler();
+ gzip.setIncludedMethods("GET", "POST", "PUT", "DELETE");
+ gzip.setHandler(handler);
+ return gzip;
+ }
+
+ private Map<File, Bundle> findWars(final Set<Bundle> bundles) {
+ final Map<File, Bundle> wars = new HashMap<>();
+
+ // consider each nar working directory
+ bundles.forEach(bundle -> {
+ final BundleDetails details = bundle.getBundleDetails();
+ final File narDependencies = new File(details.getWorkingDirectory(), "NAR-INF/bundled-dependencies");
+ if (narDependencies.isDirectory()) {
+ // list the wars from this nar
+ final File[] narDependencyDirs = narDependencies.listFiles(WAR_FILTER);
+ if (narDependencyDirs == null) {
+ throw new IllegalStateException(String.format("Unable to access working directory for NAR dependencies in: %s", narDependencies.getAbsolutePath()));
+ }
+
+ // add each war
+ for (final File war : narDependencyDirs) {
+ wars.put(war, bundle);
+ }
+ }
+ });
+
+ return wars;
+ }
+
+ private void readUiExtensions(final Map<UiExtensionType, List<String>> uiExtensions, final UiExtensionType uiExtensionType, final JarFile jarFile, final JarEntry jarEntry) throws IOException {
+ if (jarEntry == null) {
+ return;
+ }
+
+        // get an input stream for the ui extension configuration file
+ try (BufferedReader in = new BufferedReader(new InputStreamReader(jarFile.getInputStream(jarEntry)))) {
+
+ // read in each configured type
+ String rawComponentType;
+ while ((rawComponentType = in.readLine()) != null) {
+ // extract the component type
+ final String componentType = extractComponentType(rawComponentType);
+ if (componentType != null) {
+ List<String> extensions = uiExtensions.get(uiExtensionType);
+
+ // if there are currently no extensions for this type create it
+ if (extensions == null) {
+ extensions = new ArrayList<>();
+ uiExtensions.put(uiExtensionType, extensions);
+ }
+
+ // add the specified type
+ extensions.add(componentType);
+ }
+ }
+ }
+ }
+
+ /**
+ * Identifies all known UI extensions and stores them in the specified map.
+ *
+ * @param uiExtensions extensions
+ * @param warFile war
+ */
+ private void identifyUiExtensionsForComponents(final Map<UiExtensionType, List<String>> uiExtensions, final File warFile) {
+ try (final JarFile jarFile = new JarFile(warFile)) {
+ // locate the ui extensions
+ readUiExtensions(uiExtensions, UiExtensionType.ContentViewer, jarFile, jarFile.getJarEntry("META-INF/nifi-content-viewer"));
+ readUiExtensions(uiExtensions, UiExtensionType.ProcessorConfiguration, jarFile, jarFile.getJarEntry("META-INF/nifi-processor-configuration"));
+ readUiExtensions(uiExtensions, UiExtensionType.ControllerServiceConfiguration, jarFile, jarFile.getJarEntry("META-INF/nifi-controller-service-configuration"));
+ readUiExtensions(uiExtensions, UiExtensionType.ReportingTaskConfiguration, jarFile, jarFile.getJarEntry("META-INF/nifi-reporting-task-configuration"));
+ } catch (IOException ioe) {
+ logger.warn(String.format("Unable to inspect %s for a UI extensions.", warFile));
+ }
+ }
+
+ /**
+ * Extracts the component type. Trims the line and considers comments.
+ * Returns null if no type was found.
+ *
+ * @param line line
+ * @return type
+ */
+ private String extractComponentType(final String line) {
+ final String trimmedLine = line.trim();
+ if (!trimmedLine.isEmpty() && !trimmedLine.startsWith("#")) {
+ final int indexOfPound = trimmedLine.indexOf("#");
+ return (indexOfPound > 0) ? trimmedLine.substring(0, indexOfPound) : trimmedLine;
+ }
+ return null;
+ }
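+    // Illustration of extractComponentType above, with hypothetical input values:
+    //   "org.example.MyProcessor"          -> "org.example.MyProcessor"
+    //   "# a comment line" or a blank line -> null
+    //   "org.example.MyProcessor # note"   -> the text before '#' (trailing whitespace is kept)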
+
+ private WebAppContext loadWar(final File warFile, final String contextPath, final ClassLoader parentClassLoader) {
+ final WebAppContext webappContext = new WebAppContext(warFile.getPath(), contextPath);
+ webappContext.setContextPath(contextPath);
+ webappContext.setDisplayName(contextPath);
+
+        // instruct jetty to examine these jars for tlds, web-fragments, etc
+ webappContext.setAttribute(CONTAINER_INCLUDE_PATTERN_KEY, CONTAINER_INCLUDE_PATTERN_VALUE);
+
+ // remove slf4j server class to allow WAR files to have slf4j dependencies in WEB-INF/lib
+ List<String> serverClasses = new ArrayList<>(Arrays.asList(webappContext.getServerClasses()));
+ serverClasses.remove("org.slf4j.");
+ webappContext.setServerClasses(serverClasses.toArray(new String[0]));
+ webappContext.setDefaultsDescriptor(WEB_DEFAULTS_XML);
+
+ // get the temp directory for this webapp
+ File tempDir = new File(props.getWebWorkingDirectory(), warFile.getName());
+ if (tempDir.exists() && !tempDir.isDirectory()) {
+ throw new RuntimeException(tempDir.getAbsolutePath() + " is not a directory");
+ } else if (!tempDir.exists()) {
+ final boolean made = tempDir.mkdirs();
+ if (!made) {
+ throw new RuntimeException(tempDir.getAbsolutePath() + " could not be created");
+ }
+ }
+ if (!(tempDir.canRead() && tempDir.canWrite())) {
+ throw new RuntimeException(tempDir.getAbsolutePath() + " directory does not have read/write privilege");
+ }
+
+ // configure the temp dir
+ webappContext.setTempDirectory(tempDir);
+
+        // configure the max form size (3x Jetty's 200,000 byte default)
+ webappContext.setMaxFormContentSize(600000);
+
+ // add HTTP security headers to all responses
+ final String ALL_PATHS = "/*";
+ ArrayList<Class<? extends Filter>> filters = new ArrayList<>(Arrays.asList(XFrameOptionsFilter.class, ContentSecurityPolicyFilter.class, XSSProtectionFilter.class));
+ if(props.isHTTPSConfigured()) {
+ filters.add(StrictTransportSecurityFilter.class);
+ }
+ filters.forEach( (filter) -> addFilters(filter, ALL_PATHS, webappContext));
+
+ try {
+ // configure the class loader - webappClassLoader -> jetty nar -> web app's nar -> ...
+ webappContext.setClassLoader(new WebAppClassLoader(parentClassLoader, webappContext));
+ } catch (final IOException ioe) {
+ startUpFailure(ioe);
+ }
+
+ logger.info("Loading WAR: " + warFile.getAbsolutePath() + " with context path set to " + contextPath);
+ return webappContext;
+ }
+
+ private void addFilters(Class<? extends Filter> clazz, String path, WebAppContext webappContext) {
+ FilterHolder holder = new FilterHolder(clazz);
+ holder.setName(clazz.getSimpleName());
+ webappContext.addFilter(holder, path, EnumSet.allOf(DispatcherType.class));
+ }
+
+ private void addDocsServlets(WebAppContext docsContext) {
+ try {
+ // Load the nifi/docs directory
+ final File docsDir = getDocsDir("docs");
+
+ // load the component documentation working directory
+ final File componentDocsDirPath = props.getComponentDocumentationWorkingDirectory();
+ final File workingDocsDirectory = getWorkingDocsDirectory(componentDocsDirPath);
+
+ // Load the API docs
+ final File webApiDocsDir = getWebApiDocsDir();
+
+ // Create the servlet which will serve the static resources
+ ServletHolder defaultHolder = new ServletHolder("default", DefaultServlet.class);
+ defaultHolder.setInitParameter("dirAllowed", "false");
+
+ ServletHolder docs = new ServletHolder("docs", DefaultServlet.class);
+ docs.setInitParameter("resourceBase", docsDir.getPath());
+
+ ServletHolder components = new ServletHolder("components", DefaultServlet.class);
+ components.setInitParameter("resourceBase", workingDocsDirectory.getPath());
+
+ ServletHolder restApi = new ServletHolder("rest-api", DefaultServlet.class);
+ restApi.setInitParameter("resourceBase", webApiDocsDir.getPath());
+
+ docsContext.addServlet(docs, "/html/*");
+ docsContext.addServlet(components, "/components/*");
+ docsContext.addServlet(restApi, "/rest-api/*");
+
+ docsContext.addServlet(defaultHolder, "/");
+
+ logger.info("Loading documents web app with context path set to " + docsContext.getContextPath());
+
+ } catch (Exception ex) {
+ logger.error("Unhandled Exception in createDocsWebApp: " + ex.getMessage());
+ startUpFailure(ex);
+ }
+ }
+
+
+ /**
+ * Returns a File object for the directory containing NIFI documentation.
+ * <p>
+ * Formerly, if the docsDirectory did not exist NIFI would fail to start
+ * with an IllegalStateException and a rather unhelpful log message.
+ * NIFI-2184 updates the process such that if the docsDirectory does not
+ * exist an attempt will be made to create the directory. If that is
+ * successful NIFI will no longer fail and will start successfully barring
+ * any other errors. The side effect of the docsDirectory not being present
+ * is that the documentation links under the 'General' portion of the help
+ * page will not be accessible, but at least the process will be running.
+ *
+ * @param docsDirectory Name of documentation directory in installation directory.
+     * @return A File object for the documentation directory; otherwise startUpFailure is called.
+ */
+ private File getDocsDir(final String docsDirectory) {
+ File docsDir;
+ try {
+ docsDir = Paths.get(docsDirectory).toRealPath().toFile();
+ } catch (IOException ex) {
+ logger.info("Directory '" + docsDirectory + "' is missing. Some documentation will be unavailable.");
+ docsDir = new File(docsDirectory).getAbsoluteFile();
+ final boolean made = docsDir.mkdirs();
+ if (!made) {
+ logger.error("Failed to create 'docs' directory!");
+ startUpFailure(new IOException(docsDir.getAbsolutePath() + " could not be created"));
+ }
+ }
+ return docsDir;
+ }
+
+ private File getWorkingDocsDirectory(final File componentDocsDirPath) {
+ File workingDocsDirectory = null;
+ try {
+ workingDocsDirectory = componentDocsDirPath.toPath().toRealPath().getParent().toFile();
+ } catch (IOException ex) {
+ logger.error("Failed to load :" + componentDocsDirPath.getAbsolutePath());
+ startUpFailure(ex);
+ }
+ return workingDocsDirectory;
+ }
+
+ private File getWebApiDocsDir() {
+ // load the rest documentation
+ final File webApiDocsDir = new File(webApiContext.getTempDirectory(), "webapp/docs");
+ if (!webApiDocsDir.exists()) {
+ final boolean made = webApiDocsDir.mkdirs();
+ if (!made) {
+ logger.error("Failed to create " + webApiDocsDir.getAbsolutePath());
+ startUpFailure(new IOException(webApiDocsDir.getAbsolutePath() + " could not be created"));
+ }
+ }
+ return webApiDocsDir;
+ }
+
+ private void configureConnectors(final Server server) throws ServerConfigurationException {
+ // create the http configuration
+ final HttpConfiguration httpConfiguration = new HttpConfiguration();
+ final int headerSize = DataUnit.parseDataSize(props.getWebMaxHeaderSize(), DataUnit.B).intValue();
+ httpConfiguration.setRequestHeaderSize(headerSize);
+ httpConfiguration.setResponseHeaderSize(headerSize);
+
+ // Check if both HTTP and HTTPS connectors are configured and fail if both are configured
+ if (bothHttpAndHttpsConnectorsConfigured(props)) {
+ logger.error("NiFi only supports one mode of HTTP or HTTPS operation, not both simultaneously. " +
+ "Check the nifi.properties file and ensure that either the HTTP hostname and port or the HTTPS hostname and port are empty");
+ startUpFailure(new IllegalStateException("Only one of the HTTP and HTTPS connectors can be configured at one time"));
+ }
+
+ if (props.getSslPort() != null) {
+ configureHttpsConnector(server, httpConfiguration);
+ } else if (props.getPort() != null) {
+ configureHttpConnector(server, httpConfiguration);
+ } else {
+ logger.error("Neither the HTTP nor HTTPS connector was configured in nifi.properties");
+ startUpFailure(new IllegalStateException("Must configure HTTP or HTTPS connector"));
+ }
+ }
+
+ /**
+ * Configures an HTTPS connector and adds it to the server.
+ *
+ * @param server the Jetty server instance
+ * @param httpConfiguration the configuration object for the HTTPS protocol settings
+ */
+ private void configureHttpsConnector(Server server, HttpConfiguration httpConfiguration) {
+ String hostname = props.getProperty(NiFiProperties.WEB_HTTPS_HOST);
+ final Integer port = props.getSslPort();
+ String connectorLabel = "HTTPS";
+ final Map<String, String> httpsNetworkInterfaces = props.getHttpsNetworkInterfaces();
+ ServerConnectorCreator<Server, HttpConfiguration, ServerConnector> scc = (s, c) -> createUnconfiguredSslServerConnector(s, c, port);
+
+ configureGenericConnector(server, httpConfiguration, hostname, port, connectorLabel, httpsNetworkInterfaces, scc);
+ }
+
+ /**
+ * Configures an HTTP connector and adds it to the server.
+ *
+ * @param server the Jetty server instance
+ * @param httpConfiguration the configuration object for the HTTP protocol settings
+ */
+ private void configureHttpConnector(Server server, HttpConfiguration httpConfiguration) {
+ String hostname = props.getProperty(NiFiProperties.WEB_HTTP_HOST);
+ final Integer port = props.getPort();
+ String connectorLabel = "HTTP";
+ final Map<String, String> httpNetworkInterfaces = props.getHttpNetworkInterfaces();
+ ServerConnectorCreator<Server, HttpConfiguration, ServerConnector> scc = (s, c) -> new ServerConnector(s, new HttpConnectionFactory(c));
+
+ configureGenericConnector(server, httpConfiguration, hostname, port, connectorLabel, httpNetworkInterfaces, scc);
+ }
+
+ /**
+ * Configures an HTTP(S) connector for the server given the provided parameters. The functionality between HTTP and HTTPS connectors is largely similar.
+ * Here the common behavior has been extracted into a shared method and the respective calling methods obtain the right values and a lambda function for the differing behavior.
+ *
+ * @param server the Jetty server instance
+ * @param configuration the HTTP/HTTPS configuration instance
+ * @param hostname the hostname from the nifi.properties file
+ * @param port the port to expose
+ * @param connectorLabel used for log output (e.g. "HTTP" or "HTTPS")
+ * @param networkInterfaces the map of network interfaces from nifi.properties
+ * @param serverConnectorCreator a function which accepts a {@code Server} and {@code HttpConnection} instance and returns a {@code ServerConnector}
+ */
+ private void configureGenericConnector(Server server, HttpConfiguration configuration, String hostname, Integer port, String connectorLabel, Map<String, String> networkInterfaces,
+ ServerConnectorCreator<Server, HttpConfiguration, ServerConnector> serverConnectorCreator) {
+ if (port < 0 || (int) Math.pow(2, 16) <= port) {
+ throw new ServerConfigurationException("Invalid " + connectorLabel + " port: " + port);
+ }
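+        // Sanity note: valid TCP ports are 0-65535; (int) Math.pow(2, 16) == 65536, so the check
+        // above rejects anything outside that range.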
+
+ logger.info("Configuring Jetty for " + connectorLabel + " on port: " + port);
+
+ final List<Connector> serverConnectors = Lists.newArrayList();
+
+ // Calculate Idle Timeout as twice the auto-refresh interval. This ensures that even with some variance in timing,
+ // we are able to avoid closing connections from users' browsers most of the time. This can make a significant difference
+ // in HTTPS connections, as each HTTPS connection that is established must perform the SSL handshake.
+ final String autoRefreshInterval = props.getAutoRefreshInterval();
+ final long autoRefreshMillis = autoRefreshInterval == null ? 30000L : FormatUtils.getTimeDuration(autoRefreshInterval, TimeUnit.MILLISECONDS);
+ final long idleTimeout = autoRefreshMillis * 2;
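+        // e.g. with the 30 000 ms fallback used when no auto-refresh interval is configured,
+        // this yields a 60 000 ms idle timeout.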
+
+ // If the interfaces collection is empty or each element is empty
+        if (networkInterfaces.isEmpty() || networkInterfaces.values().stream().allMatch(Strings::isNullOrEmpty)) {
+ final ServerConnector serverConnector = serverConnectorCreator.create(server, configuration);
+
+ // Set host and port
+ if (StringUtils.isNotBlank(hostname)) {
+ serverConnector.setHost(hostname);
+ }
+ serverConnector.setPort(port);
+ serverConnector.setIdleTimeout(idleTimeout);
+ serverConnectors.add(serverConnector);
+ } else {
+ // Add connectors for all IPs from network interfaces
+ serverConnectors.addAll(Lists.newArrayList(networkInterfaces.values().stream().map(ifaceName -> {
+ NetworkInterface iface = null;
+ try {
+ iface = NetworkInterface.getByName(ifaceName);
+ } catch (SocketException e) {
+ logger.error("Unable to get network interface by name {}", ifaceName, e);
+ }
+ if (iface == null) {
+ logger.warn("Unable to find network interface named {}", ifaceName);
+ }
+ return iface;
+ }).filter(Objects::nonNull).flatMap(iface -> Collections.list(iface.getInetAddresses()).stream())
+ .map(inetAddress -> {
+ final ServerConnector serverConnector = serverConnectorCreator.create(server, configuration);
+
+ // Set host and port
+ serverConnector.setHost(inetAddress.getHostAddress());
+ serverConnector.setPort(port);
+ serverConnector.setIdleTimeout(idleTimeout);
+
+ return serverConnector;
+ }).collect(Collectors.toList())));
+ }
+ // Add all connectors
+ serverConnectors.forEach(server::addConnector);
+ }
+
+ /**
+ * Returns true if there are configured properties for both HTTP and HTTPS connectors (specifically port because the hostname can be left blank in the HTTP connector).
+ * Prints a warning log message with the relevant properties.
+ *
+ * @param props the NiFiProperties
+ * @return true if both ports are present
+ */
+ static boolean bothHttpAndHttpsConnectorsConfigured(NiFiProperties props) {
+ Integer httpPort = props.getPort();
+ String httpHostname = props.getProperty(NiFiProperties.WEB_HTTP_HOST);
+
+ Integer httpsPort = props.getSslPort();
+ String httpsHostname = props.getProperty(NiFiProperties.WEB_HTTPS_HOST);
+
+ if (httpPort != null && httpsPort != null) {
+ logger.warn("Both the HTTP and HTTPS connectors are configured in nifi.properties. Only one of these connectors should be configured. See the NiFi Admin Guide for more details");
+ logger.warn("HTTP connector: http://" + httpHostname + ":" + httpPort);
+ logger.warn("HTTPS connector: https://" + httpsHostname + ":" + httpsPort);
+ return true;
+ }
+
+ return false;
+ }
+
+ private ServerConnector createUnconfiguredSslServerConnector(Server server, HttpConfiguration httpConfiguration, int port) {
+ // add some secure config
+ final HttpConfiguration httpsConfiguration = new HttpConfiguration(httpConfiguration);
+ httpsConfiguration.setSecureScheme("https");
+ httpsConfiguration.setSecurePort(port);
+ httpsConfiguration.addCustomizer(new SecureRequestCustomizer());
+
+ // build the connector
+ return new ServerConnector(server,
+ new SslConnectionFactory(createSslContextFactory(), "http/1.1"),
+ new HttpConnectionFactory(httpsConfiguration));
+ }
+
+ private SslContextFactory createSslContextFactory() {
+ final SslContextFactory contextFactory = new SslContextFactory();
+ configureSslContextFactory(contextFactory, props);
+ return contextFactory;
+ }
+
+ protected static void configureSslContextFactory(SslContextFactory contextFactory, NiFiProperties props) {
+ // require client auth when not supporting login, Kerberos service, or anonymous access
+ if (props.isClientAuthRequiredForRestApi()) {
+ contextFactory.setNeedClientAuth(true);
+ } else {
+ contextFactory.setWantClientAuth(true);
+ }
+
+        /* below code configures the SSL context factory from values provided in nifi.properties */
+ // keystore properties
+ if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_KEYSTORE))) {
+ contextFactory.setKeyStorePath(props.getProperty(NiFiProperties.SECURITY_KEYSTORE));
+ }
+ String keyStoreType = props.getProperty(NiFiProperties.SECURITY_KEYSTORE_TYPE);
+ if (StringUtils.isNotBlank(keyStoreType)) {
+ contextFactory.setKeyStoreType(keyStoreType);
+ String keyStoreProvider = KeyStoreUtils.getKeyStoreProvider(keyStoreType);
+ if (StringUtils.isNoneEmpty(keyStoreProvider)) {
+ contextFactory.setKeyStoreProvider(keyStoreProvider);
+ }
+ }
+ final String keystorePassword = props.getProperty(NiFiProperties.SECURITY_KEYSTORE_PASSWD);
+ final String keyPassword = props.getProperty(NiFiProperties.SECURITY_KEY_PASSWD);
+ if (StringUtils.isNotBlank(keystorePassword)) {
+            // if no key password was provided, then assume the key password is the same as the keystore password.
+ final String defaultKeyPassword = (StringUtils.isBlank(keyPassword)) ? keystorePassword : keyPassword;
+ contextFactory.setKeyStorePassword(keystorePassword);
+ contextFactory.setKeyManagerPassword(defaultKeyPassword);
+ } else if (StringUtils.isNotBlank(keyPassword)) {
+ // since no keystore password was provided, there will be no keystore integrity check
+ contextFactory.setKeyManagerPassword(keyPassword);
+ }
+
+ // truststore properties
+ if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE))) {
+ contextFactory.setTrustStorePath(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE));
+ }
+ String trustStoreType = props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_TYPE);
+ if (StringUtils.isNotBlank(trustStoreType)) {
+ contextFactory.setTrustStoreType(trustStoreType);
+ String trustStoreProvider = KeyStoreUtils.getKeyStoreProvider(trustStoreType);
+ if (StringUtils.isNoneEmpty(trustStoreProvider)) {
+ contextFactory.setTrustStoreProvider(trustStoreProvider);
+ }
+ }
+ if (StringUtils.isNotBlank(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD))) {
+ contextFactory.setTrustStorePassword(props.getProperty(NiFiProperties.SECURITY_TRUSTSTORE_PASSWD));
+ }
+ }
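+    // For illustration only -- hypothetical nifi.properties values consumed by
+    // configureSslContextFactory above (paths and passwords are placeholders):
+    //   nifi.security.keystore=/opt/nifi/conf/keystore.jks
+    //   nifi.security.keystoreType=JKS
+    //   nifi.security.keystorePasswd=<keystore password>
+    //   nifi.security.truststore=/opt/nifi/conf/truststore.jks
+    //   nifi.security.truststoreType=JKS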
+
+ @Override
+ public void start() {
+ try {
+ // Create a standard extension manager and discover extensions
+ final ExtensionDiscoveringManager extensionManager = new StandardExtensionDiscoveringManager();
+ extensionManager.discoverExtensions(systemBundle, bundles);
+ extensionManager.logClassLoaderMapping();
+
+ // Set the extension manager into the holder which makes it available to the Spring context via a factory bean
+ ExtensionManagerHolder.init(extensionManager);
+
+ // Generate docs for extensions
+ DocGenerator.generate(props, extensionManager, extensionMapping);
+
+ // start the server
+ server.start();
+
+ // ensure everything started successfully
+ for (Handler handler : server.getChildHandlers()) {
+ // see if the handler is a web app
+ if (handler instanceof WebAppContext) {
+ WebAppContext context = (WebAppContext) handler;
+
+ // see if this webapp had any exceptions that would
+ // cause it to be unavailable
+ if (context.getUnavailableException() != null) {
+ startUpFailure(context.getUnavailableException());
+ }
+ }
+ }
+
+ // ensure the appropriate wars deployed successfully before injecting the NiFi context and security filters
+ // this must be done after starting the server (and ensuring there were no start up failures)
+ if (webApiContext != null) {
+ // give the web api the component ui extensions
+ final ServletContext webApiServletContext = webApiContext.getServletHandler().getServletContext();
+ webApiServletContext.setAttribute("nifi-ui-extensions", componentUiExtensions);
+
+ // get the application context
+ final WebApplicationContext webApplicationContext = WebApplicationContextUtils.getRequiredWebApplicationContext(webApiServletContext);
+ final NiFiWebConfigurationContext configurationContext = webApplicationContext.getBean("nifiWebConfigurationContext", NiFiWebConfigurationContext.class);
+ final FilterHolder securityFilter = webApiContext.getServletHandler().getFilter("springSecurityFilterChain");
+
+ // component ui extensions
+ performInjectionForComponentUis(componentUiExtensionWebContexts, configurationContext, securityFilter);
+
+ // content viewer extensions
+ performInjectionForContentViewerUis(contentViewerWebContexts, securityFilter);
+
+ // content viewer controller
+ if (webContentViewerContext != null) {
+ final ContentAccess contentAccess = webApplicationContext.getBean("contentAccess", ContentAccess.class);
+
+ // add the content access
+ final ServletContext webContentViewerServletContext = webContentViewerContext.getServletHandler().getServletContext();
+ webContentViewerServletContext.setAttribute("nifi-content-access", contentAccess);
+
+ if (securityFilter != null) {
+ webContentViewerContext.addFilter(securityFilter, "/*", EnumSet.allOf(DispatcherType.class));
+ }
+ }
+ }
+
+ // ensure the web document war was loaded and provide the extension mapping
+ if (webDocsContext != null) {
+ final ServletContext webDocsServletContext = webDocsContext.getServletHandler().getServletContext();
+ webDocsServletContext.setAttribute("nifi-extension-mapping", extensionMapping);
+ }
+
+ // if this nifi is a node in a cluster, start the flow service and load the flow - the
+ // flow service is loaded here for clustered nodes because the loading of the flow will
+ // initialize the connection between the node and the NCM. if the node connects (starts
+ // heartbeating, etc), the NCM may issue web requests before the application (wars) have
+            // finished loading. this results in the node being disconnected since it's unable to
+ // successfully respond to the requests. to resolve this, flow loading was moved to here
+ // (after the wars have been successfully deployed) when this nifi instance is a node
+ // in a cluster
+ if (props.isNode()) {
+
+ FlowService flowService = null;
+ try {
+
+ logger.info("Loading Flow...");
+
+ ApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(webApiContext.getServletContext());
+ flowService = ctx.getBean("flowService", FlowService.class);
+
+ // start and load the flow
+ flowService.start();
+ flowService.load(null);
+
+ logger.info("Flow loaded successfully.");
+
+ } catch (BeansException | LifeCycleStartException | IOException | FlowSerializationException | FlowSynchronizationException | UninheritableFlowException e) {
+ // ensure the flow service is terminated
+ if (flowService != null && flowService.isRunning()) {
+ flowService.stop(false);
+ }
+ logger.error("Unable to load flow due to: " + e, e);
+ throw new Exception("Unable to load flow due to: " + e); // cannot wrap the exception as they are not defined in a classloader accessible to the caller
+ }
+ }
+
+ final NarLoader narLoader = new StandardNarLoader(
+ props.getExtensionsWorkingDirectory(),
+ props.getComponentDocumentationWorkingDirectory(),
+ NarClassLoadersHolder.getInstance(),
+ extensionManager,
+ extensionMapping,
+ this);
+
+ narAutoLoader = new NarAutoLoader(props.getNarAutoLoadDirectory(), narLoader);
+ narAutoLoader.start();
+
+ URI jarsIndex = props.getDCAEJarIndexURI();
+
+            // REVIEW: Added ability to turn off the loading of DCAE jars by providing no URL
+ if (jarsIndex == null) {
+ StringBuilder sb = new StringBuilder();
+ sb.append("Auto-loading of DCAE jars is turned off.");
+ sb.append(" You must set the value of \"nifi.dcae.jars.index.url\"");
+ sb.append(" to the full url to the index JSON of DCAE jars in the nifi.properties file");
+ sb.append(" in order to activate this feature.");
+ logger.warn(sb.toString());
+ } else {
+ this.dcaeAutoLoader = new DCAEAutoLoader();
+ this.dcaeAutoLoader.start(jarsIndex, extensionManager);
+ }
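+            // Hypothetical example of the property described above (URL is illustrative only):
+            //   nifi.dcae.jars.index.url=https://dcae.example.org/nifi-jars/index.json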
+
+ // dump the application url after confirming everything started successfully
+ dumpUrls();
+ } catch (Exception ex) {
+ startUpFailure(ex);
+ }
+ }
+
+ private void performInjectionForComponentUis(final Collection<WebAppContext> componentUiExtensionWebContexts,
+ final NiFiWebConfigurationContext configurationContext, final FilterHolder securityFilter) {
+ if (CollectionUtils.isNotEmpty(componentUiExtensionWebContexts)) {
+ for (final WebAppContext customUiContext : componentUiExtensionWebContexts) {
+ // set the NiFi context in each custom ui servlet context
+ final ServletContext customUiServletContext = customUiContext.getServletHandler().getServletContext();
+ customUiServletContext.setAttribute("nifi-web-configuration-context", configurationContext);
+
+ // add the security filter to any ui extensions wars
+ if (securityFilter != null) {
+ customUiContext.addFilter(securityFilter, "/*", EnumSet.allOf(DispatcherType.class));
+ }
+ }
+ }
+ }
+
+ private void performInjectionForContentViewerUis(final Collection<WebAppContext> contentViewerWebContexts,
+ final FilterHolder securityFilter) {
+ if (CollectionUtils.isNotEmpty(contentViewerWebContexts)) {
+ for (final WebAppContext contentViewerContext : contentViewerWebContexts) {
+ // add the security filter to any content viewer wars
+ if (securityFilter != null) {
+ contentViewerContext.addFilter(securityFilter, "/*", EnumSet.allOf(DispatcherType.class));
+ }
+ }
+ }
+ }
+
+ private void dumpUrls() throws SocketException {
+ final List<String> urls = new ArrayList<>();
+
+ for (Connector connector : server.getConnectors()) {
+ if (connector instanceof ServerConnector) {
+ final ServerConnector serverConnector = (ServerConnector) connector;
+
+ Set<String> hosts = new HashSet<>();
+
+ // determine the hosts
+ if (StringUtils.isNotBlank(serverConnector.getHost())) {
+ hosts.add(serverConnector.getHost());
+ } else {
+ Enumeration<NetworkInterface> networkInterfaces = NetworkInterface.getNetworkInterfaces();
+ if (networkInterfaces != null) {
+ for (NetworkInterface networkInterface : Collections.list(networkInterfaces)) {
+ for (InetAddress inetAddress : Collections.list(networkInterface.getInetAddresses())) {
+ hosts.add(inetAddress.getHostAddress());
+ }
+ }
+ }
+ }
+
+ // ensure some hosts were found
+ if (!hosts.isEmpty()) {
+ String scheme = "http";
+ if (props.getSslPort() != null && serverConnector.getPort() == props.getSslPort()) {
+ scheme = "https";
+ }
+
+ // dump each url
+ for (String host : hosts) {
+ urls.add(String.format("%s://%s:%s", scheme, host, serverConnector.getPort()));
+ }
+ }
+ }
+ }
+
+ if (urls.isEmpty()) {
+ logger.warn("NiFi has started, but the UI is not available on any hosts. Please verify the host properties.");
+ } else {
+ // log the ui location
+ logger.info("NiFi has started. The UI is available at the following URLs:");
+ for (final String url : urls) {
+ logger.info(String.format("%s/nifi", url));
+ }
+ }
+ }
+
+ private void startUpFailure(Throwable t) {
+ System.err.println("Failed to start web server: " + t.getMessage());
+ System.err.println("Shutting down...");
+ logger.warn("Failed to start web server... shutting down.", t);
+ System.exit(1);
+ }
+
+ @Override
+ public void setExtensionMapping(ExtensionMapping extensionMapping) {
+ this.extensionMapping = extensionMapping;
+ }
+
+ @Override
+ public void setBundles(Bundle systemBundle, Set<Bundle> bundles) {
+ this.systemBundle = systemBundle;
+ this.bundles = bundles;
+ }
+
+ @Override
+ public void stop() {
+ try {
+ server.stop();
+ } catch (Exception ex) {
+ logger.warn("Failed to stop web server", ex);
+ }
+
+ try {
+ if (narAutoLoader != null) {
+ narAutoLoader.stop();
+ }
+
+ if (dcaeAutoLoader != null) {
+ dcaeAutoLoader.stop();
+ }
+ } catch (Exception e) {
+ logger.warn("Failed to stop NAR auto-loader", e);
+ }
+ }
+
+ /**
+ * Holds the result of loading WARs for custom UIs.
+ */
+ private static class ExtensionUiInfo {
+
+ private final Collection<WebAppContext> webAppContexts;
+ private final Map<String, String> mimeMappings;
+ private final Collection<WebAppContext> componentUiExtensionWebContexts;
+ private final Collection<WebAppContext> contentViewerWebContexts;
+ private final Map<String, List<UiExtension>> componentUiExtensionsByType;
+
+ public ExtensionUiInfo(final Collection<WebAppContext> webAppContexts,
+ final Map<String, String> mimeMappings,
+ final Collection<WebAppContext> componentUiExtensionWebContexts,
+ final Collection<WebAppContext> contentViewerWebContexts,
+ final Map<String, List<UiExtension>> componentUiExtensionsByType) {
+ this.webAppContexts = webAppContexts;
+ this.mimeMappings = mimeMappings;
+ this.componentUiExtensionWebContexts = componentUiExtensionWebContexts;
+ this.contentViewerWebContexts = contentViewerWebContexts;
+ this.componentUiExtensionsByType = componentUiExtensionsByType;
+ }
+
+ public Collection<WebAppContext> getWebAppContexts() {
+ return webAppContexts;
+ }
+
+ public Map<String, String> getMimeMappings() {
+ return mimeMappings;
+ }
+
+ public Collection<WebAppContext> getComponentUiExtensionWebContexts() {
+ return componentUiExtensionWebContexts;
+ }
+
+ public Collection<WebAppContext> getContentViewerWebContexts() {
+ return contentViewerWebContexts;
+ }
+
+ public Map<String, List<UiExtension>> getComponentUiExtensionsByType() {
+ return componentUiExtensionsByType;
+ }
+ }
+}
+
+@FunctionalInterface
+interface ServerConnectorCreator<Server, HttpConfiguration, ServerConnector> {
+ ServerConnector create(Server server, HttpConfiguration httpConfiguration);
+}