| field | value | date |
|---|---|---|
| author | shrek2000 <orenkle@amdocs.com> | 2017-12-27 14:24:00 +0200 |
| committer | Avi Gaffa <avi.gaffa@amdocs.com> | 2017-12-27 13:30:23 +0000 |
| commit | a80a6e4fca573de7a90e099c697945383a70493e (patch) | |
| tree | 7e133717f11a37e5a3ada0441e6a543eac199f16 /openecomp-be | |
| parent | e4fee8d899d4bf90fb41de8d23dca174915c1096 (diff) | |
Sonar issue fix - Session is closeable
Cassandra Session is Closeable and should be managed with a try-with-resources block; a minimal sketch of the pattern follows below.
Issue-ID: SDC-801
Change-Id: Ib2cc178e029dce1915e2931fe58e4a5d6527dd5f
Signed-off-by: shrek2000 <orenkle@amdocs.com>
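
The pattern this commit applies is plain try-with-resources over the driver's Closeable types. Below is a minimal, standalone sketch of that pattern, not the project's code: it assumes the DataStax Java driver 3.x, a hypothetical class name, and a placeholder local contact point; the actual change instead wraps the session returned by the project's CassandraSessionFactory and passes that single session into executeQuery().

```java
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

public final class SessionCloseSketch {

    public static void main(String[] args) {
        // Cluster and Session both implement Closeable in the 3.x driver, so
        // try-with-resources closes them (session first, then cluster) even if
        // a query throws -- which is what the Sonar rule asks for.
        // "127.0.0.1" is a placeholder contact point for this sketch only.
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            ResultSet rs = session.execute("SELECT release_version FROM system.local");
            System.out.println(rs.one().getString("release_version"));
        }
        // No finally block needed; close() has already run at this point.
    }
}
```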
Diffstat (limited to 'openecomp-be')
2 files changed, 27 insertions, 28 deletions
```diff
diff --git a/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java b/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java
index c5e140ed7c..b5486f57be 100644
--- a/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java
+++ b/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java
@@ -42,7 +42,7 @@ import static java.nio.file.Files.createDirectories;
 public final class ExportDataCommand {
     private static final Logger logger = LoggerFactory.getLogger(ExportDataCommand.class);
     public static final String JOIN_DELIMITER = "$#";
-    public static final String JOIN_DELIMITER_SPILTTER = "\\$\\#";
+    public static final String JOIN_DELIMITER_SPLITTER = "\\$\\#";
     public static final String MAP_DELIMITER = "!@";
     public static final String MAP_DELIMITER_SPLITTER = "\\!\\@";
     public static final int THREAD_POOL_SIZE = 4;
@@ -54,33 +54,33 @@ public final class ExportDataCommand {
         ExecutorService executor = null;
         try {
             CassandraConnectionInitializer.setCassandraConnectionPropertiesToSystem();
-            final Set<String> filteredItems = Sets.newHashSet(filterItem);
             Path rootDir = Paths.get(ImportProperties.ROOT_DIRECTORY);
             initDir(rootDir);
-            Set<String> fis = filteredItems.stream().map(fi -> fi.replaceAll("\\r", "")).collect(Collectors.toSet());
-
-            Map<String, List<String>> queries;
-            Yaml yaml = new Yaml();
-            try (InputStream is = ExportDataCommand.class.getResourceAsStream("/queries.yaml")) {
-                queries = (Map<String, List<String>>) yaml.load(is);
-            }
-            List<String> queriesList = queries.get("queries");
-            List<String> itemsColumns = queries.get("item_columns");
-            Set<String> vlms = new HashSet<>();
-            CountDownLatch doneQueries = new CountDownLatch(queriesList.size());
-            executor = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
-            for (int i = 0; i < queriesList.size(); i++) {
-                executeQuery(queriesList.get(i), fis, itemsColumns.get(i), vlms, doneQueries, executor);
-            }
-            doneQueries.await();
-            if (!vlms.isEmpty()) {
-                CountDownLatch doneVmls = new CountDownLatch(queriesList.size());
-
+            try(Session session = CassandraSessionFactory.getSession()) {
+                final Set<String> filteredItems = Sets.newHashSet(filterItem);
+                Set<String> fis = filteredItems.stream().map(fi -> fi.replaceAll("\\r", "")).collect(Collectors.toSet());
+                Map<String, List<String>> queries;
+                Yaml yaml = new Yaml();
+                try (InputStream is = ExportDataCommand.class.getResourceAsStream("/queries.yaml")) {
+                    queries = (Map<String, List<String>>) yaml.load(is);
+                }
+                List<String> queriesList = queries.get("queries");
+                List<String> itemsColumns = queries.get("item_columns");
+                Set<String> vlms = new HashSet<>();
+                CountDownLatch doneQueries = new CountDownLatch(queriesList.size());
+                executor = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
                 for (int i = 0; i < queriesList.size(); i++) {
-                    executeQuery(queriesList.get(i), vlms, itemsColumns.get(i), null, doneVmls, executor);
+                    executeQuery(session, queriesList.get(i), fis, itemsColumns.get(i), vlms, doneQueries, executor);
+                }
+                doneQueries.await();
+                if (!vlms.isEmpty()) {
+                    CountDownLatch doneVmls = new CountDownLatch(queriesList.size());
+                    for (int i = 0; i < queriesList.size(); i++) {
+                        executeQuery(session, queriesList.get(i), vlms, itemsColumns.get(i), null, doneVmls, executor);
+                    }
+
+                    doneVmls.await();
                 }
-
-                doneVmls.await();
             }
             zipPath(rootDir);
             FileUtils.forceDelete(rootDir.toFile());
@@ -95,9 +95,8 @@ public final class ExportDataCommand {
     }
 
-    private static boolean executeQuery(final String query, final Set<String> filteredItems, final String filteredColumn,
+    private static boolean executeQuery(final Session session, final String query, final Set<String> filteredItems, final String filteredColumn,
                                         final Set<String> vlms, final CountDownLatch donequerying, Executor executor) {
-        Session session = CassandraSessionFactory.getSession();
         ResultSetFuture resultSetFuture = session.executeAsync(query);
         Futures.addCallback(resultSetFuture, new FutureCallback<ResultSet>() {
             @Override
diff --git a/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java b/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java
index 7504ad1194..8a671c5f8d 100644
--- a/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java
+++ b/openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java
@@ -114,7 +114,7 @@ public class ImportSingleTable {
                 byte[] decoded = Base64.getDecoder().decode(rowData);
                 String decodedStr = new String(decoded);
                 if (!StringUtils.isEmpty(decodedStr)) {
-                    String[] splitted = decodedStr.split(ExportDataCommand.JOIN_DELIMITER_SPILTTER);
+                    String[] splitted = decodedStr.split(ExportDataCommand.JOIN_DELIMITER_SPLITTER);
                     Set set = Sets.newHashSet(splitted);
                     set.remove("");
                     bind.setSet(i, set);
@@ -126,7 +126,7 @@ public class ImportSingleTable {
                 byte[] decodedMap = Base64.getDecoder().decode(rowData);
                 String mapStr = new String(decodedMap);
                 if (!StringUtils.isEmpty(mapStr)) {
-                    String[] splittedMap = mapStr.split(ExportDataCommand.JOIN_DELIMITER_SPILTTER);
+                    String[] splittedMap = mapStr.split(ExportDataCommand.JOIN_DELIMITER_SPLITTER);
                     Map<String, String> map = new HashMap<>();
                     for (String keyValue : splittedMap) {
                         String[] split = keyValue.split(ExportDataCommand.MAP_DELIMITER_SPLITTER);
```
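
Aside from the session handling, the diff only renames the misspelled JOIN_DELIMITER_SPILTTER constant to JOIN_DELIMITER_SPLITTER; the value stays the regex-escaped form of the join delimiter because String.split() treats its argument as a regular expression and '$' is a metacharacter. A small round-trip sketch of why both forms exist (constant values copied from ExportDataCommand; class and method names are hypothetical):

```java
import java.util.Arrays;

public final class DelimiterRoundTripSketch {

    // Mirrors ExportDataCommand: the join delimiter is literal text, while the
    // splitter is its regex-escaped form, since split() takes a regex.
    static final String JOIN_DELIMITER = "$#";
    static final String JOIN_DELIMITER_SPLITTER = "\\$\\#";

    public static void main(String[] args) {
        String joined = String.join(JOIN_DELIMITER, "a", "b", "c"); // "a$#b$#c"
        String[] parts = joined.split(JOIN_DELIMITER_SPLITTER);     // ["a", "b", "c"]
        System.out.println(Arrays.toString(parts));
    }
}
```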