diff options
Diffstat (limited to 'src/main/java/org/onap/dmaap/dmf/mr/service/impl')
10 files changed, 3232 insertions, 0 deletions
/*******************************************************************************
 * ============LICENSE_START=======================================================
 * org.onap.dmaap
 * ================================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package org.onap.dmaap.dmf.mr.service.impl;

import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.limits.Blacklist;
import com.att.nsa.security.NsaApiKey;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.backends.Consumer;
import org.onap.dmaap.dmf.mr.backends.ConsumerFactory;
import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
import org.onap.dmaap.dmf.mr.service.AdminService;
import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.Collection;
import java.util.Set;

/**
 * Administrative operations: inspecting/clearing the consumer cache and
 * managing the IP blacklist. Every operation first requires the caller to be
 * authenticated as the "admin" API key (see {@link #adminAuthenticate}).
 *
 * @author muzainulhaque.qazi
 */
@Component
public class AdminServiceImpl implements AdminService {

	private static final EELFLogger log = EELFManager.getInstance().getLogger(AdminServiceImpl.class);

	/**
	 * Responds with the current contents of the consumer cache as a JSON object
	 * of the form {@code {"consumers": [{"name":..., "created":..., "accessed":...}, ...]}}.
	 *
	 * @param dMaaPContext request/response context
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not the admin key
	 */
	@Override
	public void showConsumerCache(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
		adminAuthenticate(dMaaPContext);

		final JSONObject consumers = new JSONObject();
		final JSONArray jsonConsumersList = new JSONArray();

		for (Consumer consumer : getConsumerFactory(dMaaPContext).getConsumers()) {
			final JSONObject consumerObject = new JSONObject();
			consumerObject.put("name", consumer.getName());
			consumerObject.put("created", consumer.getCreateTimeMs());
			consumerObject.put("accessed", consumer.getLastAccessMs());
			jsonConsumersList.put(consumerObject);
		}

		consumers.put("consumers", jsonConsumersList);
		log.info("========== AdminServiceImpl: getConsumerCache: " + jsonConsumersList.toString() + "===========");
		DMaaPResponseBuilder.respondOk(dMaaPContext, consumers);
	}

	/**
	 * Clears the consumer cache and responds with a plain HTML confirmation.
	 *
	 * @param dMaaPContext request/response context
	 * @throws JSONException on JSON construction failure
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not the admin key
	 */
	@Override
	public void dropConsumerCache(DMaaPContext dMaaPContext) throws JSONException, IOException, AccessDeniedException {
		adminAuthenticate(dMaaPContext);
		getConsumerFactory(dMaaPContext).dropCache();
		DMaaPResponseBuilder.respondOkWithHtml(dMaaPContext, "Consumer cache cleared successfully");
	}

	/**
	 * @param dMaaPContext request context
	 * @return the ConsumerFactory configured for this node
	 */
	private ConsumerFactory getConsumerFactory(DMaaPContext dMaaPContext) {
		return dMaaPContext.getConfigReader().getfConsumerFactory();
	}

	/**
	 * @param dMaaPContext request context
	 * @return the shared IP blacklist
	 */
	private static Blacklist getIpBlacklist(DMaaPContext dMaaPContext) {
		return dMaaPContext.getConfigReader().getfIpBlackList();
	}

	/**
	 * Responds with the set of blacklisted IPs as {@code {"blacklist": [...]}}.
	 *
	 * @param dMaaPContext request/response context
	 * @throws IOException if the response cannot be written
	 * @throws AccessDeniedException if the caller is not the admin key
	 */
	@Override
	public void getBlacklist(DMaaPContext dMaaPContext) throws IOException, AccessDeniedException {
		adminAuthenticate(dMaaPContext);

		DMaaPResponseBuilder.respondOk(dMaaPContext,
				new JSONObject().put("blacklist", setToJsonArray(getIpBlacklist(dMaaPContext).asSet())));
	}

	/**
	 * Converts a set to a JSONArray; a null set yields an empty array.
	 */
	public static JSONArray setToJsonArray(Set<?> fields) {
		return collectionToJsonArray(fields);
	}

	/**
	 * Converts any collection to a JSONArray; a null collection yields an empty array.
	 */
	public static JSONArray collectionToJsonArray(Collection<?> fields) {
		final JSONArray a = new JSONArray();
		if (fields != null) {
			for (Object o : fields) {
				a.put(o);
			}
		}
		return a;
	}

	/**
	 * Adds an IP to the blacklist and responds 204 No Content.
	 *
	 * @param dMaaPContext request/response context
	 * @param ip address to blacklist
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if persisting the blacklist fails
	 * @throws AccessDeniedException if the caller is not the admin key
	 */
	@Override
	public void addToBlacklist(DMaaPContext dMaaPContext, String ip)
			throws IOException, ConfigDbException, AccessDeniedException {
		adminAuthenticate(dMaaPContext);

		getIpBlacklist(dMaaPContext).add(ip);
		DMaaPResponseBuilder.respondOkNoContent(dMaaPContext);
	}

	/**
	 * Removes an IP from the blacklist and responds 204 No Content.
	 *
	 * @param dMaaPContext request/response context
	 * @param ip address to remove
	 * @throws IOException if the response cannot be written
	 * @throws ConfigDbException if persisting the blacklist fails
	 * @throws AccessDeniedException if the caller is not the admin key
	 */
	@Override
	public void removeFromBlacklist(DMaaPContext dMaaPContext, String ip)
			throws IOException, ConfigDbException, AccessDeniedException {
		adminAuthenticate(dMaaPContext);

		getIpBlacklist(dMaaPContext).remove(ip);
		DMaaPResponseBuilder.respondOkNoContent(dMaaPContext);
	}

	/**
	 * Verifies the authenticated caller is the "admin" API key.
	 *
	 * @param dMaaPContext request context
	 * @throws AccessDeniedException if there is no authenticated user or it is not "admin"
	 */
	private static void adminAuthenticate(DMaaPContext dMaaPContext) throws AccessDeniedException {
		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dMaaPContext);
		// constant-first equals also guards against a null key value (the original
		// would NPE if user.getKey() returned null)
		if (user == null || !"admin".equals(user.getKey())) {
			throw new AccessDeniedException();
		}
	}

}
/*******************************************************************************
 * ============LICENSE_START=======================================================
 * org.onap.dmaap
 * ================================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package org.onap.dmaap.dmf.mr.service.impl;

import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.drumlin.service.standards.HttpStatusCodes;
import com.att.nsa.security.NsaApiKey;
import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException;
import com.att.nsa.security.db.NsaApiDb;
import com.att.nsa.security.db.NsaApiDb.KeyExistsException;
import com.att.nsa.security.db.simple.NsaSimpleApiKey;
import org.json.JSONObject;
import org.onap.dmaap.dmf.mr.beans.ApiKeyBean;
import org.onap.dmaap.dmf.mr.beans.DMaaPContext;
import org.onap.dmaap.dmf.mr.constants.CambriaConstants;
import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl;
import org.onap.dmaap.dmf.mr.service.ApiKeysService;
import org.onap.dmaap.dmf.mr.utils.ConfigurationReader;
import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder;
import org.onap.dmaap.dmf.mr.utils.Emailer;
import org.json.JSONArray;
import org.springframework.stereotype.Service;

import java.io.IOException;

/**
 * Implementation of the ApiKeysService, this will provide the below operations,
 * getAllApiKeys, getApiKey, createApiKey, updateApiKey, deleteApiKey
 *
 * @author nilanjana.maity
 */
@Service
public class ApiKeysServiceImpl implements ApiKeysService {

	// logger keyed by the class itself, consistent with the sibling service impls
	// (the original passed Class.toString(), producing a "class org...." logger name)
	private static final EELFLogger log = EELFManager.getInstance().getLogger(ApiKeysServiceImpl.class);

	private static final String MISSING_KEY_NAME_MSG = "Api key name is not mentioned.";

	/**
	 * Responds with all API keys known to the key database, as
	 * {@code {"apiKeys": [...]}}.
	 *
	 * @param dmaapContext request/response context
	 * @throws ConfigDbException if the key database cannot be read
	 * @throws IOException if the response cannot be written
	 */
	@Override
	public void getAllApiKeys(DMaaPContext dmaapContext) throws ConfigDbException, IOException {

		ConfigurationReader configReader = dmaapContext.getConfigReader();

		log.info("configReader : " + configReader.toString());

		final JSONObject result = new JSONObject();
		final JSONArray keys = new JSONArray();
		result.put("apiKeys", keys);

		NsaApiDb<NsaSimpleApiKey> apiDb = configReader.getfApiKeyDb();

		for (String key : apiDb.loadAllKeys()) {
			keys.put(key);
		}
		log.info("========== ApiKeysServiceImpl: getAllApiKeys: Api Keys are : " + keys.toString() + "===========");
		DMaaPResponseBuilder.respondOk(dmaapContext, result);
	}

	/**
	 * Responds with the named API key as JSON, or an error response
	 * (400 when the name is missing, 404 when the key does not exist).
	 *
	 * @param dmaapContext request/response context
	 * @param apikey key name from the request path; may be null
	 * @throws ConfigDbException if the key database cannot be read
	 * @throws IOException after an error response has been sent
	 */
	@Override
	public void getApiKey(DMaaPContext dmaapContext, String apikey) throws ConfigDbException, IOException {

		if (null == apikey) {
			// the original fell through here without sending any response at all,
			// leaving the HTTP request unanswered
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k400_badRequest,
					MISSING_KEY_NAME_MSG);
			throw new IOException();
		}

		final NsaSimpleApiKey simpleApiKey = getApiKeyDb(dmaapContext).loadApiKey(apikey);

		if (null == simpleApiKey) {
			log.info("========== ApiKeysServiceImpl: getApiKey: Error : API Key does not exist. ===========");
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k404_notFound,
					"Api key [" + apikey + "] does not exist.");
			throw new IOException();
		}

		final JSONObject result = simpleApiKey.asJsonObject();
		DMaaPResponseBuilder.respondOk(dmaapContext, result);
		log.info("========== ApiKeysServiceImpl: getApiKey : " + result.toString() + "===========");
	}

	/**
	 * Creates a new API key. Unless the cluster allows anonymous keys
	 * ({@code apiKeys.allowAnonymous=true}), a contact email is mandatory; when an
	 * email is provided, the key's secret is emailed instead of returned inline.
	 *
	 * @param dmaapContext request/response context
	 * @param nsaApiKey requested key name/secret/email/description
	 * @throws KeyExistsException if the key name is already taken
	 * @throws ConfigDbException if the key database cannot be written
	 * @throws IOException if the response cannot be written
	 */
	@Override
	public void createApiKey(DMaaPContext dmaapContext, ApiKeyBean nsaApiKey)
			throws KeyExistsException, ConfigDbException, IOException {

		log.debug("TopicService: : createApiKey....");

		final String contactEmail = nsaApiKey.getEmail();
		final boolean emailProvided = contactEmail != null && contactEmail.length() > 0 && contactEmail.indexOf("@") > 1;
		String allowAnonymousKeys = com.att.ajsc.filemonitor.AJSCPropertiesMap
				.getProperty(CambriaConstants.msgRtr_prop, "apiKeys.allowAnonymous");
		if (null == allowAnonymousKeys) {
			allowAnonymousKeys = "false";
		}

		// An email address is mandatory unless the cluster explicitly allows anonymous
		// keys. NOTE(review): the original condition demanded an email only when
		// anonymous keys WERE allowed, inverting the meaning of apiKeys.allowAnonymous.
		if (!allowAnonymousKeys.equalsIgnoreCase("true") && !emailProvided) {
			DMaaPResponseBuilder.respondWithErrorInJson(dmaapContext, 400, "You must provide an email address.");
			return;
		}

		final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
		final String apiKey = nsaApiKey.getKey();
		final String sharedSecret = nsaApiKey.getSharedSecret();
		final NsaSimpleApiKey key = apiKeyDb.createApiKey(apiKey, sharedSecret);

		if (null == key) {
			log.debug("=======ApiKeysServiceImpl: createApiKey : Error in creating API Key.=====");
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k500_internalServerError,
					"Failed to create api key.");
			throw new KeyExistsException(apiKey);
		}

		if (null != nsaApiKey.getEmail()) {
			key.setContactEmail(nsaApiKey.getEmail());
		}
		if (null != nsaApiKey.getDescription()) {
			key.setDescription(nsaApiKey.getDescription());
		}

		log.debug("=======ApiKeysServiceImpl: createApiKey : saving api key : " + key.toString() + "=====");
		apiKeyDb.saveApiKey(key);

		// email out the secret to validate the email address
		if (emailProvided) {
			String body = "\n" + "Your email address was provided as the creator of new API key \""
					+ apiKey + "\".\n" + "\n" + "If you did not make this request, please let us know."
					+ "but don't worry -"
					+ " the API key is useless without the information below, which has been provided "
					+ "only to you.\n" + "\n\n" + "For API key \"" + apiKey + "\", use API key secret:\n\n\t"
					+ sharedSecret + "\n\n" + "Note that it's normal to share the API key"
					+ " (" + apiKey + "). "
					+ "This is how you are granted access to resources " + "like a UEB topic or Flatiron scope. "
					+ "However, you should NOT share the API key's secret. " + "The API key is associated with your"
					+ " email alone. ALL access to data made with this " + "key will be your responsibility. If you "
					+ "share the secret, someone else can use the API key " + "to access proprietary data with your "
					+ "identity.\n" + "\n" + "Enjoy!\n" + "\n" + "The GFP/SA-2020 Team";

			Emailer em = dmaapContext.getConfigReader().getSystemEmailer();
			em.send(contactEmail, "New API Key", body);
		}
		log.debug("TopicService: : sending response.");

		final JSONObject o = key.asJsonObject();
		// never echo the secret inline when it was delivered by email
		o.put(NsaSimpleApiKey.kApiSecretField,
				emailProvided ? "Emailed to " + contactEmail + "." : key.getSecret());
		DMaaPResponseBuilder.respondOk(dmaapContext, o);
	}

	/**
	 * Updates the email and/or description of an existing API key. Only the owner
	 * (a caller authenticated with that same key) may update it.
	 *
	 * @param dmaapContext request/response context
	 * @param apikey key name from the request path; may be null
	 * @param nsaApiKey new email/description values (null fields are left unchanged)
	 * @throws ConfigDbException if the key database cannot be accessed
	 * @throws IOException after an error response has been sent
	 * @throws AccessDeniedException if the caller does not own the key
	 */
	@Override
	public void updateApiKey(DMaaPContext dmaapContext, String apikey, ApiKeyBean nsaApiKey)
			throws ConfigDbException, IOException, AccessDeniedException {

		if (null == apikey) {
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k400_badRequest,
					MISSING_KEY_NAME_MSG);
			log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
			throw new IOException();
		}

		final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
		final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);

		if (null == key) {
			// the original fell through here without sending any response
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k404_notFound,
					"Api key [" + apikey + "] does not exist.");
			log.info("======ApiKeysServiceImpl : updateApiKey : Error in Updating Key.============");
			throw new IOException();
		}

		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
		if (user == null || !user.getKey().equals(key.getKey())) {
			throw new AccessDeniedException("You must authenticate with the key you'd like to update.");
		}

		boolean shouldUpdate = false;
		if (null != nsaApiKey.getEmail()) {
			key.setContactEmail(nsaApiKey.getEmail());
			shouldUpdate = true;
		}
		if (null != nsaApiKey.getDescription()) {
			key.setDescription(nsaApiKey.getDescription());
			shouldUpdate = true;
		}
		if (shouldUpdate) {
			apiKeyDb.saveApiKey(key);
		}

		log.info("======ApiKeysServiceImpl : updateApiKey : Key Updated Successfully :"
				+ key.toString() + "=========");
		DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject());
	}

	/**
	 * Deletes an existing API key. Only the owner (a caller authenticated with
	 * that same key) may delete it.
	 *
	 * @param dmaapContext request/response context
	 * @param apikey key name from the request path; may be null
	 * @throws ConfigDbException if the key database cannot be accessed
	 * @throws IOException after an error response has been sent
	 * @throws AccessDeniedException if the caller does not own the key
	 */
	@Override
	public void deleteApiKey(DMaaPContext dmaapContext, String apikey)
			throws ConfigDbException, IOException, AccessDeniedException {

		if (null == apikey) {
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k400_badRequest,
					MISSING_KEY_NAME_MSG);
			log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
			throw new IOException();
		}

		final NsaApiDb<NsaSimpleApiKey> apiKeyDb = getApiKeyDb(dmaapContext);
		final NsaSimpleApiKey key = apiKeyDb.loadApiKey(apikey);

		if (null == key) {
			// the original fell through here without sending any response
			DMaaPResponseBuilder.respondWithError(dmaapContext, HttpStatusCodes.k404_notFound,
					"Api key [" + apikey + "] does not exist.");
			log.info("======ApiKeysServiceImpl : deleteApiKey : Error while deleting key.============");
			throw new IOException();
		}

		final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext);
		if (user == null || !user.getKey().equals(key.getKey())) {
			throw new AccessDeniedException("You don't own the API key.");
		}

		apiKeyDb.deleteApiKey(key);
		log.info("======ApiKeysServiceImpl : deleteApiKey : Deleted Key successfully.============");
		DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "Api key [" + apikey + "] deleted successfully.");
	}

	/**
	 * @param dmaapContext request context
	 * @return the API key database configured for this node
	 */
	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
		ConfigurationReader configReader = dmaapContext.getConfigReader();
		return configReader.getfApiKeyDb();
	}

}
/*******************************************************************************
 * ============LICENSE_START=======================================================
 * org.onap.dmaap
 * ================================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 *
 *******************************************************************************/
package org.onap.dmaap.dmf.mr.service.impl;

import com.att.nsa.configs.ConfigDb;
import com.att.nsa.configs.ConfigDbException;
import com.att.nsa.configs.ConfigPath;
import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionFactory;
import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObj;
import org.onap.dmaap.dmf.mr.transaction.DMaaPTransactionObjDB;
import org.onap.dmaap.dmf.mr.transaction.TransactionObj;

import java.util.Set;
import java.util.TreeSet;

/**
 * Persistent storage for Transaction objects built over an abstract config db.
 * Each transaction is stored as one child node of the base path, keyed by its id.
 *
 * @author anowarul.islam
 *
 * @param <K> concrete transaction type produced by the factory
 */
public class BaseTransactionDbImpl<K extends DMaaPTransactionObj> implements DMaaPTransactionObjDB<K> {

	private final ConfigDb fDb;
	private final ConfigPath fBasePath;
	private final DMaaPTransactionFactory<K> fKeyFactory;

	// default root node under which all transactions are stored
	private static final String kStdRootPath = "/transaction";

	/** Resolves the config-db node for a given transaction id. */
	private ConfigPath makePath(String transactionId) {
		return fBasePath.getChild(transactionId);
	}

	/**
	 * Construct a Transaction db over the given config db at the standard
	 * location ({@value #kStdRootPath}).
	 *
	 * @param db backing config database
	 * @param keyFactory factory for (de)serializing transaction objects
	 * @throws ConfigDbException if the base path cannot be created
	 */
	public BaseTransactionDbImpl(ConfigDb db, DMaaPTransactionFactory<K> keyFactory) throws ConfigDbException {
		this(db, kStdRootPath, keyFactory);
	}

	/**
	 * Construct a Transaction db over the given config db using the given root
	 * location, creating the root node if it does not yet exist.
	 *
	 * @param db backing config database
	 * @param rootPath root node under which transactions are stored
	 * @param keyFactory factory for (de)serializing transaction objects
	 * @throws ConfigDbException if the base path cannot be created
	 */
	public BaseTransactionDbImpl(ConfigDb db, String rootPath, DMaaPTransactionFactory<K> keyFactory)
			throws ConfigDbException {
		fDb = db;
		fBasePath = db.parse(rootPath);
		fKeyFactory = keyFactory;

		if (!db.exists(fBasePath)) {
			db.store(fBasePath, "");
		}
	}

	/**
	 * Create a new Transaction object. If one already exists under the given id,
	 * a KeyExistsException is thrown instead of overwriting it.
	 *
	 * @param id transaction id
	 * @return the new Transaction record
	 * @throws KeyExistsException if a transaction with this id already exists
	 * @throws ConfigDbException on storage failure
	 */
	public synchronized K createTransactionObj(String id) throws KeyExistsException, ConfigDbException {
		final ConfigPath path = makePath(id);
		if (fDb.exists(path)) {
			throw new KeyExistsException(id);
		}

		// make one, store it, return it
		final K newKey = fKeyFactory.makeNewTransactionId(id);
		fDb.store(path, newKey.serialize());
		return newKey;
	}

	/**
	 * Save a Transaction record. This must be used after changing auxiliary
	 * data on the record. Note that the transaction object must exist (via
	 * createTransactionObj).
	 *
	 * @param trnObj transaction object to persist
	 * @throws IllegalStateException if the object is not a {@link TransactionObj}
	 *         or is not already stored in this database
	 * @throws ConfigDbException on storage failure
	 */
	@Override
	public synchronized void saveTransactionObj(K trnObj) throws ConfigDbException {
		// report the two failure causes separately; the original collapsed them into
		// one misleading "not known to this database" message
		if (!(trnObj instanceof TransactionObj)) {
			throw new IllegalStateException(trnObj.getId() + " is not a TransactionObj and cannot be persisted here");
		}
		final ConfigPath path = makePath(trnObj.getId());
		if (!fDb.exists(path)) {
			throw new IllegalStateException(trnObj.getId() + " is not known to this database");
		}
		fDb.store(path, ((TransactionObj) trnObj).serialize());
	}

	/**
	 * Load a Transaction record based on the Transaction Id value.
	 *
	 * @param transactionId id to look up
	 * @return a Transaction Object record or null if none is stored
	 * @throws ConfigDbException on storage failure
	 */
	@Override
	public synchronized K loadTransactionObj(String transactionId) throws ConfigDbException {
		final String data = fDb.load(makePath(transactionId));
		if (data != null) {
			return fKeyFactory.makeNewTransactionObj(data);
		}
		return null;
	}

	/**
	 * Load all transaction ids known to this database. (This could be expensive.)
	 *
	 * @return a sorted set of all transaction ids
	 * @throws ConfigDbException on storage failure
	 */
	public synchronized Set<String> loadAllTransactionObjs() throws ConfigDbException {
		final TreeSet<String> result = new TreeSet<>();
		for (ConfigPath cp : fDb.loadChildrenNames(fBasePath)) {
			result.add(cp.getName());
		}
		return result;
	}

}
/*******************************************************************************
 * ============LICENSE_START===================================================
 * org.onap.dmaap
 * ============================================================================
 * Copyright © 2019 Nokia Intellectual Property. All rights reserved.
 * ============================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=====================================================
 ******************************************************************************/
package org.onap.dmaap.dmf.mr.service.impl;

import com.google.common.base.Preconditions;
import org.apache.http.HttpStatus;
import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages;
import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode;
import org.onap.dmaap.dmf.mr.exception.ErrorResponse;
import org.onap.dmaap.dmf.mr.utils.Utils;

import java.util.Date;

/**
 * Factory for the {@link ErrorResponse} objects used by the events service.
 * A provider is bound (via the {@link Builder}) to one request's context —
 * topic, consumer group/client, remote host, publisher identity — so each
 * getter only needs the error-specific details.
 */
class ErrorResponseProvider {

	private String clientId;
	private String topicName;
	private String consumerGroup;
	private String remoteHost;
	private String publisherId;
	private String publisherIp;
	private DMaaPErrorMessages errorMessages;

	// instances are only created through the Builder
	private ErrorResponseProvider() {

	}

	/** Consumer identity in the "group/client" form carried by every response. */
	private String consumerIdentity() {
		return consumerGroup + "/" + clientId;
	}

	/** Timestamp for the moment the error response is built. */
	private String timestamp() {
		return Utils.getFormattedDate(new Date());
	}

	/** Assembles an ErrorResponse with the fixed per-request context fields. */
	private ErrorResponse newResponse(int httpCode, int dmaapCode, String details) {
		return new ErrorResponse(httpCode, dmaapCode, details, null, timestamp(),
				topicName, publisherId, publisherIp, consumerIdentity(), remoteHost);
	}

	/** 403 response for a blacklisted source address. */
	ErrorResponse getIpBlacklistedError(String remoteAddr) {
		final String details = "Source address [" + remoteAddr
				+ "] is blacklisted. Please contact the cluster management team.";
		return newResponse(HttpStatus.SC_FORBIDDEN,
				DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), details);
	}

	/** 404 response for a topic that does not exist. */
	ErrorResponse getTopicNotFoundError() {
		final String details = errorMessages.getTopicNotExist() + "-[" + topicName + "]";
		return newResponse(HttpStatus.SC_NOT_FOUND,
				DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), details);
	}

	/** 401 response for a failed AAF permission check on the given action. */
	ErrorResponse getAafAuthorizationError(String permission, String action) {
		final String details = errorMessages.getNotPermitted1() + action
				+ errorMessages.getNotPermitted2() + topicName + " on " + permission;
		return newResponse(HttpStatus.SC_UNAUTHORIZED,
				DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), details);
	}

	/** 503 response when the backend is unavailable. */
	ErrorResponse getServiceUnavailableError(String msg) {
		return newResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
				DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
				errorMessages.getServerUnav() + msg);
	}

	/** 409 response for concurrent consumer requests on the same group/client. */
	ErrorResponse getConcurrentModificationError() {
		return newResponse(HttpStatus.SC_CONFLICT,
				DMaaPResponseCode.TOO_MANY_REQUESTS.getResponseCode(),
				"Couldn't respond to client, possible of consumer requests from more than one server. Please contact MR team if you see this issue occurs continously");
	}

	/** 503 catch-all response; the consumer is being closed. */
	ErrorResponse getGenericError(String msg) {
		return newResponse(HttpStatus.SC_SERVICE_UNAVAILABLE,
				DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(),
				"Couldn't respond to client, closing cambria consumer" + msg);
	}

	/**
	 * Fluent builder for {@link ErrorResponseProvider}. The error-messages
	 * bundle is mandatory; every other field is optional request context.
	 */
	public static class Builder {

		private String clientId;
		private String topicName;
		private String consumerGroup;
		private String remoteHost;
		private String publisherId;
		private String publisherIp;
		DMaaPErrorMessages errorMessages;

		Builder withErrorMessages(DMaaPErrorMessages errorMessages) {
			this.errorMessages = errorMessages;
			return this;
		}

		Builder withTopic(String topic) {
			this.topicName = topic;
			return this;
		}

		Builder withClient(String client) {
			this.clientId = client;
			return this;
		}

		Builder withConsumerGroup(String consumerGroup) {
			this.consumerGroup = consumerGroup;
			return this;
		}

		Builder withRemoteHost(String remoteHost) {
			this.remoteHost = remoteHost;
			return this;
		}

		Builder withPublisherId(String publisherId) {
			this.publisherId = publisherId;
			return this;
		}

		Builder withPublisherIp(String publisherIp) {
			this.publisherIp = publisherIp;
			return this;
		}

		/**
		 * @return a provider populated from this builder
		 * @throws IllegalArgumentException if no error-messages bundle was supplied
		 */
		public ErrorResponseProvider build() {
			Preconditions.checkArgument(errorMessages != null);
			final ErrorResponseProvider provider = new ErrorResponseProvider();
			provider.errorMessages = this.errorMessages;
			provider.topicName = this.topicName;
			provider.consumerGroup = this.consumerGroup;
			provider.clientId = this.clientId;
			provider.remoteHost = this.remoteHost;
			provider.publisherId = this.publisherId;
			provider.publisherIp = this.publisherIp;
			return provider;
		}
	}
}
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.standards.MimeTypes; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.util.rrConvertor; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.math.NumberUtils; +import org.apache.http.HttpStatus; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.errors.TopicExistsException; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.DMaaPCambriaLimiter; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import 
org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.EventsService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder.StreamWriter; +import org.onap.dmaap.dmf.mr.utils.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.MediaType; +import java.io.IOException; +import java.io.InputStream; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.ConcurrentModificationException; +import java.util.Date; +import java.util.LinkedList; + +/** + * This class provides the functinality to publish and subscribe message to + * kafka + * + * @author Ramkumar Sembaiyam + * + */ +@Service +public class EventsServiceImpl implements EventsService { + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(EventsServiceImpl.class); + private static final String BATCH_LENGTH = "event.batch.length"; + private static final String TRANSFER_ENCODING = "Transfer-Encoding"; + private static final String TIMEOUT_PROPERTY = "timeout"; + private static final String SUBSCRIBE_ACTION = "sub"; + private static final String PUBLISH_ACTION = "pub"; + + @Autowired + private DMaaPErrorMessages errorMessages; + + String getPropertyFromAJSCmap(String propertyKey) { + return AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, propertyKey); + } + + public DMaaPErrorMessages getErrorMessages() { + return errorMessages; + } + + public void setErrorMessages(DMaaPErrorMessages errorMessages) { + this.errorMessages = errorMessages; + } + + /** + * @param ctx + * @param topic + * @param consumerGroup + * @param clientId + * @throws ConfigDbException, + * TopicExistsException, 
AccessDeniedException, + * UnavailableException, CambriaApiException, IOException + * + * + */ + @Override + public void getEvents(DMaaPContext ctx, String topic, String consumerGroup, String clientId) + throws ConfigDbException, AccessDeniedException, UnavailableException, + CambriaApiException, IOException { + + final long startTime = System.currentTimeMillis(); + final HttpServletRequest req = ctx.getRequest(); + final LogWrap logger = new LogWrap(topic, consumerGroup, clientId); + final String remoteHost = req.getRemoteHost(); + ErrorResponseProvider errRespProvider = new ErrorResponseProvider.Builder().withErrorMessages(errorMessages) + .withTopic(topic).withConsumerGroup(consumerGroup).withClient(clientId).withRemoteHost(remoteHost).build(); + + validateIpBlacklist(errRespProvider, ctx); + + final Topic metaTopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + if (metaTopic == null) { + throw new CambriaApiException(errRespProvider.getTopicNotFoundError()); + } + + boolean isAAFTopic = authorizeClientWhenNeeded(ctx, metaTopic, topic, errRespProvider, SUBSCRIBE_ACTION); + + final long elapsedMs1 = System.currentTimeMillis() - startTime; + logger.info("Time taken in getEvents Authorization " + elapsedMs1 + " ms for " + topic + " " + consumerGroup + + " " + clientId); + + verifyHostId(); + final boolean pretty = isPrettyPrintEnabled(); + final boolean withMeta = isMetaOffsetEnabled(); + int timeoutMs = getMessageTimeout(req); + int limit = getMessageLimit(req); + String topicFilter = (null != req.getParameter("filter")) ? 
req.getParameter("filter") : CambriaConstants.kNoFilter; + logger.info("fetch: timeout=" + timeoutMs + ", limit=" + limit + ", filter=" + topicFilter + " from Remote host "+ctx.getRequest().getRemoteHost()); + + Consumer consumer = null; + try { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + final DMaaPCambriaLimiter rl = ctx.getConfigReader().getfRateLimiter(); + rl.onCall(topic, consumerGroup, clientId, remoteHost); + consumer = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs, + remoteHost); + CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(consumer).timeout(timeoutMs) + .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); + coes.setDmaapContext(ctx); + coes.setTopic(metaTopic); + coes.setTransEnabled(isTransEnabled() || isAAFTopic); + coes.setTopicStyle(isAAFTopic); + final long elapsedMs2 = System.currentTimeMillis() - startTime; + logger.info("Time taken in getEvents getConsumerFor " + elapsedMs2 + " ms for " + topic + " " + + consumerGroup + " " + clientId); + + respondOkWithStream(ctx, coes); + // No IOException thrown during respondOkWithStream, so commit the + // new offsets to all the brokers + consumer.commitOffsets(); + final int sent = coes.getSentCount(); + metricsSet.consumeTick(sent); + rl.onSend(topic, consumerGroup, clientId, sent); + final long elapsedMs = System.currentTimeMillis() - startTime; + logger.info("Sent " + sent + " msgs in " + elapsedMs + " ms; committed to offset " + consumer.getOffset() + " for " + + topic + " " + consumerGroup + " " + clientId + " on to the server " + + remoteHost); + + } catch (UnavailableException excp) { + logger.warn(excp.getMessage(), excp); + ErrorResponse errRes = errRespProvider.getServiceUnavailableError(excp.getMessage()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (ConcurrentModificationException excp1) { + LOG.info(excp1.getMessage() + 
"on " + topic + " " + consumerGroup + " ****** " + clientId + " from Remote"+remoteHost); + ErrorResponse errRes = errRespProvider.getConcurrentModificationError(); + logger.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (Exception excp) { + logger.info("Couldn't respond to client, closing cambria consumer " + " " + topic + " " + consumerGroup + + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + " ****** " + excp); + ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); + ErrorResponse errRes = errRespProvider.getGenericError(excp.getMessage()); + logger.info(errRes.toString()); + throw new CambriaApiException(errRes); + } finally { + if (consumer != null && !isCacheEnabled()) { + try { + consumer.close(); + } catch (Exception e) { + logger.info("***Exception occurred in getEvents finally block while closing the consumer " + " " + + topic + " " + consumerGroup + " " + clientId + " " + HttpStatus.SC_SERVICE_UNAVAILABLE + + " " + e); + } + } + } + } + + private void validateIpBlacklist(ErrorResponseProvider errResponseProvider, DMaaPContext ctx) throws CambriaApiException { + final String remoteAddr = Utils.getRemoteAddress(ctx); + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + ErrorResponse errRes = errResponseProvider.getIpBlacklistedError(remoteAddr); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + } + + private boolean authorizeClientWhenNeeded(DMaaPContext ctx, Topic metaTopic, String topicName, + ErrorResponseProvider errRespProvider, String action) throws CambriaApiException, AccessDeniedException { + + boolean isAAFTopic = false; + String metricTopicName = getMetricTopicName(); + if(!metricTopicName.equalsIgnoreCase(topicName)) { + if(isCadiEnabled() && isTopicNameEnforcedAaf(topicName)) { + isAAFTopic = true; + DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + String permission = aaf.aafPermissionString(topicName, 
action); + if (!aaf.aafAuthentication(ctx.getRequest(), permission)) { + ErrorResponse errRes = errRespProvider.getAafAuthorizationError(permission, action); + LOG.info(errRes.toString()); + throw new DMaaPAccessDeniedException(errRes); + + } + } else if(metaTopic!=null && null != metaTopic.getOwner() && !metaTopic.getOwner().isEmpty()) { + final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + if(SUBSCRIBE_ACTION.equals(action)) { + metaTopic.checkUserRead(user); + } else if(PUBLISH_ACTION.equals(action)) { + metaTopic.checkUserWrite(user); + } + } + } + return isAAFTopic; + } + + boolean isCadiEnabled() { + return Utils.isCadiEnabled(); + } + + void respondOkWithStream(DMaaPContext ctx, StreamWriter coes) throws IOException{ + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + DMaaPResponseBuilder.respondOkWithStream(ctx, MediaType.APPLICATION_JSON, coes); + } + + private int getMessageLimit(HttpServletRequest request) { + return NumberUtils.toInt(request.getParameter("limit"), CambriaConstants.kNoLimit); + } + + private int getMessageTimeout(HttpServletRequest request) { + String timeoutMsAsString = getPropertyFromAJSCmap(TIMEOUT_PROPERTY); + int defaultTimeoutMs = StringUtils.isNotEmpty(timeoutMsAsString) ? NumberUtils.toInt(timeoutMsAsString, CambriaConstants.kNoTimeout) : + CambriaConstants.kNoTimeout; + + String timeoutProperty = request.getParameter(TIMEOUT_PROPERTY); + return timeoutProperty != null ? 
NumberUtils.toInt(timeoutProperty, defaultTimeoutMs) : defaultTimeoutMs; + } + + private boolean isPrettyPrintEnabled() { + return rrConvertor.convertToBooleanBroad(getPropertyFromAJSCmap("pretty")); + } + + private boolean isMetaOffsetEnabled() { + return rrConvertor.convertToBooleanBroad(getPropertyFromAJSCmap( "meta")); + } + + private boolean isTopicNameEnforcedAaf(String topicName) { + String topicNameStd = getPropertyFromAJSCmap("enforced.topic.name.AAF"); + return StringUtils.isNotEmpty(topicNameStd) && topicName.startsWith(topicNameStd); + } + + private boolean isCacheEnabled() { + String cachePropsSetting = getPropertyFromAJSCmap(ConsumerFactory.kSetting_EnableCache); + return StringUtils.isNotEmpty(cachePropsSetting) ? Boolean.parseBoolean(cachePropsSetting) : ConsumerFactory.kDefault_IsCacheEnabled; + } + + private void verifyHostId() { + String lhostId = getPropertyFromAJSCmap("clusterhostid"); + if (StringUtils.isEmpty(lhostId)) { + try { + InetAddress.getLocalHost().getCanonicalHostName(); + } catch (UnknownHostException e) { + LOG.warn("Unknown Host Exception error occurred while getting getting hostid", e); + } + + } + } + + private String getMetricTopicName() { + String metricTopicFromProps = getPropertyFromAJSCmap("metrics.send.cambria.topic"); + return StringUtils.isNotEmpty(metricTopicFromProps) ? 
metricTopicFromProps : "msgrtr.apinode.metrics.dmaap"; + } + + /** + * @throws missingReqdSetting + * + */ + @Override + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, + CambriaApiException, IOException, missingReqdSetting { + + final long startMs = System.currentTimeMillis(); + String remoteHost = ctx.getRequest().getRemoteHost(); + ErrorResponseProvider errRespProvider = new ErrorResponseProvider.Builder().withErrorMessages(errorMessages) + .withTopic(topic).withRemoteHost(remoteHost).withPublisherIp(remoteHost) + .withPublisherId(Utils.getUserApiKey(ctx.getRequest())).build(); + + validateIpBlacklist(errRespProvider, ctx); + + final Topic metaTopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + final boolean isAAFTopic = authorizeClientWhenNeeded(ctx, metaTopic, topic, errRespProvider, PUBLISH_ACTION); + + final HttpServletRequest req = ctx.getRequest(); + boolean chunked = isRequestedChunk(req); + String mediaType = getMediaType(req); + boolean transactionRequired = isTransactionIdRequired(); + + if (isAAFTopic || transactionRequired) { + pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); + } else { + pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + LOG.info("Overall Response time - Published " + " msgs in " + totalMs + " ms for topic " + topic); + } + + private boolean isRequestedChunk(HttpServletRequest request) { + return null != request.getHeader(TRANSFER_ENCODING) && + request.getHeader(TRANSFER_ENCODING).contains("chunked"); + } + + private String getMediaType(HttpServletRequest request) { + String mediaType = request.getContentType(); + if (mediaType == null || mediaType.length() == 0) { + return MimeTypes.kAppGenericBinary; + } + return 
mediaType.replace("; charset=UTF-8", "").trim(); + } + + private boolean isTransactionIdRequired() { + String transIdReqProperty = getPropertyFromAJSCmap("transidUEBtopicreqd"); + return StringUtils.isNotEmpty(transIdReqProperty) && transIdReqProperty.equalsIgnoreCase("true"); + } + + /** + * + * @param ctx + * @param topic + * @param msg + * @param defaultPartition + * @param chunked + * @param mediaType + * @throws ConfigDbException + * @throws AccessDeniedException + * @throws TopicExistsException + * @throws CambriaApiException + * @throws IOException + */ + private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, + String mediaType) + throws ConfigDbException, AccessDeniedException, CambriaApiException, IOException { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + long maxEventBatch = 1024L* 16; + String batchlen = getPropertyFromAJSCmap( BATCH_LENGTH); + if (null != batchlen && !batchlen.isEmpty()) + maxEventBatch = Long.parseLong(batchlen); + // long maxEventBatch = + + final LinkedList<message> batch = new LinkedList<>(); + // final ArrayList<KeyedMessage<String, String>> kms = new + + final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<>(); + try { + // for each message... 
+ message m = null; + while ((m = events.next()) != null) { + // add the message to the batch + batch.add(m); + // final KeyedMessage<String, String> data = new + // KeyedMessage<String, String>(topic, m.getKey(), + + // kms.add(data); + final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(), + m.getMessage()); + + pms.add(data); + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow > maxEventBatch) { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + // kms.clear(); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + // kms.clear(); + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs in " + totalMs + " ms for topic " + topic + " from server " + + ctx.getRequest().getRemoteHost()); + + // build a responseP + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + respondOk(ctx, response); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + + } + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + 
errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count + + "." + errorMsg, + null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null, + null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } + } + + /** + * + * @param ctx + * @param inputStream + * @param topic + * @param partitionKey + * @param requestTime + * @param chunked + * @param mediaType + * @throws ConfigDbException + * @throws AccessDeniedException + * @throws TopicExistsException + * @throws IOException + * @throws CambriaApiException + */ + private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic, + final String partitionKey, final String requestTime, final boolean chunked, final String mediaType) + throws ConfigDbException, AccessDeniedException, IOException, CambriaApiException { + + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + long maxEventBatch = 1024L * 16; + String evenlen = getPropertyFromAJSCmap( BATCH_LENGTH); + if (null != evenlen && !evenlen.isEmpty()) + maxEventBatch = Long.parseLong(evenlen); + // final long maxEventBatch = + + final LinkedList<message> batch = new LinkedList<message>(); + // final ArrayList<KeyedMessage<String, String>> kms = new + + final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>(); + message m = null; + int messageSequence = 1; + Long batchId = 1L; + final boolean transactionEnabled = true; + int publishBatchCount = 0; + SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS"); + + // LOG.warn("Batch Start Id: " + + + try { + // for each message... 
+ batchId = DMaaPContext.getBatchID(); + + String responseTransactionId = null; + + while ((m = events.next()) != null) { + + // LOG.warn("Batch Start Id: " + + + + addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId, + transactionEnabled); + messageSequence++; + + + batch.add(m); + + responseTransactionId = m.getLogDetails().getTransactionId(); + + //JSONObject jsonObject = new JSONObject(); + //jsonObject.put("msgWrapMR", m.getMessage()); + //jsonObject.put("transactionId", responseTransactionId); + // final KeyedMessage<String, String> data = new + // KeyedMessage<String, String>(topic, m.getKey(), + + + final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(), + m.getMessage()); + + pms.add(data); + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow >= maxEventBatch) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." 
+ + errorMessages.getPublishMsgCount() + count + "." + errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + publishBatchCount = sizeNow; + count += sizeNow; + + String endTime = sdf.format(new Date()); + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + + ",Batch End Time=" + endTime + "]"); + batchId = DMaaPContext.getBatchID(); + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + // ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, + + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + + String endTime = sdf.format(new Date()); + publishBatchCount = sizeNow; + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId + + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" + + endTime + "]"); + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs(with transaction id) in " + totalMs + " ms for topic " + topic); + + if (null != responseTransactionId) { + ctx.getResponse().setHeader("transactionId", Utils.getResponseTransactionId(responseTransactionId)); + } + + // build a response + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("transactionId", responseTransactionId); + response.put("serverTimeMs", totalMs); + respondOk(ctx, response); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp instanceof CambriaApiException) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + } + + /** + * + * @param msg + * @param topic + * @param request + * @param messageCreationTime + * @param messageSequence + * @param batchId + * @param transactionEnabled + */ + private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, + final String messageCreationTime, final int messageSequence, final Long batchId, + final boolean transactionEnabled) { + LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, + transactionEnabled); + logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); + msg.setTransactionEnabled(transactionEnabled); + msg.setLogDetails(logDetails); + } + + void respondOk(DMaaPContext ctx, JSONObject response) throws IOException { + DMaaPResponseBuilder.respondOk(ctx, response); + } + + /** + * + * @author anowarul.islam + * + */ + private static class LogWrap { + private final String fId; + + /** + * constructor initialization + * + * @param topic + * @param cgroup + * @param cid + */ + public LogWrap(String topic, String cgroup, String cid) { + fId = "[" + topic + "/" + cgroup + "/" + cid + "] "; + } + + /** + * + * @param msg + */ + public void info(String msg) { + LOG.info(fId + msg); + } + + /** + * + * @param msg + * @param t + */ + public void warn(String msg, Exception t) { + LOG.warn(fId + msg, t); + } + + } + + public boolean isTransEnabled() { + String istransidUEBtopicreqd = getPropertyFromAJSCmap("transidUEBtopicreqd"); + boolean istransidreqd = false; + if ((null != istransidUEBtopicreqd && istransidUEBtopicreqd.equalsIgnoreCase("true"))) { + istransidreqd = true; + } + + return istransidreqd; + + } + + private static LogDetails generateLogDetails(final String topicName, 
HttpServletRequest request, + final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { + LogDetails logDetails = new LogDetails(); + logDetails.setTopicId(topicName); + logDetails.setMessageTimestamp(messageTimestamp); + logDetails.setPublisherId(Utils.getUserApiKey(request)); + logDetails.setPublisherIp(request.getRemoteHost()); + logDetails.setMessageBatchId(batchId); + logDetails.setMessageSequence(String.valueOf(messageSequence)); + logDetails.setTransactionEnabled(transactionEnabled); + logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); + logDetails.setServerIp(request.getLocalAddr()); + return logDetails; + } + + +}
\ No newline at end of file diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java new file mode 100644 index 0000000..bfa48cf --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MMServiceImpl.java @@ -0,0 +1,596 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.drumlin.service.standards.MimeTypes; +import com.att.nsa.drumlin.till.nv.rrNvReadable.missingReqdSetting; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import com.att.nsa.util.rrConvertor; +import org.apache.http.HttpStatus; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.json.JSONObject; +import org.json.JSONTokener; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.Consumer; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory; +import org.onap.dmaap.dmf.mr.backends.ConsumerFactory.UnavailableException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.backends.Publisher.message; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.LogDetails; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.resources.CambriaEventSet; +import org.onap.dmaap.dmf.mr.resources.CambriaOutboundEventStream; +import org.onap.dmaap.dmf.mr.service.MMService; +import org.onap.dmaap.dmf.mr.utils.ConfigurationReader; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import 
org.springframework.stereotype.Service; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.Context; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.LinkedList; + + +@Service +public class MMServiceImpl implements MMService { + private static final String BATCH_LENGTH = "event.batch.length"; + private static final String TRANSFER_ENCODING = "Transfer-Encoding"; + //private static final Logger LOG = Logger.getLogger(MMServiceImpl.class); + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MMServiceImpl.class); + @Autowired + private DMaaPErrorMessages errorMessages; + + @Autowired + @Qualifier("configurationReader") + private ConfigurationReader configReader; + + // HttpServletRequest object + @Context + private HttpServletRequest request; + + // HttpServletResponse object + @Context + private HttpServletResponse response; + + @Override + public void addWhiteList() { + + } + + @Override + public void removeWhiteList() { + + } + + @Override + public void listWhiteList() { + + } + + @Override + public String subscribe(DMaaPContext ctx, String topic, String consumerGroup, String clientId) + throws ConfigDbException, TopicExistsException, AccessDeniedException, UnavailableException, + CambriaApiException, IOException { + + + final HttpServletRequest req = ctx.getRequest(); + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + + // was this host blacklisted? + final String remoteAddr = Utils.getRemoteAddress(ctx); + + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. 
Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + int limit = CambriaConstants.kNoLimit; + + if (req.getParameter("limit") != null) { + limit = Integer.parseInt(req.getParameter("limit")); + } + limit = 1; + + int timeoutMs = CambriaConstants.kNoTimeout; + String strtimeoutMS = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "timeout"); + if (strtimeoutMS != null) + timeoutMs = Integer.parseInt(strtimeoutMS); + // int timeoutMs = ctx.getConfigReader().getSettings().getInt("timeout", + + if (req.getParameter("timeout") != null) { + timeoutMs = Integer.parseInt(req.getParameter("timeout")); + } + + // By default no filter is applied if filter is not passed as a + // parameter in the request URI + String topicFilter = CambriaConstants.kNoFilter; + if (null != req.getParameter("filter")) { + topicFilter = req.getParameter("filter"); + } + // pretty to print the messaages in new line + String prettyval = "0"; + String strPretty = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "pretty"); + if (null != strPretty) + prettyval = strPretty; + + String metaval = "0"; + String strmeta = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "meta"); + if (null != strmeta) + metaval = strmeta; + + final boolean pretty = rrConvertor.convertToBooleanBroad(prettyval); + // withMeta to print offset along with message + final boolean withMeta = rrConvertor.convertToBooleanBroad(metaval); + + // is this user allowed to read this topic? + //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + if (metatopic == null) { + // no such topic. 
+ ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_NOT_FOUND, + DMaaPResponseCode.RESOURCE_NOT_FOUND.getResponseCode(), + errorMessages.getTopicNotExist() + "-[" + topic + "]", null, Utils.getFormattedDate(new Date()), + topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + //String metricTopicname = com.att.ajsc.filemonitor.AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, "metrics.send.cambria.topic"); + /* + * if (null==metricTopicname) + * metricTopicname="msgrtr.apinode.metrics.dmaap"; //else if(user!=null) + * if(null==ctx.getRequest().getHeader("Authorization")&& + * !topic.equalsIgnoreCase(metricTopicname)) { if (null != + * metatopic.getOwner() && !("".equals(metatopic.getOwner()))){ // check + * permissions metatopic.checkUserRead(user); } } + */ + + Consumer c = null; + try { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + c = ctx.getConfigReader().getfConsumerFactory().getConsumerFor(topic, consumerGroup, clientId, timeoutMs,ctx.getRequest().getRemoteHost()); + + final CambriaOutboundEventStream coes = new CambriaOutboundEventStream.Builder(c).timeout(timeoutMs) + .limit(limit).filter(topicFilter).pretty(pretty).withMeta(withMeta).build(); + coes.setDmaapContext(ctx); + coes.setTopic(metatopic); + + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + + try { + coes.write(baos); + } catch (Exception ex) { + + } + + c.commitOffsets(); + final int sent = coes.getSentCount(); + + metricsSet.consumeTick(sent); + + } catch (UnavailableException excp) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + errorMessages.getServerUnav() + excp.getMessage(), null, Utils.getFormattedDate(new Date()), topic, + null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch 
(CambriaApiException excp) { + + throw excp; + } catch (Exception excp) { + + ctx.getConfigReader().getfConsumerFactory().destroyConsumer(topic, consumerGroup, clientId); + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_SERVICE_UNAVAILABLE, + DMaaPResponseCode.SERVER_UNAVAILABLE.getResponseCode(), + "Couldn't respond to client, closing cambria consumer" + excp.getMessage(), null, + Utils.getFormattedDate(new Date()), topic, null, null, clientId, ctx.getRequest().getRemoteHost()); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } finally { + + boolean kSetting_EnableCache = ConsumerFactory.kDefault_IsCacheEnabled; + String strkSetting_EnableCache = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + ConsumerFactory.kSetting_EnableCache); + if (null != strkSetting_EnableCache) + kSetting_EnableCache = Boolean.parseBoolean(strkSetting_EnableCache); + + if (!kSetting_EnableCache && (c != null)) { + c.close(); + + } + } + return baos.toString(); + } + + @Override + public void pushEvents(DMaaPContext ctx, final String topic, InputStream msg, final String defaultPartition, + final String requestTime) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException, missingReqdSetting { + + //final NsaApiKey user = DMaaPAuthenticatorImpl.getAuthenticatedUser(ctx); + //final Topic metatopic = ctx.getConfigReader().getfMetaBroker().getTopic(topic); + + final String remoteAddr = Utils.getRemoteAddress(ctx); + + if (ctx.getConfigReader().getfIpBlackList().contains(remoteAddr)) { + + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Source address [" + remoteAddr + "] is blacklisted. 
Please contact the cluster management team.", + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + + String topicNameStd = null; + + topicNameStd = com.att.ajsc.beans.PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, + "enforced.topic.name.AAF"); + String metricTopicname = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, + "metrics.send.cambria.topic"); + if (null == metricTopicname) + metricTopicname = "msgrtr.apinode.metrics.dmaap"; + boolean topicNameEnforced = false; + if (null != topicNameStd && topic.startsWith(topicNameStd)) { + topicNameEnforced = true; + } + + final HttpServletRequest req = ctx.getRequest(); + + boolean chunked = false; + if (null != req.getHeader(TRANSFER_ENCODING)) { + chunked = req.getHeader(TRANSFER_ENCODING).contains("chunked"); + } + + String mediaType = req.getContentType(); + if (mediaType == null || mediaType.length() == 0) { + mediaType = MimeTypes.kAppGenericBinary; + } + + if (mediaType.contains("charset=UTF-8")) { + mediaType = mediaType.replace("; charset=UTF-8", "").trim(); + } + + if (!topic.equalsIgnoreCase(metricTopicname)) { + pushEventsWithTransaction(ctx, msg, topic, defaultPartition, requestTime, chunked, mediaType); + } else { + pushEvents(ctx, topic, msg, defaultPartition, chunked, mediaType); + } + } + + private static void addTransactionDetailsToMessage(message msg, final String topic, HttpServletRequest request, + final String messageCreationTime, final int messageSequence, final Long batchId, + final boolean transactionEnabled) { + LogDetails logDetails = generateLogDetails(topic, request, messageCreationTime, messageSequence, batchId, + transactionEnabled); + logDetails.setMessageLengthInBytes(Utils.messageLengthInBytes(msg.getMessage())); + msg.setTransactionEnabled(transactionEnabled); + msg.setLogDetails(logDetails); + } + + 
private static LogDetails generateLogDetails(final String topicName, HttpServletRequest request, + final String messageTimestamp, int messageSequence, Long batchId, final boolean transactionEnabled) { + LogDetails logDetails = new LogDetails(); + logDetails.setTopicId(topicName); + logDetails.setMessageTimestamp(messageTimestamp); + logDetails.setPublisherId(Utils.getUserApiKey(request)); + logDetails.setPublisherIp(request.getRemoteHost()); + logDetails.setMessageBatchId(batchId); + logDetails.setMessageSequence(String.valueOf(messageSequence)); + logDetails.setTransactionEnabled(transactionEnabled); + logDetails.setTransactionIdTs(Utils.getFormattedDate(new Date())); + logDetails.setServerIp(request.getLocalAddr()); + return logDetails; + } + + private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition, boolean chunked, + String mediaType) throws ConfigDbException, AccessDeniedException, TopicExistsException, + CambriaApiException, IOException { + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + + long maxEventBatch = 1024L * 16; + String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); + if (null != batchlen) + maxEventBatch = Long.parseLong(batchlen); + + // long maxEventBatch = + // ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16); + final LinkedList<message> batch = new LinkedList<message>(); + final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>(); + //final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>(); + + try { + // for each message... 
+ message m = null; + while ((m = events.next()) != null) { + // add the message to the batch + batch.add(m); + final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(), + m.getMessage()); + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow > maxEventBatch) { + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic); + + // build a responseP + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + // DMaaPResponseBuilder.respondOk(ctx, response); + + } catch (Exception excp) { + + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + + } + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount() + count + + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null, + null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } + } + + private void pushEventsWithTransaction(DMaaPContext ctx, InputStream inputStream, final String topic, + final String partitionKey, final String requestTime, final boolean chunked, final String mediaType) + throws ConfigDbException, AccessDeniedException, TopicExistsException, IOException, + CambriaApiException { + + final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics(); + + // setup the event set + final CambriaEventSet events = new CambriaEventSet(mediaType, inputStream, chunked, partitionKey); + + // start processing, building a batch to push to the backend + final long startMs = System.currentTimeMillis(); + long count = 0; + long maxEventBatch = 1024L * 16L; + String evenlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH); + if (null != evenlen) + maxEventBatch = Long.parseLong(evenlen); + + final LinkedList<message> batch = new LinkedList<message>(); + final ArrayList<ProducerRecord<String, String>> pms = new ArrayList<ProducerRecord<String, String>>(); + + message m = null; + int messageSequence = 1; + Long batchId = 1L; + final boolean transactionEnabled = true; + int publishBatchCount = 0; + SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SS"); + + // LOG.warn("Batch Start Id: " + + // Utils.getFromattedBatchSequenceId(batchId)); + try { + // for each message... 
+ batchId = DMaaPContext.getBatchID(); + + String responseTransactionId = null; + + while ((m = events.next()) != null) { + + // LOG.warn("Batch Start Id: " + + // Utils.getFromattedBatchSequenceId(batchId)); + + addTransactionDetailsToMessage(m, topic, ctx.getRequest(), requestTime, messageSequence, batchId, + transactionEnabled); + messageSequence++; + + // add the message to the batch + batch.add(m); + + responseTransactionId = m.getLogDetails().getTransactionId(); + + JSONObject jsonObject = new JSONObject(); + jsonObject.put("message", m.getMessage()); + jsonObject.put("transactionId", responseTransactionId); + final ProducerRecord<String, String> data = new ProducerRecord<String, String>(topic, m.getKey(), + m.getMessage()); + pms.add(data); + + // check if the batch is full + final int sizeNow = batch.size(); + if (sizeNow >= maxEventBatch) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + batch.clear(); + metricsSet.publishTick(sizeNow); + publishBatchCount = sizeNow; + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + + batchId + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + + ",Batch End Time=" + endTime + "]"); + batchId = DMaaPContext.getBatchID(); + } + } + + // send the pending batch + final int sizeNow = batch.size(); + if (sizeNow > 0) { + String startTime = sdf.format(new Date()); + LOG.info("Batch Start Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch Start Id=" + + batchId + "]"); + try { + ctx.getConfigReader().getfPublisher().sendBatchMessageNew(topic, pms); + // transactionLogs(batch); + for (message msg : batch) { + LogDetails logDetails = msg.getLogDetails(); + LOG.info("Publisher Log Details : " + logDetails.getPublisherLogDetails()); + } + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, + DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + pms.clear(); + metricsSet.publishTick(sizeNow); + count += sizeNow; + // batchId++; + String endTime = sdf.format(new Date()); + publishBatchCount = sizeNow; + LOG.info("Batch End Details:[serverIp=" + ctx.getRequest().getLocalAddr() + ",Batch End Id=" + batchId + + ",Batch Total=" + publishBatchCount + ",Batch Start Time=" + startTime + ",Batch End Time=" + + endTime + "]"); + } + + final long endMs = System.currentTimeMillis(); + final long totalMs = endMs - startMs; + + LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic); + + // build a response + final JSONObject response = new JSONObject(); + response.put("count", count); + response.put("serverTimeMs", totalMs); + + } catch (Exception excp) { + int status = HttpStatus.SC_NOT_FOUND; + String errorMsg = null; + if (excp.getClass().toString().contains("CambriaApiException")) { + status = ((CambriaApiException) excp).getStatus(); + JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody()); + JSONObject errObject = new JSONObject(jsonTokener); + errorMsg = (String) errObject.get("message"); + } + + ErrorResponse errRes = new ErrorResponse(status, DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(), + "Transaction-" + errorMessages.getPublishMsgError() + ":" + topic + "." + + errorMessages.getPublishMsgCount() + count + "." 
+ errorMsg, + null, Utils.getFormattedDate(new Date()), topic, Utils.getUserApiKey(ctx.getRequest()), + ctx.getRequest().getRemoteHost(), null, null); + LOG.info(errRes.toString()); + throw new CambriaApiException(errRes); + } + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java new file mode 100644 index 0000000..3774a47 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/MetricsServiceImpl.java @@ -0,0 +1,114 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.metrics.CdmMeasuredItem; +import org.json.JSONObject; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.backends.MetricsSet; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.MetricsService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.springframework.stereotype.Component; + +import java.io.IOException; + +/** + * + * + * This will provide all the generated metrics details also it can provide the + * get metrics details + * + * + * @author nilanjana.maity + * + * + */ +@Component +public class MetricsServiceImpl implements MetricsService { + + + private static final EELFLogger LOG = EELFManager.getInstance().getLogger(MetricsService.class); + /** + * + * + * @param ctx + * @throws IOException + * + * + * get Metric details + * + */ + @Override + + public void get(DMaaPContext ctx) throws IOException { + LOG.info("Inside : MetricsServiceImpl : get()"); + final MetricsSet metrics = ctx.getConfigReader().getfMetrics(); + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + final JSONObject result = metrics.toJson(); + DMaaPResponseBuilder.respondOk(ctx, result); + LOG.info("============ Metrics generated : " + result.toString() + "================="); + + } + + + @Override + /** + * + * get Metric by name + * + * + * @param ctx + * @param name + * @throws IOException + * @throws CambriaApiException + * + * + */ + public void getMetricByName(DMaaPContext ctx, String name) throws IOException, CambriaApiException { + LOG.info("Inside : MetricsServiceImpl : getMetricByName()"); + final MetricsSet metrics = ctx.getConfigReader().getfMetrics(); + + final CdmMeasuredItem item = metrics.getItem(name); + /** + * check if item is null + */ + if 
(item == null) { + throw new CambriaApiException(404, "No metric named [" + name + "]."); + } + + final JSONObject entry = new JSONObject(); + entry.put("summary", item.summarize()); + entry.put("raw", item.getRawValueString()); + + DMaaPResponseBuilder.setNoCacheHeadings(ctx); + + final JSONObject result = new JSONObject(); + result.put(name, entry); + + DMaaPResponseBuilder.respondOk(ctx, result); + LOG.info("============ Metrics generated : " + entry.toString() + "================="); + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java new file mode 100644 index 0000000..2235098 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TopicServiceImpl.java @@ -0,0 +1,637 @@ +/* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Copyright (C) 2019 Nokia Intellectual Property. All rights reserved. + * ================================================================================= + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + * + */ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.ajsc.beans.PropertiesMapBean; +import com.att.ajsc.filemonitor.AJSCPropertiesMap; +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.NsaAcl; +import com.att.nsa.security.NsaApiKey; +import com.att.nsa.security.ReadWriteSecuredResource.AccessDeniedException; +import joptsimple.internal.Strings; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.math.NumberUtils; +import org.apache.http.HttpStatus; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.beans.TopicBean; +import org.onap.dmaap.dmf.mr.constants.CambriaConstants; +import org.onap.dmaap.dmf.mr.exception.DMaaPAccessDeniedException; +import org.onap.dmaap.dmf.mr.exception.DMaaPErrorMessages; +import org.onap.dmaap.dmf.mr.exception.DMaaPResponseCode; +import org.onap.dmaap.dmf.mr.exception.ErrorResponse; +import org.onap.dmaap.dmf.mr.metabroker.Broker.TopicExistsException; +import org.onap.dmaap.dmf.mr.metabroker.Broker1; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticator; +import org.onap.dmaap.dmf.mr.security.DMaaPAAFAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.security.DMaaPAuthenticatorImpl; +import org.onap.dmaap.dmf.mr.service.TopicService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.onap.dmaap.dmf.mr.utils.Utils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; 
+import java.security.Principal; + +/** + * @author muzainulhaque.qazi + * + */ +@Service +public class TopicServiceImpl implements TopicService { + + private static final String TOPIC_CREATE_OP = "create"; + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(TopicServiceImpl.class); + @Autowired + private DMaaPErrorMessages errorMessages; + + public DMaaPErrorMessages getErrorMessages() { + return errorMessages; + } + + public void setErrorMessages(DMaaPErrorMessages errorMessages) { + this.errorMessages = errorMessages; + } + + + String getPropertyFromAJSCbean(String propertyKey) { + return PropertiesMapBean.getProperty(CambriaConstants.msgRtr_prop, propertyKey); + } + + String getPropertyFromAJSCmap(String propertyKey) { + return AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, propertyKey); + } + + NsaApiKey getDmaapAuthenticatedUser(DMaaPContext dmaapContext) { + return DMaaPAuthenticatorImpl.getAuthenticatedUser(dmaapContext); + } + + void respondOk(DMaaPContext context, String msg) { + DMaaPResponseBuilder.respondOkWithHtml(context, msg); + } + + void respondOk(DMaaPContext context, JSONObject json) throws IOException { + DMaaPResponseBuilder.respondOk(context, json); + } + + boolean isCadiEnabled() { + return Utils.isCadiEnabled(); + } + /** + * @param dmaapContext + * @throws JSONException + * @throws ConfigDbException + * @throws IOException + * + */ + @Override + public void getTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { + LOGGER.info("Fetching list of all the topics."); + JSONObject json = new JSONObject(); + + JSONArray topicsList = new JSONArray(); + + for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { + topicsList.put(topic.getName()); + } + + json.put("topics", topicsList); + + LOGGER.info("Returning list of all the topics."); + respondOk(dmaapContext, json); + + } + + /** + * @param dmaapContext + * @throws JSONException + * @throws ConfigDbException + * 
@throws IOException + * + */ + public void getAllTopics(DMaaPContext dmaapContext) throws JSONException, ConfigDbException, IOException { + + LOGGER.info("Fetching list of all the topics."); + JSONObject json = new JSONObject(); + + JSONArray topicsList = new JSONArray(); + + for (Topic topic : getMetaBroker(dmaapContext).getAllTopics()) { + JSONObject obj = new JSONObject(); + obj.put("topicName", topic.getName()); + + obj.put("owner", topic.getOwner()); + obj.put("txenabled", topic.isTransactionEnabled()); + topicsList.put(obj); + } + + json.put("topics", topicsList); + + LOGGER.info("Returning list of all the topics."); + respondOk(dmaapContext, json); + + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + @Override + public void getTopic(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + + LOGGER.info("Fetching details of topic " + topicName); + Topic t = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == t) { + LOGGER.error("Topic [" + topicName + "] does not exist."); + throw new TopicExistsException("Topic [" + topicName + "] does not exist."); + } + + JSONObject o = new JSONObject(); + o.put("name", t.getName()); + o.put("description", t.getDescription()); + + if (null != t.getOwners()) + o.put("owner", t.getOwners().iterator().next()); + if (null != t.getReaderAcl()) + o.put("readerAcl", aclToJson(t.getReaderAcl())); + if (null != t.getWriterAcl()) + o.put("writerAcl", aclToJson(t.getWriterAcl())); + + LOGGER.info("Returning details of topic " + topicName); + respondOk(dmaapContext, o); + + } + + /** + * @param dmaapContext + * @param topicBean + * @throws CambriaApiException + * @throws AccessDeniedException + * @throws IOException + * @throws TopicExistsException + * @throws JSONException + * + * + * + */ + @Override + public void createTopic(DMaaPContext dmaapContext, TopicBean 
topicBean) throws CambriaApiException, IOException { + String topicName = topicBean.getTopicName(); + LOGGER.info("Creating topic {}",topicName); + String key = authorizeClient(dmaapContext, topicName, TOPIC_CREATE_OP); + + try { + final int partitions = getValueOrDefault(topicBean.getPartitionCount(), "default.partitions"); + final int replicas = getValueOrDefault(topicBean.getReplicationCount(), "default.replicas"); + + final Topic t = getMetaBroker(dmaapContext).createTopic(topicName, topicBean.getTopicDescription(), + key, partitions, replicas, topicBean.isTransactionEnabled()); + + LOGGER.info("Topic {} created successfully. Sending response", topicName); + respondOk(dmaapContext, topicToJson(t)); + } catch (JSONException ex) { + + LOGGER.error("Failed to create topic "+ topicName +". Couldn't parse JSON data.", ex); + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, + DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson()); + LOGGER.info(errRes.toString()); + throw new CambriaApiException(errRes); + + } catch (ConfigDbException ex) { + + LOGGER.error("Failed to create topic "+ topicName +". Config DB Exception", ex); + ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_BAD_REQUEST, + DMaaPResponseCode.INCORRECT_JSON.getResponseCode(), errorMessages.getIncorrectJson()); + LOGGER.info(errRes.toString()); + throw new CambriaApiException(errRes); + } catch (Broker1.TopicExistsException ex) { + LOGGER.error( "Failed to create topic "+ topicName +". 
Topic already exists.",ex); + } + } + + private String authorizeClient(DMaaPContext dmaapContext, String topicName, String operation) throws DMaaPAccessDeniedException { + String clientId = Strings.EMPTY; + if(isCadiEnabled() && isTopicWithEnforcedAuthorization(topicName)) { + LOGGER.info("Performing AAF authorization for topic {} creation.", topicName); + String permission = buildPermission(topicName, operation); + DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + clientId = getAAFclientId(dmaapContext.getRequest()); + + if (!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) { + LOGGER.error("Failed to {} topic {}. Authorization failed for client {} and permission {}", + operation, topicName, clientId, permission); + throw new DMaaPAccessDeniedException(new ErrorResponse(HttpStatus.SC_UNAUTHORIZED, + DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + "Failed to "+ operation +" topic: Access Denied. User does not have permission to create topic with perm " + permission)); + } + } else if(operation.equals(TOPIC_CREATE_OP)){ + final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext); + clientId = (user != null) ? user.getKey() : Strings.EMPTY; + } + return clientId; + } + + private String getAAFclientId(HttpServletRequest request) { + Principal principal = request.getUserPrincipal(); + if (principal !=null) { + return principal.getName(); + } else { + LOGGER.warn("Performing AAF authorization but user has not been provided in request."); + return null; + } + } + + private boolean isTopicWithEnforcedAuthorization(String topicName) { + String enfTopicNamespace = getPropertyFromAJSCbean("enforced.topic.name.AAF"); + return enfTopicNamespace != null && topicName.startsWith(enfTopicNamespace); + } + + int getValueOrDefault(int value, String defaultProperty) { + int returnValue = value; + if (returnValue <= 0) { + String defaultValue = getPropertyFromAJSCmap(defaultProperty); + returnValue = StringUtils.isNotEmpty(defaultValue) ? 
NumberUtils.toInt(defaultValue) : 1; + returnValue = (returnValue <= 0) ? 1 : returnValue; + } + return returnValue; + } + + private String buildPermission(String topicName, String operation) { + String nameSpace = (topicName.indexOf('.') > 1) ? + topicName.substring(0, topicName.lastIndexOf('.')) : ""; + + String mrFactoryValue = getPropertyFromAJSCmap("msgRtr.topicfactory.aaf"); + return mrFactoryValue + nameSpace + "|" + operation; + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + * @throws CambriaApiException + * @throws AccessDeniedException + */ + @Override + public void deleteTopic(DMaaPContext dmaapContext, String topicName) throws IOException, ConfigDbException, + CambriaApiException, TopicExistsException, DMaaPAccessDeniedException, AccessDeniedException { + + LOGGER.info(" Deleting topic " + topicName); + authorizeClient(dmaapContext, topicName, "destroy"); + + final Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + if (topic == null) { + LOGGER.error("Failed to delete topic. Topic [" + topicName + "] does not exist."); + throw new TopicExistsException("Failed to delete topic. Topic [" + topicName + "] does not exist."); + } + + // metabroker.deleteTopic(topicName); + + LOGGER.info("Topic [" + topicName + "] deleted successfully. 
Sending response."); + respondOk(dmaapContext, "Topic [" + topicName + "] deleted successfully"); + } + + /** + * + * @param dmaapContext + * @return + */ + DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) { + return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker(); + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + * + */ + @Override + public void getPublishersByTopicName(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + LOGGER.info("Retrieving list of all the publishers for topic " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (topic == null) { + LOGGER.error("Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); + throw new TopicExistsException( + "Failed to retrieve publishers list for topic. Topic [" + topicName + "] does not exist."); + } + + final NsaAcl acl = topic.getWriterAcl(); + + LOGGER.info("Returning list of all the publishers for topic " + topicName + ". 
Sending response."); + respondOk(dmaapContext, aclToJson(acl)); + + } + + /** + * + * @param acl + * @return + */ + private static JSONObject aclToJson(NsaAcl acl) { + final JSONObject o = new JSONObject(); + if (acl == null) { + o.put("enabled", false); + o.put("users", new JSONArray()); + } else { + o.put("enabled", acl.isActive()); + + final JSONArray a = new JSONArray(); + for (String user : acl.getUsers()) { + a.put(user); + } + o.put("users", a); + } + return o; + } + + /** + * @param dmaapContext + * @param topicName + */ + @Override + public void getConsumersByTopicName(DMaaPContext dmaapContext, String topicName) + throws IOException, ConfigDbException, TopicExistsException { + LOGGER.info("Retrieving list of all the consumers for topic " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (topic == null) { + LOGGER.error("Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); + throw new TopicExistsException( + "Failed to retrieve consumers list for topic. Topic [" + topicName + "] does not exist."); + } + + final NsaAcl acl = topic.getReaderAcl(); + + LOGGER.info("Returning list of all the consumers for topic " + topicName + ". 
Sending response."); + respondOk(dmaapContext, aclToJson(acl)); + + } + + /** + * + * @param t + * @return + */ + static JSONObject topicToJson(Topic t) { + final JSONObject o = new JSONObject(); + + o.put("name", t.getName()); + o.put("description", t.getDescription()); + o.put("owner", t.getOwner()); + o.put("readerAcl", aclToJson(t.getReaderAcl())); + o.put("writerAcl", aclToJson(t.getWriterAcl())); + + return o; + } + + /** + * @param dmaapContext + * @param topicName @param producerId @throws + * ConfigDbException @throws IOException @throws + * TopicExistsException @throws AccessDeniedException @throws + * + */ + @Override + public void permitPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, CambriaApiException { + + LOGGER.info("Granting write access to producer [" + producerId + "] for topic " + topicName); + final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext); + + + // + // LOGGER.info("Authenticating the user, as ACL authentication is not + + //// String permission = + + // + + + + // { + // LOGGER.error("Failed to permit write access to producer [" + + // producerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" <Grant publish permissions> + + + + // } + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to permit write access to producer [" + producerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit write access to producer [" + producerId + + "] for topic. 
Topic [" + topicName + "] does not exist."); + } + + topic.permitWritesFromUser(producerId, user); + + LOGGER.info("Write access has been granted to producer [" + producerId + "] for topic [" + topicName + + "]. Sending response."); + respondOk(dmaapContext, "Write access has been granted to publisher."); + + } + + /** + * @param dmaapContext + * @param topicName + * @param producerId + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + * @throws AccessDeniedException + * @throws DMaaPAccessDeniedException + * + */ + @Override + public void denyPublisherForTopic(DMaaPContext dmaapContext, String topicName, String producerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Revoking write access to producer [" + producerId + "] for topic " + topicName); + final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext); + + // + //// String permission = + + // DMaaPAAFAuthenticator aaf = new DMaaPAAFAuthenticatorImpl(); + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to revoke write access to producer [" + + // producerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" <Revoke publish permissions> + + + // throw new DMaaPAccessDeniedException(errRes); + // + + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to revoke write access to producer [" + producerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to revoke write access to producer [" + producerId + + "] for topic. 
Topic [" + topicName + "] does not exist."); + } + + topic.denyWritesFromUser(producerId, user); + + LOGGER.info("Write access has been revoked to producer [" + producerId + "] for topic [" + topicName + + "]. Sending response."); + respondOk(dmaapContext, "Write access has been revoked for publisher."); + + } + + /** + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws DMaaPAccessDeniedException + */ + @Override + public void permitConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Granting read access to consumer [" + consumerId + "] for topic " + topicName); + final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext); + + // + //// String permission = + + + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to permit read access to consumer [" + + // consumerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" <Grant consume permissions> + + + + // } + // } + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to permit read access to consumer [" + consumerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId + + "] for topic. Topic [" + topicName + "] does not exist."); + } + + topic.permitReadsByUser(consumerId, user); + + LOGGER.info("Read access has been granted to consumer [" + consumerId + "] for topic [" + topicName + + "]. 
Sending response."); + respondOk(dmaapContext, + "Read access has been granted for consumer [" + consumerId + "] for topic [" + topicName + "]."); + } + + /** + * @param dmaapContext + * @param topicName + * @param consumerId + * @throws DMaaPAccessDeniedException + */ + @Override + public void denyConsumerForTopic(DMaaPContext dmaapContext, String topicName, String consumerId) + throws AccessDeniedException, ConfigDbException, IOException, TopicExistsException, + DMaaPAccessDeniedException { + + LOGGER.info("Revoking read access to consumer [" + consumerId + "] for topic " + topicName); + final NsaApiKey user = getDmaapAuthenticatedUser(dmaapContext); + + //// String permission = + + + // String permission = aaf.aafPermissionString(topicName, "manage"); + // if(!aaf.aafAuthentication(dmaapContext.getRequest(), permission)) + // { + // LOGGER.error("Failed to revoke read access to consumer [" + + // consumerId + "] for topic " + topicName + + // ErrorResponse errRes = new ErrorResponse(HttpStatus.SC_FORBIDDEN, + // DMaaPResponseCode.ACCESS_NOT_PERMITTED.getResponseCode(), + // errorMessages.getNotPermitted1()+" <Grant consume permissions> + + + // throw new DMaaPAccessDeniedException(errRes); + // } + // + // + + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Failed to revoke read access to consumer [" + consumerId + "] for topic. Topic [" + topicName + + "] does not exist."); + throw new TopicExistsException("Failed to permit read access to consumer [" + consumerId + + "] for topic. Topic [" + topicName + "] does not exist."); + } + + topic.denyReadsByUser(consumerId, user); + + LOGGER.info("Read access has been revoked to consumer [" + consumerId + "] for topic [" + topicName + + "]. 
Sending response."); + respondOk(dmaapContext, + "Read access has been revoked for consumer [" + consumerId + "] for topic [" + topicName + "]."); + + } + +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java new file mode 100644 index 0000000..973a9eb --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/TransactionServiceImpl.java @@ -0,0 +1,99 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.aft.dme2.internal.jettison.json.JSONException; +import com.att.nsa.configs.ConfigDbException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.service.TransactionService; +import org.onap.dmaap.dmf.mr.transaction.TransactionObj; +import org.springframework.stereotype.Service; + +import java.io.IOException; + +/** + * Once the transaction rest gateway will be using that time it will provide all + * the transaction details like fetching all the transactional objects or get + * any particular transaction object details + * + * @author nilanjana.maity + * + */ +@Service +public class TransactionServiceImpl implements TransactionService { + + @Override + public void checkTransaction(TransactionObj trnObj) { + /* Need to implement the method */ + } + + @Override + public void getAllTransactionObjs(DMaaPContext dmaapContext) + throws ConfigDbException, IOException { + + /* + + * + * LOG.info("configReader : "+configReader.toString()); + * + * final JSONObject result = new JSONObject (); final JSONArray + * transactionIds = new JSONArray (); result.put ( "transactionIds", + * transactionIds ); + * + * DMaaPTransactionObjDB<DMaaPTransactionObj> transDb = + * configReader.getfTranDb(); + * + * for (String transactionId : transDb.loadAllTransactionObjs()) { + * transactionIds.put (transactionId); } LOG.info( + * "========== TransactionServiceImpl: getAllTransactionObjs: Transaction objects are : " + * + transactionIds.toString()+"==========="); + * DMaaPResponseBuilder.respondOk(dmaapContext, result); + */ + } + + @Override + public void getTransactionObj(DMaaPContext dmaapContext, + String transactionId) throws ConfigDbException, JSONException, + IOException { + + /* + + * + * ConfigurationReader configReader = dmaapContext.getConfigReader(); + * + * DMaaPTransactionObj trnObj; + * + * trnObj = 
configReader.getfTranDb().loadTransactionObj(transactionId); + * + * + * if (null != trnObj) { trnObj.serialize(); JSONObject result = + * trnObj.asJsonObject(); DMaaPResponseBuilder.respondOk(dmaapContext, + * result); + * LOG.info("========== TransactionServiceImpl: getTransactionObj : "+ + * result.toString()+"==========="); return; } + * + * } LOG.info( + * "========== TransactionServiceImpl: getTransactionObj: Error : Transaction object does not exist. " + * +"==========="); + */ + } +} diff --git a/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java new file mode 100644 index 0000000..e9ca969 --- /dev/null +++ b/src/main/java/org/onap/dmaap/dmf/mr/service/impl/UIServiceImpl.java @@ -0,0 +1,209 @@ +/******************************************************************************* + * ============LICENSE_START======================================================= + * org.onap.dmaap + * ================================================================================ + * Copyright © 2017 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 +* + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * + * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
+ * + *******************************************************************************/ +package org.onap.dmaap.dmf.mr.service.impl; + +import com.att.eelf.configuration.EELFLogger; +import com.att.eelf.configuration.EELFManager; +import com.att.nsa.configs.ConfigDbException; +import com.att.nsa.security.db.NsaApiDb; +import com.att.nsa.security.db.simple.NsaSimpleApiKey; +import org.apache.kafka.common.errors.TopicExistsException; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; +import org.onap.dmaap.dmf.mr.CambriaApiException; +import org.onap.dmaap.dmf.mr.beans.DMaaPContext; +import org.onap.dmaap.dmf.mr.beans.DMaaPKafkaMetaBroker; +import org.onap.dmaap.dmf.mr.metabroker.Topic; +import org.onap.dmaap.dmf.mr.service.UIService; +import org.onap.dmaap.dmf.mr.utils.DMaaPResponseBuilder; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +/** + * @author muzainulhaque.qazi + * + */ +@Service +public class UIServiceImpl implements UIService { + + + private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(UIServiceImpl.class); + /** + * Returning template of hello page + * @param dmaapContext + * @throws IOException + */ + @Override + public void hello(DMaaPContext dmaapContext) throws IOException { + LOGGER.info("Returning template of hello page."); + DMaaPResponseBuilder.respondOkWithHtml(dmaapContext, "templates/hello.html"); + } + + /** + * Fetching list of all api keys and returning in a templated form for display. 
+ * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void getApiKeysTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching list of all api keys and returning in a templated form for display."); + Map<String, NsaSimpleApiKey> keyMap = getApiKeyDb(dmaapContext).loadAllKeyRecords(); + + LinkedList<JSONObject> keyList = new LinkedList<>(); + + JSONObject jsonList = new JSONObject(); + + for (Entry<String, NsaSimpleApiKey> e : keyMap.entrySet()) { + final NsaSimpleApiKey key = e.getValue(); + final JSONObject jsonObject = new JSONObject(); + jsonObject.put("key", key.getKey()); + jsonObject.put("email", key.getContactEmail()); + jsonObject.put("description", key.getDescription()); + keyList.add(jsonObject); + } + + jsonList.put("apiKeys", keyList); + + LOGGER.info("Returning list of all the api keys in JSON format for the template."); + // "templates/apiKeyList.html" + DMaaPResponseBuilder.respondOk(dmaapContext, jsonList); + + } + + /** + * @param dmaapContext + * @param apiKey + * @throws ConfigDbException + * @throws IOException + * @throws JSONException + * @throws Exception + */ + @Override + public void getApiKey(DMaaPContext dmaapContext, String apiKey) throws CambriaApiException, ConfigDbException, JSONException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching detials of apikey: " + apiKey); + final NsaSimpleApiKey key = getApiKeyDb(dmaapContext).loadApiKey(apiKey); + + if (null != key) { + LOGGER.info("Details of apikey [" + apiKey + "] found. Returning response"); + DMaaPResponseBuilder.respondOk(dmaapContext, key.asJsonObject()); + } else { + LOGGER.info("Details of apikey [" + apiKey + "] not found. 
Returning response"); + throw new CambriaApiException(400,"Key [" + apiKey + "] not found."); + } + + } + + /** + * Fetching list of all the topics + * @param dmaapContext + * @throws ConfigDbException + * @throws IOException + */ + @Override + public void getTopicsTable(DMaaPContext dmaapContext) throws ConfigDbException, IOException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching list of all the topics and returning in a templated form for display"); + List<Topic> topicsList = getMetaBroker(dmaapContext).getAllTopics(); + + JSONObject jsonObject = new JSONObject(); + + JSONArray topicsArray = new JSONArray(); + + List<Topic> topicList = getMetaBroker(dmaapContext).getAllTopics(); + + for (Topic topic : topicList) { + JSONObject obj = new JSONObject(); + obj.put("topicName", topic.getName()); + obj.put("description", topic.getDescription()); + obj.put("owner", topic.getOwner()); + topicsArray.put(obj); + } + + jsonObject.put("topics", topicsList); + + LOGGER.info("Returning the list of topics in templated format for display."); + DMaaPResponseBuilder.respondOk(dmaapContext, jsonObject); + + } + + /** + * @param dmaapContext + * @param topicName + * @throws ConfigDbException + * @throws IOException + * @throws TopicExistsException + */ + @Override + public void getTopic(DMaaPContext dmaapContext, String topicName) + throws ConfigDbException, IOException, TopicExistsException { + // TODO - We need to work on the templates and how data will be set in + // the template + LOGGER.info("Fetching detials of apikey: " + topicName); + Topic topic = getMetaBroker(dmaapContext).getTopic(topicName); + + if (null == topic) { + LOGGER.error("Topic [" + topicName + "] does not exist."); + throw new TopicExistsException("Topic [" + topicName + "] does not exist."); + } + + JSONObject json = new JSONObject(); + json.put("topicName", topic.getName()); + json.put("description", topic.getDescription()); + 
json.put("owner", topic.getOwner());

		LOGGER.info("Returning details of topic [" + topicName + "]. Sending response.");
		DMaaPResponseBuilder.respondOk(dmaapContext, json);

	}

	/**
	 * Resolves the api-key DB from the request's configuration reader.
	 *
	 * @param dmaapContext request context
	 * @return the api-key database
	 */
	private NsaApiDb<NsaSimpleApiKey> getApiKeyDb(DMaaPContext dmaapContext) {
		return dmaapContext.getConfigReader().getfApiKeyDb();
	}

	/**
	 * Resolves the Kafka meta-broker from the request's configuration reader.
	 *
	 * @param dmaapContext request context
	 * @return the meta-broker, cast to its Kafka implementation
	 */
	private DMaaPKafkaMetaBroker getMetaBroker(DMaaPContext dmaapContext) {
		return (DMaaPKafkaMetaBroker) dmaapContext.getConfigReader().getfMetaBroker();
	}

}
|