author     Instrumental <jonathan.gathman@att.com>    2018-09-07 12:21:34 -0500
committer  Instrumental <jonathan.gathman@att.com>    2018-09-07 12:22:50 -0500
commit     4b5a7d721d994a49057e9bfb403c7bff1b376660 (patch)
tree       a36d03227b63a3e60346d6c3ca87b061087dae85 /auth/auth-cass
parent     824dc7b5fc0e1ccdf7f460479aff344727f0f01e (diff)
Mass removal of all Tabs (Style Warnings)
Issue-ID: AAF-473
Change-Id: Iaf0ef8120882937959bb0065f2f6ba74a021940f
Signed-off-by: Instrumental <jonathan.gathman@att.com>
Diffstat (limited to 'auth/auth-cass')
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java  854
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java  4
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java  36
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java  2
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java  320
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java  348
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java  360
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java  560
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java  12
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java  28
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java  48
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java  334
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java  4
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java  2
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java  38
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java  62
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java  6
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java  162
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java  126
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java  148
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java  374
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java  292
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java  738
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java  12
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java  196
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ConfigDAO.java  72
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java  224
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java  186
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java  150
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java  342
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java  230
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java  200
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java  876
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java  70
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java  86
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java  184
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java  854
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java  628
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java  78
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java  470
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java  68
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java  3456
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java  266
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java  2120
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java  170
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java  290
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java  74
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java  46
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java  56
-rw-r--r--  auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java  140
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java  164
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java  44
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java  52
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java  56
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java  22
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java  278
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java  412
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java  48
-rw-r--r--  auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java  52
59 files changed, 8765 insertions, 8765 deletions
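
The change below is purely mechanical: every tab in the listed sources is replaced with spaces, so each file shows matched pairs of removed and re-added lines. As a rough sketch only (the commit does not say which tool performed the conversion), a tab-to-space pass of this kind could be scripted in Java along the following lines, assuming a four-spaces-per-tab convention and a hypothetical source root passed on the command line:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

// Hypothetical helper, not the tool used for this commit: expands every tab in
// .java files under a given root into four spaces, matching the style the diff shows.
public class DeTab {
    private static final String SPACES = "    "; // assumed 4-space replacement per tab

    public static void main(String[] args) throws IOException {
        Path root = Paths.get(args.length > 0 ? args[0] : "auth/auth-cass/src");
        try (Stream<Path> files = Files.walk(root)) {
            files.filter(p -> p.toString().endsWith(".java"))
                 .forEach(DeTab::deTab);
        }
    }

    private static void deTab(Path file) {
        try {
            String src = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
            String fixed = src.replace("\t", SPACES);
            if (!fixed.equals(src)) {
                Files.write(file, fixed.getBytes(StandardCharsets.UTF_8));
            }
        } catch (IOException e) {
            System.err.println("skipping " + file + ": " + e.getMessage());
        }
    }
}

After running such a pass against a checkout (for example, java DeTab auth/auth-cass/src), a whitespace-ignoring comparison such as git diff -w should come back empty, which is one way to confirm the rewrite changed layout only.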
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java
index b5a950d4..f24c7c9f 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/AbsCassDAO.java
@@ -45,459 +45,459 @@ import com.datastax.driver.core.Session;
import com.datastax.driver.core.exceptions.DriverException;
public abstract class AbsCassDAO<TRANS extends TransStore,DATA> {
- protected static final char DOT = '.';
- protected static final char DOT_PLUS_ONE = '.'+1;
- protected static final String FIRST_CHAR = Character.toString((char)0);
- protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE);
- protected static final int FIELD_COMMAS = 0;
- protected static final int QUESTION_COMMAS = 1;
- protected static final int ASSIGNMENT_COMMAS = 2;
- protected static final int WHERE_ANDS = 3;
+ protected static final char DOT = '.';
+ protected static final char DOT_PLUS_ONE = '.'+1;
+ protected static final String FIRST_CHAR = Character.toString((char)0);
+ protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE);
+ protected static final int FIELD_COMMAS = 0;
+ protected static final int QUESTION_COMMAS = 1;
+ protected static final int ASSIGNMENT_COMMAS = 2;
+ protected static final int WHERE_ANDS = 3;
- private Cluster cluster;
- /*
- * From DataStax
- * com.datastax.driver.core.Session
- A session holds connections to a Cassandra cluster, allowing it to be queried. Each session maintains multiple connections to the cluster nodes,
- provides policies to choose which node to use for each query (round-robin on all nodes of the cluster by default), and handles retries for
- failed query (when it makes sense), etc...
- Session instances are thread-safe and usually a single instance is enough per application. However, a given session can only be set to one
- keyspace at a time, so one instance per keyspace is necessary.
- */
- private Session session;
- private final String keyspace;
- // If this is null, then we own session
- private final AbsCassDAO<TRANS,?> owningDAO;
- protected Class<DATA> dataClass;
- private final String name;
-// private static Slot sessionSlot; // not used since 2015
- private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<>();
- private static final List<Object> EMPTY = new ArrayList<>(0);
- private static final Deque<ResetRequest> resetDeque = new ConcurrentLinkedDeque<ResetRequest>();
- private static boolean resetTrigger = false;
- private static long nextAvailableReset = 0;
-
- public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass) {
- this.name = name;
- this.cluster = cluster;
- this.keyspace = keyspace;
- owningDAO = null; // we own session
- session = null;
- this.dataClass = dataClass;
- }
+ private Cluster cluster;
+ /*
+ * From DataStax
+ * com.datastax.driver.core.Session
+ A session holds connections to a Cassandra cluster, allowing it to be queried. Each session maintains multiple connections to the cluster nodes,
+ provides policies to choose which node to use for each query (round-robin on all nodes of the cluster by default), and handles retries for
+ failed query (when it makes sense), etc...
+ Session instances are thread-safe and usually a single instance is enough per application. However, a given session can only be set to one
+ keyspace at a time, so one instance per keyspace is necessary.
+ */
+ private Session session;
+ private final String keyspace;
+ // If this is null, then we own session
+ private final AbsCassDAO<TRANS,?> owningDAO;
+ protected Class<DATA> dataClass;
+ private final String name;
+// private static Slot sessionSlot; // not used since 2015
+ private static final ArrayList<AbsCassDAO<? extends TransStore,?>.PSInfo> psinfos = new ArrayList<>();
+ private static final List<Object> EMPTY = new ArrayList<>(0);
+ private static final Deque<ResetRequest> resetDeque = new ConcurrentLinkedDeque<ResetRequest>();
+ private static boolean resetTrigger = false;
+ private static long nextAvailableReset = 0;
+
+ public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass) {
+ this.name = name;
+ this.cluster = cluster;
+ this.keyspace = keyspace;
+ owningDAO = null; // we own session
+ session = null;
+ this.dataClass = dataClass;
+ }
- public AbsCassDAO(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass) {
- this.name = name;
- cluster = aDao.cluster;
- keyspace = aDao.keyspace;
- session = null;
- // We do not own session
- owningDAO = aDao;
- this.dataClass = dataClass;
- }
-
+ public AbsCassDAO(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass) {
+ this.name = name;
+ cluster = aDao.cluster;
+ keyspace = aDao.keyspace;
+ session = null;
+ // We do not own session
+ owningDAO = aDao;
+ this.dataClass = dataClass;
+ }
+
// Not used since 2015
-// public static void setSessionSlot(Slot slot) {
-// sessionSlot = slot;
-// }
+// public static void setSessionSlot(Slot slot) {
+// sessionSlot = slot;
+// }
- //Note: Lower case ON PURPOSE. These names used to create History Messages
- public enum CRUD {
- create,read,update,delete;
- }
+ //Note: Lower case ON PURPOSE. These names used to create History Messages
+ public enum CRUD {
+ create,read,update,delete;
+ }
- public class PSInfo {
- private PreparedStatement ps;
- private final int size;
- private final Loader<DATA> loader;
- private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader
- private final String cql;
- private final ConsistencyLevel consistency;
+ public class PSInfo {
+ private PreparedStatement ps;
+ private final int size;
+ private final Loader<DATA> loader;
+ private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader
+ private final String cql;
+ private final ConsistencyLevel consistency;
- /**
- * Create a PSInfo and create Prepared Statement
- *
- * @param trans
- * @param theCQL
- * @param loader
- */
- public PSInfo(TRANS trans, String theCQL, Loader<DATA> loader, ConsistencyLevel consistency) {
- this.loader = loader;
- this.consistency=consistency;
- psinfos.add(this);
+ /**
+ * Create a PSInfo and create Prepared Statement
+ *
+ * @param trans
+ * @param theCQL
+ * @param loader
+ */
+ public PSInfo(TRANS trans, String theCQL, Loader<DATA> loader, ConsistencyLevel consistency) {
+ this.loader = loader;
+ this.consistency=consistency;
+ psinfos.add(this);
- cql = theCQL.trim().toUpperCase();
- if(cql.startsWith("INSERT")) {
- crud = CRUD.create;
- } else if(cql.startsWith("UPDATE")) {
- crud = CRUD.update;
- } else if(cql.startsWith("DELETE")) {
- crud = CRUD.delete;
- } else {
- crud = CRUD.read;
- }
-
- int idx = 0, count=0;
- while((idx=cql.indexOf('?',idx))>=0) {
- ++idx;
- ++count;
- }
- size=count;
- }
-
- public synchronized void reset() {
- ps = null;
- }
-
- private synchronized BoundStatement ps(TransStore trans) throws APIException, IOException {
- /* From Datastax
- You should prepare only once, and cache the PreparedStatement in your application (it is thread-safe).
- If you call prepare multiple times with the same query string, the driver will log a warning.
- */
- if(ps==null) {
- TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB);
- try {
- ps = getSession(trans).prepare(cql);
- ps.setConsistencyLevel(consistency);
- } catch (DriverException e) {
- reportPerhapsReset(trans,e);
- throw e;
- } finally {
- tt.done();
- }
- }
- // BoundStatements are NOT threadsafe... need a new one each time.
- return new BoundStatement(ps);
- }
+ cql = theCQL.trim().toUpperCase();
+ if(cql.startsWith("INSERT")) {
+ crud = CRUD.create;
+ } else if(cql.startsWith("UPDATE")) {
+ crud = CRUD.update;
+ } else if(cql.startsWith("DELETE")) {
+ crud = CRUD.delete;
+ } else {
+ crud = CRUD.read;
+ }
+
+ int idx = 0, count=0;
+ while((idx=cql.indexOf('?',idx))>=0) {
+ ++idx;
+ ++count;
+ }
+ size=count;
+ }
+
+ public synchronized void reset() {
+ ps = null;
+ }
+
+ private synchronized BoundStatement ps(TransStore trans) throws APIException, IOException {
+ /* From Datastax
+ You should prepare only once, and cache the PreparedStatement in your application (it is thread-safe).
+ If you call prepare multiple times with the same query string, the driver will log a warning.
+ */
+ if(ps==null) {
+ TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB);
+ try {
+ ps = getSession(trans).prepare(cql);
+ ps.setConsistencyLevel(consistency);
+ } catch (DriverException e) {
+ reportPerhapsReset(trans,e);
+ throw e;
+ } finally {
+ tt.done();
+ }
+ }
+ // BoundStatements are NOT threadsafe... need a new one each time.
+ return new BoundStatement(ps);
+ }
- /**
- * Execute a Prepared Statement by extracting from DATA object
- *
- * @param trans
- * @param text
- * @param data
- * @return
- */
- public Result<ResultSetFuture> execAsync(TRANS trans, String text, DATA data) {
- TimeTaken tt = trans.start(text, Env.REMOTE);
- try {
- return Result.ok(getSession(trans).executeAsync(
- ps(trans).bind(loader.extract(data, size, crud))));
- } catch (DriverException | APIException | IOException e) {
- AbsCassDAO.this.reportPerhapsReset(trans,e);
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
- } finally {
- tt.done();
- }
- }
+ /**
+ * Execute a Prepared Statement by extracting from DATA object
+ *
+ * @param trans
+ * @param text
+ * @param data
+ * @return
+ */
+ public Result<ResultSetFuture> execAsync(TRANS trans, String text, DATA data) {
+ TimeTaken tt = trans.start(text, Env.REMOTE);
+ try {
+ return Result.ok(getSession(trans).executeAsync(
+ ps(trans).bind(loader.extract(data, size, crud))));
+ } catch (DriverException | APIException | IOException e) {
+ AbsCassDAO.this.reportPerhapsReset(trans,e);
+ return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+ } finally {
+ tt.done();
+ }
+ }
- /**
- * Execute a Prepared Statement on Object[] key
- *
- * @param trans
- * @param text
- * @param objs
- * @return
- */
- public Result<ResultSetFuture> execAsync(TRANS trans, String text, Object ... objs) {
- TimeTaken tt = trans.start(text, Env.REMOTE);
- try {
- return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs)));
- } catch (DriverException | APIException | IOException e) {
- AbsCassDAO.this.reportPerhapsReset(trans,e);
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
- } finally {
- tt.done();
- }
- }
-
- /*
- * Note:
- *
- */
+ /**
+ * Execute a Prepared Statement on Object[] key
+ *
+ * @param trans
+ * @param text
+ * @param objs
+ * @return
+ */
+ public Result<ResultSetFuture> execAsync(TRANS trans, String text, Object ... objs) {
+ TimeTaken tt = trans.start(text, Env.REMOTE);
+ try {
+ return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs)));
+ } catch (DriverException | APIException | IOException e) {
+ AbsCassDAO.this.reportPerhapsReset(trans,e);
+ return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+ } finally {
+ tt.done();
+ }
+ }
+
+ /*
+ * Note:
+ *
+ */
- /**
- * Execute a Prepared Statement by extracting from DATA object
- *
- * @param trans
- * @param text
- * @param data
- * @return
- */
- public Result<ResultSet> exec(TRANS trans, String text, DATA data) {
- TimeTaken tt = trans.start(text, Env.REMOTE);
- try {
- /*
- * "execute" (and executeAsync)
- * Executes the provided query.
- This method blocks until at least some result has been received from the database. However,
- for SELECT queries, it does not guarantee that the result has been received in full. But it
- does guarantee that some response has been received from the database, and in particular
- guarantee that if the request is invalid, an exception will be thrown by this method.
+ /**
+ * Execute a Prepared Statement by extracting from DATA object
+ *
+ * @param trans
+ * @param text
+ * @param data
+ * @return
+ */
+ public Result<ResultSet> exec(TRANS trans, String text, DATA data) {
+ TimeTaken tt = trans.start(text, Env.REMOTE);
+ try {
+ /*
+ * "execute" (and executeAsync)
+ * Executes the provided query.
+ This method blocks until at least some result has been received from the database. However,
+ for SELECT queries, it does not guarantee that the result has been received in full. But it
+ does guarantee that some response has been received from the database, and in particular
+ guarantee that if the request is invalid, an exception will be thrown by this method.
- Parameters:
- statement - the CQL query to execute (that can be any Statement).
- Returns:
- the result of the query. That result will never be null but can be empty (and will
- be for any non SELECT query).
- */
- return Result.ok(getSession(trans).execute(
- ps(trans).bind(loader.extract(data, size, crud))));
- } catch (DriverException | APIException | IOException e) {
- AbsCassDAO.this.reportPerhapsReset(trans,e);
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
- } finally {
- tt.done();
- }
- }
+ Parameters:
+ statement - the CQL query to execute (that can be any Statement).
+ Returns:
+ the result of the query. That result will never be null but can be empty (and will
+ be for any non SELECT query).
+ */
+ return Result.ok(getSession(trans).execute(
+ ps(trans).bind(loader.extract(data, size, crud))));
+ } catch (DriverException | APIException | IOException e) {
+ AbsCassDAO.this.reportPerhapsReset(trans,e);
+ return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+ } finally {
+ tt.done();
+ }
+ }
- /**
- * Execute a Prepared Statement on Object[] key
- *
- * @param trans
- * @param text
- * @param objs
- * @return
- */
- public Result<ResultSet> exec(TRANS trans, String text, Object ... objs) {
- TimeTaken tt = trans.start(text, Env.REMOTE);
- try {
- return Result.ok(getSession(trans).execute(ps(trans).bind(objs)));
- } catch (DriverException | APIException | IOException e) {
- AbsCassDAO.this.reportPerhapsReset(trans,e);
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
- } finally {
- tt.done();
- }
- }
+ /**
+ * Execute a Prepared Statement on Object[] key
+ *
+ * @param trans
+ * @param text
+ * @param objs
+ * @return
+ */
+ public Result<ResultSet> exec(TRANS trans, String text, Object ... objs) {
+ TimeTaken tt = trans.start(text, Env.REMOTE);
+ try {
+ return Result.ok(getSession(trans).execute(ps(trans).bind(objs)));
+ } catch (DriverException | APIException | IOException e) {
+ AbsCassDAO.this.reportPerhapsReset(trans,e);
+ return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+ } finally {
+ tt.done();
+ }
+ }
- /**
- * Read the Data from Cassandra given a Prepared Statement (defined by the
- * DAO Instance)
- *
- * This is common behavior among all DAOs.
- * @throws DAOException
- */
- public Result<List<DATA>> read(TRANS trans, String text, Object[] key) {
- TimeTaken tt = trans.start(text,Env.REMOTE);
-
- ResultSet rs;
- try {
- rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key));
-/// TEST CODE for Exception
-// boolean force = true;
-// if(force) {
-// Map<InetSocketAddress, Throwable> misa = new HashMap<>();
-// //misa.put(new InetSocketAddress(444),new Exception("no host was tried"));
-// misa.put(new InetSocketAddress(444),new Exception("Connection has been closed"));
-// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa);
-//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried");
-// }
+ /**
+ * Read the Data from Cassandra given a Prepared Statement (defined by the
+ * DAO Instance)
+ *
+ * This is common behavior among all DAOs.
+ * @throws DAOException
+ */
+ public Result<List<DATA>> read(TRANS trans, String text, Object[] key) {
+ TimeTaken tt = trans.start(text,Env.REMOTE);
+
+ ResultSet rs;
+ try {
+ rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key));
+/// TEST CODE for Exception
+// boolean force = true;
+// if(force) {
+// Map<InetSocketAddress, Throwable> misa = new HashMap<>();
+// //misa.put(new InetSocketAddress(444),new Exception("no host was tried"));
+// misa.put(new InetSocketAddress(444),new Exception("Connection has been closed"));
+// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa);
+//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried");
+// }
//// END TEST CODE
- } catch (DriverException | APIException | IOException e) {
- AbsCassDAO.this.reportPerhapsReset(trans,e);
- return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
- } finally {
- tt.done();
- }
-
- return extract(loader,rs,null /*let Array be created if necessary*/,dflt);
- }
-
- public Result<List<DATA>> read(TRANS trans, String text, DATA data) {
- return read(trans,text, loader.extract(data, size, crud));
- }
-
- public Object[] keyFrom(DATA data) {
- return loader.extract(data, size, CRUD.delete); // Delete is key only
- }
+ } catch (DriverException | APIException | IOException e) {
+ AbsCassDAO.this.reportPerhapsReset(trans,e);
+ return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql);
+ } finally {
+ tt.done();
+ }
+
+ return extract(loader,rs,null /*let Array be created if necessary*/,dflt);
+ }
+
+ public Result<List<DATA>> read(TRANS trans, String text, DATA data) {
+ return read(trans,text, loader.extract(data, size, crud));
+ }
+
+ public Object[] keyFrom(DATA data) {
+ return loader.extract(data, size, CRUD.delete); // Delete is key only
+ }
- /*
- * Note: in case PSInfos are deleted, we want to remove them from list. This is not expected,
- * but we don't want a data leak if it does. Finalize doesn't have to happen quickly
- */
- @Override
- protected void finalize() throws Throwable {
- psinfos.remove(this);
- }
- }
+ /*
+ * Note: in case PSInfos are deleted, we want to remove them from list. This is not expected,
+ * but we don't want a data leak if it does. Finalize doesn't have to happen quickly
+ */
+ @Override
+ protected void finalize() throws Throwable {
+ psinfos.remove(this);
+ }
+ }
- protected final Accept<DATA> dflt = new Accept<DATA>() {
- @Override
- public boolean ok(DATA data) {
- return true;
- }
- };
+ protected final Accept<DATA> dflt = new Accept<DATA>() {
+ @Override
+ public boolean ok(DATA data) {
+ return true;
+ }
+ };
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("unchecked")
protected final Result<List<DATA>> extract(Loader<DATA> loader, ResultSet rs, List<DATA> indata, Accept<DATA> accept) {
- List<Row> rows = rs.all();
- if(rows.isEmpty()) {
- return Result.ok((List<DATA>)EMPTY); // Result sets now .emptyList(true);
- } else {
- DATA d;
- List<DATA> data = indata==null?new ArrayList<>(rows.size()):indata;
-
- for(Row row : rows) {
- try {
- d = loader.load(dataClass.newInstance(),row);
- if(accept.ok(d)) {
- data.add(d);
- }
- } catch(Exception e) {
- return Result.err(e);
- }
- }
- return Result.ok(data);
- }
+ List<Row> rows = rs.all();
+ if(rows.isEmpty()) {
+ return Result.ok((List<DATA>)EMPTY); // Result sets now .emptyList(true);
+ } else {
+ DATA d;
+ List<DATA> data = indata==null?new ArrayList<>(rows.size()):indata;
+
+ for(Row row : rows) {
+ try {
+ d = loader.load(dataClass.newInstance(),row);
+ if(accept.ok(d)) {
+ data.add(d);
+ }
+ } catch(Exception e) {
+ return Result.err(e);
+ }
+ }
+ return Result.ok(data);
+ }
}
- private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created";
- private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created";
- private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session";
- private static final Object LOCK = new Object();
+ private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created";
+ private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created";
+ private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session";
+ private static final Object LOCK = new Object();
- private static class ResetRequest {
- //package on purpose
- Session session;
- long timestamp;
-
- public ResetRequest(Session session) {
- this.session = session;
- timestamp = System.currentTimeMillis();
- }
- }
+ private static class ResetRequest {
+ //package on purpose
+ Session session;
+ long timestamp;
+
+ public ResetRequest(Session session) {
+ this.session = session;
+ timestamp = System.currentTimeMillis();
+ }
+ }
-
- public static final void primePSIs(TransStore trans) throws APIException, IOException {
- for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
- if(psi.ps==null) {
- psi.ps(trans);
- }
- }
- }
-
- public final Session getSession(TransStore trans) throws APIException, IOException {
- // SessionFilter unused since 2015
- // Try to use Trans' session, if exists
-// if(sessionSlot!=null) { // try to get from Trans
-// Session sess = trans.get(sessionSlot, null);
-// if(sess!=null) {
-// return sess;
-// }
-// }
-
- // If there's an owning DAO, use it's session
- if(owningDAO!=null) {
- return owningDAO.getSession(trans);
- }
-
- // OK, nothing else works... get our own.
- if(session==null || resetTrigger) {
- Cluster tempCluster = null;
- Session tempSession = null;
- try {
- synchronized(LOCK) {
- boolean reset = false;
- for(ResetRequest r : resetDeque) {
- if(r.session == session) {
- if(r.timestamp>nextAvailableReset) {
- reset=true;
- nextAvailableReset = System.currentTimeMillis() + 60000;
- tempCluster = cluster;
- tempSession = session;
- break;
- } else {
- trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset");
- }
- }
- }
-
- if(reset || session == null) {
- TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB);
- try {
- // Note: Maitrayee recommended not closing the cluster, just
- // overwrite it. Jonathan 9/30/2016 assuming same for Session
- // This was a bad idea. Ran out of File Handles as I suspected, Jonathan
- if(reset) {
- for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
- psi.reset();
- }
- }
- if(reset || cluster==null) {
- cluster = CassAccess.cluster(trans, keyspace);
- trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED);
- }
- if(reset || session==null) {
- session = cluster.connect(keyspace);
- trans.warn().log(NEW_CASSANDRA_SESSION_CREATED);
- }
- } finally {
- resetTrigger=false;
- tt.done();
- }
- }
- }
- } finally {
- TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB);
- try {
- resetDeque.clear();
- // Not clearing Session/Cluster appears to kill off FileHandles
- if(tempSession!=null && !tempSession.isClosed()) {
- tempSession.close();
- }
- if(tempCluster!=null && !tempCluster.isClosed()) {
- tempCluster.close();
- }
- } finally {
- tt.done();
- }
- }
- }
- return session;
- }
-
- public final boolean reportPerhapsReset(TransStore trans, Exception e) {
- if(owningDAO!=null) {
- return owningDAO.reportPerhapsReset(trans, e);
- } else {
- boolean rv = false;
- if(CassAccess.isResetException(e)) {
- trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName());
- resetDeque.addFirst(new ResetRequest(session));
- rv = resetTrigger = true;
- }
- trans.error().log(e);
- return rv;
- }
- }
+
+ public static final void primePSIs(TransStore trans) throws APIException, IOException {
+ for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
+ if(psi.ps==null) {
+ psi.ps(trans);
+ }
+ }
+ }
+
+ public final Session getSession(TransStore trans) throws APIException, IOException {
+ // SessionFilter unused since 2015
+ // Try to use Trans' session, if exists
+// if(sessionSlot!=null) { // try to get from Trans
+// Session sess = trans.get(sessionSlot, null);
+// if(sess!=null) {
+// return sess;
+// }
+// }
+
+ // If there's an owning DAO, use it's session
+ if(owningDAO!=null) {
+ return owningDAO.getSession(trans);
+ }
+
+ // OK, nothing else works... get our own.
+ if(session==null || resetTrigger) {
+ Cluster tempCluster = null;
+ Session tempSession = null;
+ try {
+ synchronized(LOCK) {
+ boolean reset = false;
+ for(ResetRequest r : resetDeque) {
+ if(r.session == session) {
+ if(r.timestamp>nextAvailableReset) {
+ reset=true;
+ nextAvailableReset = System.currentTimeMillis() + 60000;
+ tempCluster = cluster;
+ tempSession = session;
+ break;
+ } else {
+ trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset");
+ }
+ }
+ }
+
+ if(reset || session == null) {
+ TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB);
+ try {
+ // Note: Maitrayee recommended not closing the cluster, just
+ // overwrite it. Jonathan 9/30/2016 assuming same for Session
+ // This was a bad idea. Ran out of File Handles as I suspected, Jonathan
+ if(reset) {
+ for(AbsCassDAO<? extends TransStore, ?>.PSInfo psi : psinfos) {
+ psi.reset();
+ }
+ }
+ if(reset || cluster==null) {
+ cluster = CassAccess.cluster(trans, keyspace);
+ trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED);
+ }
+ if(reset || session==null) {
+ session = cluster.connect(keyspace);
+ trans.warn().log(NEW_CASSANDRA_SESSION_CREATED);
+ }
+ } finally {
+ resetTrigger=false;
+ tt.done();
+ }
+ }
+ }
+ } finally {
+ TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB);
+ try {
+ resetDeque.clear();
+ // Not clearing Session/Cluster appears to kill off FileHandles
+ if(tempSession!=null && !tempSession.isClosed()) {
+ tempSession.close();
+ }
+ if(tempCluster!=null && !tempCluster.isClosed()) {
+ tempCluster.close();
+ }
+ } finally {
+ tt.done();
+ }
+ }
+ }
+ return session;
+ }
+
+ public final boolean reportPerhapsReset(TransStore trans, Exception e) {
+ if(owningDAO!=null) {
+ return owningDAO.reportPerhapsReset(trans, e);
+ } else {
+ boolean rv = false;
+ if(CassAccess.isResetException(e)) {
+ trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName());
+ resetDeque.addFirst(new ResetRequest(session));
+ rv = resetTrigger = true;
+ }
+ trans.error().log(e);
+ return rv;
+ }
+ }
- public void close(TransStore trans) {
- if(owningDAO==null) {
- if(session!=null) {
- TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB);
- try {
- session.close();
- } finally {
- tt.done();
- }
- session = null;
- } else {
- trans.debug().log("close called(), Session already closed");
- }
- } else {
- owningDAO.close(trans);
- }
- }
+ public void close(TransStore trans) {
+ if(owningDAO==null) {
+ if(session!=null) {
+ TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB);
+ try {
+ session.close();
+ } finally {
+ tt.done();
+ }
+ session = null;
+ } else {
+ trans.debug().log("close called(), Session already closed");
+ }
+ } else {
+ owningDAO.close(trans);
+ }
+ }
- protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) {
- }
-
- protected interface Accept<DATA> {
- public boolean ok(DATA data);
- }
+ protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) {
+ }
+
+ protected interface Accept<DATA> {
+ public boolean ok(DATA data);
+ }
}
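
The comments carried through AbsCassDAO above record two DataStax driver rules: prepare a CQL statement once and cache the thread-safe PreparedStatement, but bind a fresh BoundStatement for every execution. A minimal sketch of that pattern outside the AAF transaction plumbing, using the driver 3.x API with placeholder contact point, table, and column names, might look like this:

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;

// Sketch of the prepare-once / bind-per-call pattern described in AbsCassDAO.PSInfo.
// Host, table, and column names here are placeholders, not AAF configuration.
public class PreparedReadSketch {
    private final Session session;
    private volatile PreparedStatement ps; // prepared lazily, then cached (thread-safe)

    public PreparedReadSketch(Session session) {
        this.session = session;
    }

    private PreparedStatement prepared() {
        if (ps == null) {
            synchronized (this) {
                if (ps == null) {
                    // Prepare only once; the driver warns if the same CQL is prepared repeatedly.
                    ps = session.prepare("SELECT description FROM authz.ns WHERE name = ?");
                    ps.setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
                }
            }
        }
        return ps;
    }

    public String readDescription(String nsName) {
        // BoundStatements are not thread-safe, so bind a new one per call.
        BoundStatement bound = prepared().bind(nsName);
        ResultSet rs = session.execute(bound);
        Row row = rs.one();
        return row == null ? null : row.getString("description");
    }

    public static void main(String[] args) {
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            System.out.println(new PreparedReadSketch(session).readDescription("org.onap"));
        }
    }
}

PSInfo layers the same idea with lazy preparation per DAO, consistency-level selection, and the session-reset handling shown above.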
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java
index 279f399d..965ee2c2 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Bytification.java
@@ -25,6 +25,6 @@ import java.io.IOException;
import java.nio.ByteBuffer;
public interface Bytification {
- public ByteBuffer bytify() throws IOException;
- public void reconstitute(ByteBuffer bb) throws IOException;
+ public ByteBuffer bytify() throws IOException;
+ public void reconstitute(ByteBuffer bb) throws IOException;
}
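
Bytification is the small serialization contract the cache layer relies on: bytify() packs an object into a ByteBuffer and reconstitute() reads it back. A self-contained toy implementation of that contract (the int/String field layout is invented for illustration and is not an AAF data class) could be:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.onap.aaf.auth.dao.Bytification;

// Illustrative only: a toy value type that round-trips through the Bytification contract.
public class SampleData implements Bytification {
    public int version;
    public String name;

    @Override
    public ByteBuffer bytify() throws IOException {
        byte[] nameBytes = (name == null ? "" : name).getBytes(StandardCharsets.UTF_8);
        ByteBuffer bb = ByteBuffer.allocate(Integer.BYTES * 2 + nameBytes.length);
        bb.putInt(version);             // fixed-width header first
        bb.putInt(nameBytes.length);    // then a length prefix for the variable part
        bb.put(nameBytes);
        bb.flip();                      // ready the buffer for readers
        return bb;
    }

    @Override
    public void reconstitute(ByteBuffer bb) throws IOException {
        version = bb.getInt();
        byte[] nameBytes = new byte[bb.getInt()];
        bb.get(nameBytes);
        name = new String(nameBytes, StandardCharsets.UTF_8);
    }
}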
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java
index 83b13c34..b148e87a 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CIDAO.java
@@ -28,23 +28,23 @@ import org.onap.aaf.misc.env.Trans;
public interface CIDAO<TRANS extends Trans> {
- /**
- * Touch the date field for given Table
- *
- * @param trans
- * @param name
- * @return
- */
- public abstract Result<Void> touch(TRANS trans, String name, int ... seg);
-
- /**
- * Read all Info entries, and set local Date objects
- *
- * This is to support regular data checks on the Database to speed up Caching behavior
- *
- */
- public abstract Result<Void> check(TRANS trans);
-
- public abstract Date get(TRANS trans, String table, int seg);
+ /**
+ * Touch the date field for given Table
+ *
+ * @param trans
+ * @param name
+ * @return
+ */
+ public abstract Result<Void> touch(TRANS trans, String name, int ... seg);
+
+ /**
+ * Read all Info entries, and set local Date objects
+ *
+ * This is to support regular data checks on the Database to speed up Caching behavior
+ *
+ */
+ public abstract Result<Void> check(TRANS trans);
+
+ public abstract Date get(TRANS trans, String table, int seg);
}
\ No newline at end of file
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java
index d697b90e..3632aa15 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cacheable.java
@@ -30,5 +30,5 @@ package org.onap.aaf.auth.dao;
*
*/
public interface Cacheable {
- public int[] invalidate(Cached<?,?> cache);
+ public int[] invalidate(Cached<?,?> cache);
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java
index 0797b041..3f4bbb20 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Cached.java
@@ -36,164 +36,164 @@ import org.onap.aaf.misc.env.Env;
import org.onap.aaf.misc.env.Trans;
public class Cached<TRANS extends Trans, DATA extends Cacheable> extends Cache<TRANS,DATA> {
- // Java does not allow creation of Arrays with Generics in them...
- // private Map<String,Dated> cache[];
- protected final CIDAO<TRANS> info;
-
- private static Timer infoTimer;
- private Object cache[];
- public final int segSize;
-
- protected final String name;
-
- private final long expireIn;
-
-
-
- // Taken from String Hash, but coded, to ensure consistent across Java versions. Also covers negative case;
- public int cacheIdx(String key) {
- int h = 0;
- for (int i = 0; i < key.length(); i++) {
- h = 31*h + key.charAt(i);
- }
- if(h<0)h*=-1;
- return h%segSize;
- }
-
- public Cached(CIDAO<TRANS> info, String name, int segSize, long expireIn) {
- this.name =name;
- this.segSize = segSize;
- this.info = info;
- this.expireIn = expireIn;
- cache = new Object[segSize];
- // Create a new Map for each Segment, and store locally
- for(int i=0;i<segSize;++i) {
- cache[i]=obtain(name+i);
- }
- }
-
- public void add(String key, List<DATA> data) {
- @SuppressWarnings("unchecked")
- Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);
- map.put(key, new Dated(data, expireIn));
- }
-
-
- public int invalidate(String key) {
- int cacheIdx = cacheIdx(key);
- @SuppressWarnings("unchecked")
- Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
-// if(map.remove(key)!=null) // Not seeming to remove all the time
- if(map!=null)map.clear();
-// System.err.println("Remove " + name + " " + key);
- return cacheIdx;
- }
-
- public Result<Void> invalidate(int segment) {
- if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));
- @SuppressWarnings("unchecked")
- Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);
- if(map!=null) {
- map.clear();
- }
- return Result.ok();
- }
-
- protected interface Getter<D> {
- public abstract Result<List<D>> get();
- };
-
- // TODO utilize Segmented Caches, and fold "get" into "reads"
- @SuppressWarnings("unchecked")
- public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
- List<DATA> ld = null;
- Result<List<DATA>> rld = null;
-
- int cacheIdx = cacheIdx(key);
- Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
-
- // Check for saved element in cache
- Dated cached = map.get(key);
- // Note: These Segment Timestamps are kept up to date with DB
- Date dbStamp = info.get(trans, name,cacheIdx);
-
- // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
- if(cached!=null && dbStamp.before(cached.timestamp)) {
- ld = (List<DATA>)cached.data;
- rld = Result.ok(ld);
- } else {
- rld = getter.get();
- if(rld.isOK()) { // only store valid lists
- map.put(key, new Dated(rld.value,expireIn)); // successful item found gets put in cache
-// } else if(rld.status == Result.ERR_Backend){
-// map.remove(key);
- }
- }
- return rld;
- }
-
- /**
- * Each Cached object has multiple Segments that need cleaning. Derive each, and add to Cleansing Thread
- * @param env
- * @param dao
- */
- public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {
- for(CachedDAO<?,?,?> d : dao) {
- for(int i=0;i<d.segSize;++i) {
- startCleansing(env, d.table()+i);
- }
- }
- }
-
-
- public static<T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {
- if(infoTimer==null) {
- infoTimer = new Timer("CachedDAO Info Refresh Timer");
- int minRefresh = 10*1000*60; // 10 mins Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")); // 2 second minimum refresh
- infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately
- }
- }
-
- public static void stopTimer() {
- Cache.stopTimer();
- if(infoTimer!=null) {
- infoTimer.cancel();
- infoTimer = null;
- }
- }
-
- private final static class Refresh extends TimerTask {
- private static final int maxRefresh = 2*60*10000; // 20 mins
- private AuthzEnv env;
- private CIDAO<AuthzTrans> cidao;
- private int minRefresh;
- private long lastRun;
-
- public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {
- this.env = env;
- this.cidao = cidao;
- this.minRefresh = minRefresh;
- lastRun = System.currentTimeMillis()-maxRefresh-1000;
- }
-
- @Override
- public void run() {
- // Evaluate whether to refresh based on transaction rate
- long now = System.currentTimeMillis();
- long interval = now-lastRun;
-
- if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;
- lastRun = now;
- AuthzTrans trans = env.newTransNoAvg();
- Result<Void> rv = cidao.check(trans);
- if(rv.status!=Result.OK) {
- env.error().log("Error in CacheInfo Refresh",rv.details);
- }
- if(env.debug().isLoggable()) {
- StringBuilder sb = new StringBuilder("Cache Info Refresh: ");
- trans.auditTrail(0, sb, Env.REMOTE);
- env.debug().log(sb);
- }
- }
- }
+ // Java does not allow creation of Arrays with Generics in them...
+ // private Map<String,Dated> cache[];
+ protected final CIDAO<TRANS> info;
+
+ private static Timer infoTimer;
+ private Object cache[];
+ public final int segSize;
+
+ protected final String name;
+
+ private final long expireIn;
+
+
+
+ // Taken from String Hash, but coded, to ensure consistent across Java versions. Also covers negative case;
+ public int cacheIdx(String key) {
+ int h = 0;
+ for (int i = 0; i < key.length(); i++) {
+ h = 31*h + key.charAt(i);
+ }
+ if(h<0)h*=-1;
+ return h%segSize;
+ }
+
+ public Cached(CIDAO<TRANS> info, String name, int segSize, long expireIn) {
+ this.name =name;
+ this.segSize = segSize;
+ this.info = info;
+ this.expireIn = expireIn;
+ cache = new Object[segSize];
+ // Create a new Map for each Segment, and store locally
+ for(int i=0;i<segSize;++i) {
+ cache[i]=obtain(name+i);
+ }
+ }
+
+ public void add(String key, List<DATA> data) {
+ @SuppressWarnings("unchecked")
+ Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx(key)]);
+ map.put(key, new Dated(data, expireIn));
+ }
+
+
+ public int invalidate(String key) {
+ int cacheIdx = cacheIdx(key);
+ @SuppressWarnings("unchecked")
+ Map<String,Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+// if(map.remove(key)!=null) // Not seeming to remove all the time
+ if(map!=null)map.clear();
+// System.err.println("Remove " + name + " " + key);
+ return cacheIdx;
+ }
+
+ public Result<Void> invalidate(int segment) {
+ if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment));
+ @SuppressWarnings("unchecked")
+ Map<String,Dated> map = ((Map<String,Dated>)cache[segment]);
+ if(map!=null) {
+ map.clear();
+ }
+ return Result.ok();
+ }
+
+ protected interface Getter<D> {
+ public abstract Result<List<D>> get();
+ };
+
+ // TODO utilize Segmented Caches, and fold "get" into "reads"
+ @SuppressWarnings("unchecked")
+ public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
+ List<DATA> ld = null;
+ Result<List<DATA>> rld = null;
+
+ int cacheIdx = cacheIdx(key);
+ Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+
+ // Check for saved element in cache
+ Dated cached = map.get(key);
+ // Note: These Segment Timestamps are kept up to date with DB
+ Date dbStamp = info.get(trans, name,cacheIdx);
+
+ // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
+ if(cached!=null && dbStamp.before(cached.timestamp)) {
+ ld = (List<DATA>)cached.data;
+ rld = Result.ok(ld);
+ } else {
+ rld = getter.get();
+ if(rld.isOK()) { // only store valid lists
+ map.put(key, new Dated(rld.value,expireIn)); // successful item found gets put in cache
+// } else if(rld.status == Result.ERR_Backend){
+// map.remove(key);
+ }
+ }
+ return rld;
+ }
+
+ /**
+ * Each Cached object has multiple Segments that need cleaning. Derive each, and add to Cleansing Thread
+ * @param env
+ * @param dao
+ */
+ public static void startCleansing(AuthzEnv env, CachedDAO<?,?,?> ... dao) {
+ for(CachedDAO<?,?,?> d : dao) {
+ for(int i=0;i<d.segSize;++i) {
+ startCleansing(env, d.table()+i);
+ }
+ }
+ }
+
+
+ public static<T extends Trans> void startRefresh(AuthzEnv env, CIDAO<AuthzTrans> cidao) {
+ if(infoTimer==null) {
+ infoTimer = new Timer("CachedDAO Info Refresh Timer");
+ int minRefresh = 10*1000*60; // 10 mins Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")); // 2 second minimum refresh
+ infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately
+ }
+ }
+
+ public static void stopTimer() {
+ Cache.stopTimer();
+ if(infoTimer!=null) {
+ infoTimer.cancel();
+ infoTimer = null;
+ }
+ }
+
+ private final static class Refresh extends TimerTask {
+ private static final int maxRefresh = 2*60*10000; // 20 mins
+ private AuthzEnv env;
+ private CIDAO<AuthzTrans> cidao;
+ private int minRefresh;
+ private long lastRun;
+
+ public Refresh(AuthzEnv env, CIDAO<AuthzTrans> cidao, int minRefresh) {
+ this.env = env;
+ this.cidao = cidao;
+ this.minRefresh = minRefresh;
+ lastRun = System.currentTimeMillis()-maxRefresh-1000;
+ }
+
+ @Override
+ public void run() {
+ // Evaluate whether to refresh based on transaction rate
+ long now = System.currentTimeMillis();
+ long interval = now-lastRun;
+
+ if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return;
+ lastRun = now;
+ AuthzTrans trans = env.newTransNoAvg();
+ Result<Void> rv = cidao.check(trans);
+ if(rv.status!=Result.OK) {
+ env.error().log("Error in CacheInfo Refresh",rv.details);
+ }
+ if(env.debug().isLoggable()) {
+ StringBuilder sb = new StringBuilder("Cache Info Refresh: ");
+ trans.auditTrail(0, sb, Env.REMOTE);
+ env.debug().log(sb);
+ }
+ }
+ }
}
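
Cached.cacheIdx() above re-implements String's 31-based hash so that a key maps to the same segment across Java versions, then forces the result into the range [0, segSize). A standalone illustration of that mapping follows; it uses Math.floorMod for the negative wrap, so the index stays in range but may differ from the sign-flip result in the class above, and the segment count is a made-up example:

// Standalone illustration of the segment hashing idea in Cached.cacheIdx().
public class SegmentIdxDemo {
    static int cacheIdx(String key, int segSize) {
        int h = 0;
        for (int i = 0; i < key.length(); i++) {
            h = 31 * h + key.charAt(i);   // same 31-based rolling hash as java.lang.String
        }
        return Math.floorMod(h, segSize); // always in [0, segSize)
    }

    public static void main(String[] args) {
        int segSize = 40; // hypothetical segment count
        for (String key : new String[] {"org.onap", "org.onap.aaf", "com.att"}) {
            System.out.println(key + " -> segment " + cacheIdx(key, segSize));
        }
    }
}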
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java
index f468dba4..9375502c 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CachedDAO.java
@@ -40,189 +40,189 @@ import org.onap.aaf.misc.env.Trans;
* @param <DATA>
*/
public class CachedDAO<TRANS extends Trans,D extends DAO<TRANS,DATA>,DATA extends Cacheable>
- extends Cached<TRANS,DATA> implements DAO_RO<TRANS,DATA>{
-// private final String dirty_str;
-
- private final D dao;
+ extends Cached<TRANS,DATA> implements DAO_RO<TRANS,DATA>{
+// private final String dirty_str;
+
+ private final D dao;
- public CachedDAO(D dao, CIDAO<TRANS> info, int segsize, long expireIn) {
- super(info, dao.table(), segsize, expireIn);
-
- // Instantiate a new Cache per DAO name (so separate instances use the same cache)
- this.dao = dao;
- //read_str = "Cached READ for " + dao.table();
-// dirty_str = "Cache DIRTY on " + dao.table();
- if(dao instanceof CassDAOImpl) {
- ((CassDAOImpl<?,?>)dao).cache = this;
- }
- }
-
- public static<T extends Trans, DA extends DAO<T,DT>, DT extends Cacheable>
- CachedDAO<T,DA,DT> create(DA dao, CIDAO<T> info, int segsize, long expireIn) {
- return new CachedDAO<T,DA,DT>(dao,info, segsize, expireIn);
- }
+ public CachedDAO(D dao, CIDAO<TRANS> info, int segsize, long expireIn) {
+ super(info, dao.table(), segsize, expireIn);
+
+ // Instantiate a new Cache per DAO name (so separate instances use the same cache)
+ this.dao = dao;
+ //read_str = "Cached READ for " + dao.table();
+// dirty_str = "Cache DIRTY on " + dao.table();
+ if(dao instanceof CassDAOImpl) {
+ ((CassDAOImpl<?,?>)dao).cache = this;
+ }
+ }
+
+ public static<T extends Trans, DA extends DAO<T,DT>, DT extends Cacheable>
+ CachedDAO<T,DA,DT> create(DA dao, CIDAO<T> info, int segsize, long expireIn) {
+ return new CachedDAO<T,DA,DT>(dao,info, segsize, expireIn);
+ }
- public void add(DATA data) {
- String key = keyFromObjs(dao.keyFrom(data));
- List<DATA> list = new ArrayList<>();
- list.add(data);
- super.add(key,list);
- }
-
-// public void invalidate(TRANS trans, Object ... objs) {
-// TimeTaken tt = trans.start(dirty_str, Env.SUB);
-// try {
-// super.invalidate(keyFromObjs(objs));
-// } finally {
-// tt.done();
-// }
-// }
+ public void add(DATA data) {
+ String key = keyFromObjs(dao.keyFrom(data));
+ List<DATA> list = new ArrayList<>();
+ list.add(data);
+ super.add(key,list);
+ }
+
+// public void invalidate(TRANS trans, Object ... objs) {
+// TimeTaken tt = trans.start(dirty_str, Env.SUB);
+// try {
+// super.invalidate(keyFromObjs(objs));
+// } finally {
+// tt.done();
+// }
+// }
- public static String keyFromObjs(Object ... objs) {
- String key;
- if(objs.length==1 && objs[0] instanceof String) {
- key = (String)objs[0];
- } else {
- StringBuilder sb = new StringBuilder();
- boolean first = true;
- for(Object o : objs) {
- if(o!=null) {
- if(first) {
- first =false;
- } else {
- sb.append('|');
- }
- sb.append(o.toString());
- }
- }
- key = sb.toString();
- }
- return key;
- }
+ public static String keyFromObjs(Object ... objs) {
+ String key;
+ if(objs.length==1 && objs[0] instanceof String) {
+ key = (String)objs[0];
+ } else {
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ for(Object o : objs) {
+ if(o!=null) {
+ if(first) {
+ first =false;
+ } else {
+ sb.append('|');
+ }
+ sb.append(o.toString());
+ }
+ }
+ key = sb.toString();
+ }
+ return key;
+ }
- public Result<DATA> create(TRANS trans, DATA data) {
- Result<DATA> d = dao.create(trans,data);
- if(d.status==Status.OK) {
- add(d.value);
- } else {
- trans.error().log(d.errorString());
- }
- // dao.create already modifies cache. Do not invalidate again. invalidate(trans,data);
- return d;
- }
+ public Result<DATA> create(TRANS trans, DATA data) {
+ Result<DATA> d = dao.create(trans,data);
+ if(d.status==Status.OK) {
+ add(d.value);
+ } else {
+ trans.error().log(d.errorString());
+ }
+ // dao.create already modifies cache. Do not invalidate again. invalidate(trans,data);
+ return d;
+ }
- protected class DAOGetter implements Getter<DATA> {
- protected TRANS trans;
- protected Object objs[];
- protected D dao;
- public Result<List<DATA>> result;
+ protected class DAOGetter implements Getter<DATA> {
+ protected TRANS trans;
+ protected Object objs[];
+ protected D dao;
+ public Result<List<DATA>> result;
- public DAOGetter(TRANS trans, D dao, Object ... objs) {
- this.trans = trans;
- this.dao = dao;
- this.objs = objs;
- }
-
- /**
- * Separated into single call for easy overloading
- * @return
- */
- public Result<List<DATA>> call() {
- return dao.read(trans, objs);
- }
-
- @Override
- public final Result<List<DATA>> get() {
- return call();
-// if(result.isOKhasData()) { // Note, given above logic, could exist, but stale
-// return result.value;
-// } else {
-// return null;
-// }
- }
- }
+ public DAOGetter(TRANS trans, D dao, Object ... objs) {
+ this.trans = trans;
+ this.dao = dao;
+ this.objs = objs;
+ }
+
+ /**
+ * Separated into single call for easy overloading
+ * @return
+ */
+ public Result<List<DATA>> call() {
+ return dao.read(trans, objs);
+ }
+
+ @Override
+ public final Result<List<DATA>> get() {
+ return call();
+// if(result.isOKhasData()) { // Note, given above logic, could exist, but stale
+// return result.value;
+// } else {
+// return null;
+// }
+ }
+ }
- @Override
- public Result<List<DATA>> read(final TRANS trans, final Object ... objs) {
- DAOGetter getter = new DAOGetter(trans,dao,objs);
- return get(trans, keyFromObjs(objs),getter);
-// if(ld!=null) {
-// return Result.ok(ld);//.emptyList(ld.isEmpty());
-// }
-// // Result Result if exists
-// if(getter.result==null) {
-// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
-// }
-// return getter.result;
- }
+ @Override
+ public Result<List<DATA>> read(final TRANS trans, final Object ... objs) {
+ DAOGetter getter = new DAOGetter(trans,dao,objs);
+ return get(trans, keyFromObjs(objs),getter);
+// if(ld!=null) {
+// return Result.ok(ld);//.emptyList(ld.isEmpty());
+// }
+// // Result Result if exists
+// if(getter.result==null) {
+// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
+// }
+// return getter.result;
+ }
- // Slight Improved performance available when String and Obj versions are known.
- public Result<List<DATA>> read(final String key, final TRANS trans, final Object[] objs) {
- DAOGetter getter = new DAOGetter(trans,dao,objs);
- return get(trans, key, getter);
-// if(ld!=null) {
-// return Result.ok(ld);//.emptyList(ld.isEmpty());
-// }
-// // Result Result if exists
-// if(getter.result==null) {
-// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
-// }
-// return getter.result;
- }
-
- @Override
- public Result<List<DATA>> read(TRANS trans, DATA data) {
- return read(trans,dao.keyFrom(data));
- }
- public Result<Void> update(TRANS trans, DATA data) {
- Result<Void> d = dao.update(trans, data);
- if(d.status==Status.OK) {
- add(data);
- } else {
- trans.error().log(d.errorString());
- }
- return d;
- }
+ // Slight Improved performance available when String and Obj versions are known.
+ public Result<List<DATA>> read(final String key, final TRANS trans, final Object[] objs) {
+ DAOGetter getter = new DAOGetter(trans,dao,objs);
+ return get(trans, key, getter);
+// if(ld!=null) {
+// return Result.ok(ld);//.emptyList(ld.isEmpty());
+// }
+// // Result Result if exists
+// if(getter.result==null) {
+// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table());
+// }
+// return getter.result;
+ }
+
+ @Override
+ public Result<List<DATA>> read(TRANS trans, DATA data) {
+ return read(trans,dao.keyFrom(data));
+ }
+ public Result<Void> update(TRANS trans, DATA data) {
+ Result<Void> d = dao.update(trans, data);
+ if(d.status==Status.OK) {
+ add(data);
+ } else {
+ trans.error().log(d.errorString());
+ }
+ return d;
+ }
- public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
- if(reread) { // If reread, get from Cache, if possible, not DB exclusively
- Result<List<DATA>> rd = read(trans,data);
- if(rd.notOK()) {
- return Result.err(rd);
-// } else {
-// trans.error().log(rd.errorString());
- }
- if(rd.isEmpty()) {
- data.invalidate(this);
- return Result.err(Status.ERR_NotFound,"Not Found");
- }
- data = rd.value.get(0);
- }
- Result<Void> rv=dao.delete(trans, data, false);
- data.invalidate(this);
- return rv;
- }
-
- @Override
- public void close(TRANS trans) {
- if(dao!=null) {
- dao.close(trans);
- }
- }
-
+ public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
+ if(reread) { // If reread, get from Cache, if possible, not DB exclusively
+ Result<List<DATA>> rd = read(trans,data);
+ if(rd.notOK()) {
+ return Result.err(rd);
+// } else {
+// trans.error().log(rd.errorString());
+ }
+ if(rd.isEmpty()) {
+ data.invalidate(this);
+ return Result.err(Status.ERR_NotFound,"Not Found");
+ }
+ data = rd.value.get(0);
+ }
+ Result<Void> rv=dao.delete(trans, data, false);
+ data.invalidate(this);
+ return rv;
+ }
+
+ @Override
+ public void close(TRANS trans) {
+ if(dao!=null) {
+ dao.close(trans);
+ }
+ }
+
- @Override
- public String table() {
- return dao.table();
- }
-
- public D dao() {
- return dao;
- }
-
- public void invalidate(TRANS trans, DATA data) {
+ @Override
+ public String table() {
+ return dao.table();
+ }
+
+ public D dao() {
+ return dao;
+ }
+
+ public void invalidate(TRANS trans, DATA data) {
if(info.touch(trans, dao.table(),data.invalidate(this)).notOK()) {
- trans.error().log("Cannot touch CacheInfo for Role");
- }
- }
+ trans.error().log("Cannot touch CacheInfo for Role");
+ }
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java
index c213a04b..9b168a83 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassAccess.java
@@ -38,186 +38,186 @@ import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy;
import com.datastax.driver.core.policies.TokenAwarePolicy;
public class CassAccess {
- public static final String KEYSPACE = "authz";
- public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
- public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
- public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
- public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
- public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
- private static final List<Resettable> resetExceptions = new ArrayList<>();
- public static final String ERR_ACCESS_MSG = "Accessing Backend";
- private static Builder cb = null;
+ public static final String KEYSPACE = "authz";
+ public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
+ public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
+ public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
+ public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
+ public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
+ private static final List<Resettable> resetExceptions = new ArrayList<>();
+ public static final String ERR_ACCESS_MSG = "Accessing Backend";
+ private static Builder cb = null;
- /**
- * To create DCAwareRoundRobing Policy:
- * Need Properties
- * LATITUDE (or AFT_LATITUDE)
- * LONGITUDE (or AFT_LONGITUDE)
- * CASSANDRA CLUSTERS with additional information:
- * machine:DC:lat:long,machine:DC:lat:long
- * @param env
- * @param prefix
- * @return
- * @throws APIException
- * @throws IOException
- */
+ /**
+     * To create a DCAwareRoundRobin Policy:
+ * Need Properties
+ * LATITUDE (or AFT_LATITUDE)
+ * LONGITUDE (or AFT_LONGITUDE)
+ * CASSANDRA CLUSTERS with additional information:
+ * machine:DC:lat:long,machine:DC:lat:long
+ * @param env
+ * @param prefix
+ * @return
+ * @throws APIException
+ * @throws IOException
+ */
-// @SuppressWarnings("deprecation")
- public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException {
- if(cb == null) {
- String pre;
- if(prefix==null) {
- pre="";
- } else {
- env.info().log("Cassandra Connection for ",prefix);
- pre = prefix+'.';
- }
- cb = Cluster.builder();
- String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,env.getProperty(CASSANDRA_CLUSTERS_PORT,"9042"));
- if(str!=null) {
- env.init().log("Cass Port = ",str );
- cb.withPort(Integer.parseInt(str));
- }
- str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,env.getProperty(CASSANDRA_CLUSTERS_USER_NAME,null));
- if(str!=null) {
- env.init().log("Cass User = ",str );
- String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,env.getProperty(CASSANDRA_CLUSTERS_PASSWORD,null));
- if(epass==null) {
- throw new APIException("No Password configured for " + str);
- }
- //TODO Figure out way to ensure Decryptor setting in AuthzEnv
- if(env instanceof AuthzEnv) {
- cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true));
- } else {
- cb.withCredentials(str, env.decryptor().decrypt(epass));
- }
- }
-
- str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,env.getProperty(CASSANDRA_RESET_EXCEPTIONS,null));
- if(str!=null) {
- env.init().log("Cass ResetExceptions = ",str );
- for(String ex : Split.split(',', str)) {
- resetExceptions.add(new Resettable(env,ex));
- }
- }
-
- str = env.getProperty(Config.CADI_LATITUDE);
- Double lat = str!=null?Double.parseDouble(str):null;
- str = env.getProperty(Config.CADI_LONGITUDE);
- Double lon = str!=null?Double.parseDouble(str):null;
- if(lat == null || lon == null) {
- throw new APIException(Config.CADI_LATITUDE + " and/or " + Config.CADI_LONGITUDE + " are not set");
- }
-
- env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon);
-
- str = env.getProperty(pre+CASSANDRA_CLUSTERS,env.getProperty(CASSANDRA_CLUSTERS,"localhost"));
- env.init().log("Cass Clusters = ",str );
- String[] machs = Split.split(',', str);
- String[] cpoints = new String[machs.length];
- String bestDC = null;
- int numInBestDC = 1;
- double mlat, mlon,temp,distance = Double.MAX_VALUE;
- for(int i=0;i<machs.length;++i) {
- String[] minfo = Split.split(':',machs[i]);
- if(minfo.length>0) {
- cpoints[i]=minfo[0];
- }
-
- if(minfo.length>3) {
- if(minfo[1].equals(bestDC)) {
- ++numInBestDC;
- } else {
- // Calc closest DC with Great Circle
- mlat = Double.parseDouble(minfo[2]);
- mlon = Double.parseDouble(minfo[3]);
- // Note: GreatCircle Distance is always >= 0.0 (not negative)
- if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) < distance) {
- distance = temp;
- if(bestDC==null || !bestDC.equals(minfo[1])) {
- bestDC = minfo[1];
- numInBestDC = 1;
- }
- }
- }
- }
- }
-
- cb.addContactPoints(cpoints);
-
- if(bestDC!=null) {
- // 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations
- // Local DC Aware Load Balancing appears to have the highest normal performance, with the best
- // Degraded Accuracy
- DCAwareRoundRobinPolicy dcrrPolicy = DCAwareRoundRobinPolicy.builder()
- .withLocalDc(bestDC)
- .withUsedHostsPerRemoteDc(numInBestDC)
- .build();
-// cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy(
-// bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */));
- cb.withLoadBalancingPolicy(new TokenAwarePolicy(dcrrPolicy));
- env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy with best DC at %s with emergency remote of up to %d node(s)"
- ,bestDC, numInBestDC);
- } else {
- env.init().printf("Cassandra is using Default Policy, which is not DC aware");
- }
- }
- return cb.build();
- }
-
- private static class Resettable {
- private Class<? extends Exception> cls;
- private List<String> messages;
-
- @SuppressWarnings("unchecked")
- public Resettable(Env env, String propData) throws APIException {
- if(propData!=null && propData.length()>1) {
- String[] split = Split.split(':', propData);
- if(split.length>0) {
- try {
- cls = (Class<? extends Exception>)Class.forName(split[0]);
- } catch (ClassNotFoundException e) {
- throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded");
- }
- }
- if(split.length>1) {
- messages=new ArrayList<>();
- for(int i=1;i<split.length;++i) {
- String str = split[i];
- int start = str.startsWith("\"")?1:0;
- int end = str.length()-(str.endsWith("\"")?1:0);
- messages.add(split[i].substring(start, end));
- }
- } else {
- messages = null;
- }
- }
- }
-
- public boolean matches(Exception ex) {
- if(ex.getClass().equals(cls)) {
- if(messages!=null) {
- String msg = ex.getMessage();
- for(String m : messages) {
- if(msg.contains(m)) {
- return true;
- }
- }
- }
- }
- return false;
- }
- }
-
- public static final boolean isResetException(Exception e) {
- if(e==null) {
- return true;
- }
- for(Resettable re : resetExceptions) {
- if(re.matches(e)) {
- return true;
- }
- }
- return false;
- }
+// @SuppressWarnings("deprecation")
+ public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException {
+ if(cb == null) {
+ String pre;
+ if(prefix==null) {
+ pre="";
+ } else {
+ env.info().log("Cassandra Connection for ",prefix);
+ pre = prefix+'.';
+ }
+ cb = Cluster.builder();
+ String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,env.getProperty(CASSANDRA_CLUSTERS_PORT,"9042"));
+ if(str!=null) {
+ env.init().log("Cass Port = ",str );
+ cb.withPort(Integer.parseInt(str));
+ }
+ str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,env.getProperty(CASSANDRA_CLUSTERS_USER_NAME,null));
+ if(str!=null) {
+ env.init().log("Cass User = ",str );
+ String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,env.getProperty(CASSANDRA_CLUSTERS_PASSWORD,null));
+ if(epass==null) {
+ throw new APIException("No Password configured for " + str);
+ }
+ //TODO Figure out way to ensure Decryptor setting in AuthzEnv
+ if(env instanceof AuthzEnv) {
+ cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true));
+ } else {
+ cb.withCredentials(str, env.decryptor().decrypt(epass));
+ }
+ }
+
+ str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,env.getProperty(CASSANDRA_RESET_EXCEPTIONS,null));
+ if(str!=null) {
+ env.init().log("Cass ResetExceptions = ",str );
+ for(String ex : Split.split(',', str)) {
+ resetExceptions.add(new Resettable(env,ex));
+ }
+ }
+
+ str = env.getProperty(Config.CADI_LATITUDE);
+ Double lat = str!=null?Double.parseDouble(str):null;
+ str = env.getProperty(Config.CADI_LONGITUDE);
+ Double lon = str!=null?Double.parseDouble(str):null;
+ if(lat == null || lon == null) {
+ throw new APIException(Config.CADI_LATITUDE + " and/or " + Config.CADI_LONGITUDE + " are not set");
+ }
+
+ env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon);
+
+ str = env.getProperty(pre+CASSANDRA_CLUSTERS,env.getProperty(CASSANDRA_CLUSTERS,"localhost"));
+ env.init().log("Cass Clusters = ",str );
+ String[] machs = Split.split(',', str);
+ String[] cpoints = new String[machs.length];
+ String bestDC = null;
+ int numInBestDC = 1;
+ double mlat, mlon,temp,distance = Double.MAX_VALUE;
+ for(int i=0;i<machs.length;++i) {
+ String[] minfo = Split.split(':',machs[i]);
+ if(minfo.length>0) {
+ cpoints[i]=minfo[0];
+ }
+
+ if(minfo.length>3) {
+ if(minfo[1].equals(bestDC)) {
+ ++numInBestDC;
+ } else {
+ // Calc closest DC with Great Circle
+ mlat = Double.parseDouble(minfo[2]);
+ mlon = Double.parseDouble(minfo[3]);
+ // Note: GreatCircle Distance is always >= 0.0 (not negative)
+ if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) < distance) {
+ distance = temp;
+ if(bestDC==null || !bestDC.equals(minfo[1])) {
+ bestDC = minfo[1];
+ numInBestDC = 1;
+ }
+ }
+ }
+ }
+ }
+
+ cb.addContactPoints(cpoints);
+
+ if(bestDC!=null) {
+ // 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations
+ // Local DC Aware Load Balancing appears to have the highest normal performance, with the best
+ // Degraded Accuracy
+ DCAwareRoundRobinPolicy dcrrPolicy = DCAwareRoundRobinPolicy.builder()
+ .withLocalDc(bestDC)
+ .withUsedHostsPerRemoteDc(numInBestDC)
+ .build();
+// cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy(
+// bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */));
+ cb.withLoadBalancingPolicy(new TokenAwarePolicy(dcrrPolicy));
+ env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy with best DC at %s with emergency remote of up to %d node(s)"
+ ,bestDC, numInBestDC);
+ } else {
+ env.init().printf("Cassandra is using Default Policy, which is not DC aware");
+ }
+ }
+ return cb.build();
+ }
+
+ private static class Resettable {
+ private Class<? extends Exception> cls;
+ private List<String> messages;
+
+ @SuppressWarnings("unchecked")
+ public Resettable(Env env, String propData) throws APIException {
+ if(propData!=null && propData.length()>1) {
+ String[] split = Split.split(':', propData);
+ if(split.length>0) {
+ try {
+ cls = (Class<? extends Exception>)Class.forName(split[0]);
+ } catch (ClassNotFoundException e) {
+ throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded");
+ }
+ }
+ if(split.length>1) {
+ messages=new ArrayList<>();
+ for(int i=1;i<split.length;++i) {
+ String str = split[i];
+ int start = str.startsWith("\"")?1:0;
+ int end = str.length()-(str.endsWith("\"")?1:0);
+ messages.add(split[i].substring(start, end));
+ }
+ } else {
+ messages = null;
+ }
+ }
+ }
+
+ public boolean matches(Exception ex) {
+ if(ex.getClass().equals(cls)) {
+ if(messages!=null) {
+ String msg = ex.getMessage();
+ for(String m : messages) {
+ if(msg.contains(m)) {
+ return true;
+ }
+ }
+ }
+ }
+ return false;
+ }
+ }
+
+ public static final boolean isResetException(Exception e) {
+ if(e==null) {
+ return true;
+ }
+ for(Resettable re : resetExceptions) {
+ if(re.matches(e)) {
+ return true;
+ }
+ }
+ return false;
+ }
}
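[Editor's note, not part of this change] A usage sketch for the cluster() factory above. It assumes an AuthzEnv that exposes setProperty and uses the machine:DC:lat:long contact-point format parsed in the loop above; host names and coordinates are examples only:

    AuthzEnv env = new AuthzEnv();
    env.setProperty(Config.CADI_LATITUDE, "32.78");                  // service latitude (example)
    env.setProperty(Config.CADI_LONGITUDE, "-96.80");                // service longitude (example)
    env.setProperty(CassAccess.CASSANDRA_CLUSTERS,
            "cass1:DC1:32.7:-96.8,cass2:DC2:40.7:-74.0");            // hypothetical hosts

    // The Builder is cached statically after the first call; each call returns a newly built Cluster.
    Cluster cluster = CassAccess.cluster(env, null);
    Session session = cluster.connect(CassAccess.KEYSPACE);          // keyspace "authz"

The DCAwareRoundRobinPolicy is configured only when the cluster entries carry a DC plus latitude and longitude; otherwise the driver's default, non DC-aware policy is used, as logged above.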
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java
index f7cdec5e..fae8f693 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/CassDAOImpl.java
@@ -47,302 +47,302 @@ import com.datastax.driver.core.ResultSetFuture;
* @param <DATA>
*/
public class CassDAOImpl<TRANS extends TransStore,DATA> extends AbsCassDAO<TRANS, DATA> implements DAO<TRANS,DATA> {
- public static final String USER_NAME = "__USER_NAME__";
- protected static final String CREATE_SP = "CREATE ";
- protected static final String UPDATE_SP = "UPDATE ";
- protected static final String DELETE_SP = "DELETE ";
- protected static final String SELECT_SP = "SELECT ";
+ public static final String USER_NAME = "__USER_NAME__";
+ protected static final String CREATE_SP = "CREATE ";
+ protected static final String UPDATE_SP = "UPDATE ";
+ protected static final String DELETE_SP = "DELETE ";
+ protected static final String SELECT_SP = "SELECT ";
- protected final String C_TEXT = getClass().getSimpleName() + " CREATE";
- protected final String R_TEXT = getClass().getSimpleName() + " READ";
- protected final String U_TEXT = getClass().getSimpleName() + " UPDATE";
- protected final String D_TEXT = getClass().getSimpleName() + " DELETE";
- private String table;
-
- protected final ConsistencyLevel readConsistency,writeConsistency;
-
- // Setteable only by CachedDAO
- protected Cached<?, ?> cache;
+ protected final String C_TEXT = getClass().getSimpleName() + " CREATE";
+ protected final String R_TEXT = getClass().getSimpleName() + " READ";
+ protected final String U_TEXT = getClass().getSimpleName() + " UPDATE";
+ protected final String D_TEXT = getClass().getSimpleName() + " DELETE";
+ private String table;
+
+ protected final ConsistencyLevel readConsistency,writeConsistency;
+
+    // Settable only by CachedDAO
+ protected Cached<?, ?> cache;
- /**
- * A Constructor from the originating Cluster. This DAO will open the Session at need,
- * and shutdown the session when "close()" is called.
- *
- * @param cluster
- * @param keyspace
- * @param dataClass
- */
- public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
- super(trans, name, cluster,keyspace,dataClass);
- this.table = table;
- readConsistency = read;
- writeConsistency = write;
- }
-
- /**
- * A Constructor to share Session with other DAOs.
- *
- * This method get the Session and Cluster information from the calling DAO, and won't
- * touch the Session on closure.
- *
- * @param aDao
- * @param dataClass
- */
- public CassDAOImpl(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
- super(trans, name, aDao,dataClass);
- this.table = table;
- readConsistency = read;
- writeConsistency = write;
- }
+ /**
+ * A Constructor from the originating Cluster. This DAO will open the Session at need,
+ * and shutdown the session when "close()" is called.
+ *
+ * @param cluster
+ * @param keyspace
+ * @param dataClass
+ */
+ public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
+ super(trans, name, cluster,keyspace,dataClass);
+ this.table = table;
+ readConsistency = read;
+ writeConsistency = write;
+ }
+
+ /**
+ * A Constructor to share Session with other DAOs.
+ *
+     * This method gets the Session and Cluster information from the calling DAO, and won't
+ * touch the Session on closure.
+ *
+ * @param aDao
+ * @param dataClass
+ */
+ public CassDAOImpl(TRANS trans, String name, AbsCassDAO<TRANS,?> aDao, Class<DATA> dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) {
+ super(trans, name, aDao,dataClass);
+ this.table = table;
+ readConsistency = read;
+ writeConsistency = write;
+ }
- protected PSInfo createPS;
- protected PSInfo readPS;
- protected PSInfo updatePS;
- protected PSInfo deletePS;
- protected boolean async=false;
+ protected PSInfo createPS;
+ protected PSInfo readPS;
+ protected PSInfo updatePS;
+ protected PSInfo deletePS;
+ protected boolean async=false;
- public void async(boolean bool) {
- async = bool;
- }
+ public void async(boolean bool) {
+ async = bool;
+ }
- public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader) {
- return setCRUD(trans, table, dc, loader, -1);
- }
-
- public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader, int max) {
- Field[] fields = dc.getDeclaredFields();
- int end = max>=0 & max<fields.length?max:fields.length;
- // get keylimit from a non-null Loader
- int keylimit = loader.keylimit();
-
- StringBuilder sbfc = new StringBuilder();
- StringBuilder sbq = new StringBuilder();
- StringBuilder sbwc = new StringBuilder();
- StringBuilder sbup = new StringBuilder();
-
- if(keylimit>0) {
- for(int i=0;i<end;++i) {
- if(i>0) {
- sbfc.append(',');
- sbq.append(',');
- if(i<keylimit) {
- sbwc.append(" AND ");
- }
- }
- sbfc.append(fields[i].getName());
- sbq.append('?');
- if(i>=keylimit) {
- if(i>keylimit) {
- sbup.append(',');
- }
- sbup.append(fields[i].getName());
- sbup.append("=?");
- }
- if(i<keylimit) {
- sbwc.append(fields[i].getName());
- sbwc.append("=?");
- }
- }
-
- createPS = new PSInfo(trans, "INSERT INTO " + table + " ("+ sbfc +") VALUES ("+ sbq +");",loader,writeConsistency);
-
- readPS = new PSInfo(trans, "SELECT " + sbfc + " FROM " + table + " WHERE " + sbwc + ';',loader,readConsistency);
-
- // Note: UPDATES can't compile if there are no fields besides keys... Use "Insert"
- if(sbup.length()==0) {
- updatePS = createPS; // the same as an insert
- } else {
- updatePS = new PSInfo(trans, "UPDATE " + table + " SET " + sbup + " WHERE " + sbwc + ';',loader,writeConsistency);
- }
-
- deletePS = new PSInfo(trans, "DELETE FROM " + table + " WHERE " + sbwc + ';',loader,writeConsistency);
- }
- return new String[] {sbfc.toString(), sbq.toString(), sbup.toString(), sbwc.toString()};
- }
+ public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader) {
+ return setCRUD(trans, table, dc, loader, -1);
+ }
+
+ public final String[] setCRUD(TRANS trans, String table, Class<?> dc,Loader<DATA> loader, int max) {
+ Field[] fields = dc.getDeclaredFields();
+ int end = max>=0 & max<fields.length?max:fields.length;
+ // get keylimit from a non-null Loader
+ int keylimit = loader.keylimit();
+
+ StringBuilder sbfc = new StringBuilder();
+ StringBuilder sbq = new StringBuilder();
+ StringBuilder sbwc = new StringBuilder();
+ StringBuilder sbup = new StringBuilder();
+
+ if(keylimit>0) {
+ for(int i=0;i<end;++i) {
+ if(i>0) {
+ sbfc.append(',');
+ sbq.append(',');
+ if(i<keylimit) {
+ sbwc.append(" AND ");
+ }
+ }
+ sbfc.append(fields[i].getName());
+ sbq.append('?');
+ if(i>=keylimit) {
+ if(i>keylimit) {
+ sbup.append(',');
+ }
+ sbup.append(fields[i].getName());
+ sbup.append("=?");
+ }
+ if(i<keylimit) {
+ sbwc.append(fields[i].getName());
+ sbwc.append("=?");
+ }
+ }
+
+ createPS = new PSInfo(trans, "INSERT INTO " + table + " ("+ sbfc +") VALUES ("+ sbq +");",loader,writeConsistency);
+
+ readPS = new PSInfo(trans, "SELECT " + sbfc + " FROM " + table + " WHERE " + sbwc + ';',loader,readConsistency);
+
+ // Note: UPDATES can't compile if there are no fields besides keys... Use "Insert"
+ if(sbup.length()==0) {
+ updatePS = createPS; // the same as an insert
+ } else {
+ updatePS = new PSInfo(trans, "UPDATE " + table + " SET " + sbup + " WHERE " + sbwc + ';',loader,writeConsistency);
+ }
+
+ deletePS = new PSInfo(trans, "DELETE FROM " + table + " WHERE " + sbwc + ';',loader,writeConsistency);
+ }
+ return new String[] {sbfc.toString(), sbq.toString(), sbup.toString(), sbwc.toString()};
+ }
- public void replace(CRUD crud, PSInfo psInfo) {
- switch(crud) {
- case create: createPS = psInfo; break;
- case read: readPS = psInfo; break;
- case update: updatePS = psInfo; break;
- case delete: deletePS = psInfo; break;
- }
- }
+ public void replace(CRUD crud, PSInfo psInfo) {
+ switch(crud) {
+ case create: createPS = psInfo; break;
+ case read: readPS = psInfo; break;
+ case update: updatePS = psInfo; break;
+ case delete: deletePS = psInfo; break;
+ }
+ }
- public void disable(CRUD crud) {
- switch(crud) {
- case create: createPS = null; break;
- case read: readPS = null; break;
- case update: updatePS = null; break;
- case delete: deletePS = null; break;
- }
- }
+ public void disable(CRUD crud) {
+ switch(crud) {
+ case create: createPS = null; break;
+ case read: readPS = null; break;
+ case update: updatePS = null; break;
+ case delete: deletePS = null; break;
+ }
+ }
-
- /**
- * Given a DATA object, extract the individual elements from the Data into an Object Array for the
- * execute element.
- */
- public Result<DATA> create(TRANS trans, DATA data) {
- if(createPS==null) {
- return Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName());
- }
- if(async) /*ResultSetFuture */ {
- Result<ResultSetFuture> rs = createPS.execAsync(trans, C_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- } else {
- Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- }
- wasModified(trans, CRUD.create, data);
- return Result.ok(data);
- }
+
+ /**
+ * Given a DATA object, extract the individual elements from the Data into an Object Array for the
+ * execute element.
+ */
+ public Result<DATA> create(TRANS trans, DATA data) {
+ if(createPS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName());
+ }
+ if(async) /*ResultSetFuture */ {
+ Result<ResultSetFuture> rs = createPS.execAsync(trans, C_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ } else {
+ Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ }
+ wasModified(trans, CRUD.create, data);
+ return Result.ok(data);
+ }
- /**
- * Read the Unique Row associated with Full Keys
- */
- public Result<List<DATA>> read(TRANS trans, DATA data) {
- if(readPS==null) {
- return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
- }
- return readPS.read(trans, R_TEXT, data);
- }
+ /**
+ * Read the Unique Row associated with Full Keys
+ */
+ public Result<List<DATA>> read(TRANS trans, DATA data) {
+ if(readPS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+ }
+ return readPS.read(trans, R_TEXT, data);
+ }
- public Result<List<DATA>> read(TRANS trans, Object ... key) {
- if(readPS==null) {
- return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
- }
- return readPS.read(trans, R_TEXT, key);
- }
-
- public Result<DATA> readPrimKey(TRANS trans, Object ... key) {
- if(readPS==null) {
- return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
- }
- Result<List<DATA>> rld = readPS.read(trans, R_TEXT, key);
- if(rld.isOK()) {
- if(rld.isEmpty()) {
- return Result.err(Result.ERR_NotFound,rld.details);
- } else {
- return Result.ok(rld.value.get(0));
- }
- } else {
- return Result.err(rld);
- }
- }
+ public Result<List<DATA>> read(TRANS trans, Object ... key) {
+ if(readPS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+ }
+ return readPS.read(trans, R_TEXT, key);
+ }
+
+ public Result<DATA> readPrimKey(TRANS trans, Object ... key) {
+ if(readPS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName());
+ }
+ Result<List<DATA>> rld = readPS.read(trans, R_TEXT, key);
+ if(rld.isOK()) {
+ if(rld.isEmpty()) {
+ return Result.err(Result.ERR_NotFound,rld.details);
+ } else {
+ return Result.ok(rld.value.get(0));
+ }
+ } else {
+ return Result.err(rld);
+ }
+ }
- public Result<Void> update(TRANS trans, DATA data) {
- return update(trans, data, async);
- }
+ public Result<Void> update(TRANS trans, DATA data) {
+ return update(trans, data, async);
+ }
- public Result<Void> update(TRANS trans, DATA data, boolean async) {
- if(updatePS==null) {
- return Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName());
- }
- if(async)/* ResultSet rs =*/ {
- Result<ResultSetFuture> rs = updatePS.execAsync(trans, U_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- } else {
- Result<ResultSet> rs = updatePS.exec(trans, U_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- }
-
- wasModified(trans, CRUD.update, data);
- return Result.ok();
- }
+ public Result<Void> update(TRANS trans, DATA data, boolean async) {
+ if(updatePS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName());
+ }
+ if(async)/* ResultSet rs =*/ {
+ Result<ResultSetFuture> rs = updatePS.execAsync(trans, U_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ } else {
+ Result<ResultSet> rs = updatePS.exec(trans, U_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ }
+
+ wasModified(trans, CRUD.update, data);
+ return Result.ok();
+ }
- // This method Sig for Cached...
- public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
- if(deletePS==null) {
- return Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName());
- }
- // Since Deleting will be stored off, for possible re-constitution, need the whole thing
- if(reread) {
- Result<List<DATA>> rd = read(trans,data);
- if(rd.notOK()) {
- return Result.err(rd);
- }
- if(rd.isEmpty()) {
- return Result.err(Status.ERR_NotFound,"Not Found");
- }
- for(DATA d : rd.value) {
- if(async) {
- Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, d);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- } else {
- Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, d);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- }
- wasModified(trans, CRUD.delete, d);
- }
- } else {
- if(async)/* ResultSet rs =*/ {
- Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- } else {
- Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- }
- wasModified(trans, CRUD.delete, data);
- }
- return Result.ok();
- }
-
- public final Object[] keyFrom(DATA data) {
- return createPS.keyFrom(data);
- }
+ // This method Sig for Cached...
+ public Result<Void> delete(TRANS trans, DATA data, boolean reread) {
+ if(deletePS==null) {
+ return Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName());
+ }
+ // Since Deleting will be stored off, for possible re-constitution, need the whole thing
+ if(reread) {
+ Result<List<DATA>> rd = read(trans,data);
+ if(rd.notOK()) {
+ return Result.err(rd);
+ }
+ if(rd.isEmpty()) {
+ return Result.err(Status.ERR_NotFound,"Not Found");
+ }
+ for(DATA d : rd.value) {
+ if(async) {
+ Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, d);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ } else {
+ Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, d);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ }
+ wasModified(trans, CRUD.delete, d);
+ }
+ } else {
+ if(async)/* ResultSet rs =*/ {
+ Result<ResultSetFuture> rs = deletePS.execAsync(trans, D_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ } else {
+ Result<ResultSet> rs = deletePS.exec(trans, D_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ }
+ wasModified(trans, CRUD.delete, data);
+ }
+ return Result.ok();
+ }
+
+ public final Object[] keyFrom(DATA data) {
+ return createPS.keyFrom(data);
+ }
- @Override
- public String table() {
- return table;
- }
-
- public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";
- public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";
- protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) {
- String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table);
- if(prop==null) {
- prop = trans.getProperty(CASS_READ_CONSISTENCY);
- if(prop==null) {
- return ConsistencyLevel.ONE; // this is Cassandra Default
- }
- }
- return ConsistencyLevel.valueOf(prop);
- }
+ @Override
+ public String table() {
+ return table;
+ }
+
+ public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency";
+ public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency";
+ protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) {
+ String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table);
+ if(prop==null) {
+ prop = trans.getProperty(CASS_READ_CONSISTENCY);
+ if(prop==null) {
+ return ConsistencyLevel.ONE; // this is Cassandra Default
+ }
+ }
+ return ConsistencyLevel.valueOf(prop);
+ }
- protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {
- String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table);
- if(prop==null) {
- prop = trans.getProperty(CASS_WRITE_CONSISTENCY);
- if(prop==null) {
- return ConsistencyLevel.ONE; // this is Cassandra Default\
- }
- }
- return ConsistencyLevel.valueOf(prop);
- }
+ protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {
+ String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table);
+ if(prop==null) {
+ prop = trans.getProperty(CASS_WRITE_CONSISTENCY);
+ if(prop==null) {
+                return ConsistencyLevel.ONE; // this is Cassandra Default
+ }
+ }
+ return ConsistencyLevel.valueOf(prop);
+ }
- public static DataInputStream toDIS(ByteBuffer bb) {
- byte[] b = bb.array();
- return new DataInputStream(
- new ByteArrayInputStream(b,bb.position(),bb.limit())
- );
- }
+ public static DataInputStream toDIS(ByteBuffer bb) {
+ byte[] b = bb.array();
+ return new DataInputStream(
+ new ByteArrayInputStream(b,bb.position(),bb.limit())
+ );
+ }
}
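[Editor's worked example, not part of this change] For a hypothetical DATA class declaring fields ns, name and description in that order, a Loader whose keylimit() returns 2, and table "sample", setCRUD(trans, "sample", SampleData.class, loader) prepares:

    // createPS: INSERT INTO sample (ns,name,description) VALUES (?,?,?);
    // readPS:   SELECT ns,name,description FROM sample WHERE ns=? AND name=?;
    // updatePS: UPDATE sample SET description=? WHERE ns=? AND name=?;
    // deletePS: DELETE FROM sample WHERE ns=? AND name=?;

If every declared field is part of the key (so sbup stays empty), updatePS falls back to the INSERT statement, as noted in the comment above.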
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java
index 70db430e..38759075 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO.java
@@ -35,10 +35,10 @@ import org.onap.aaf.misc.env.Trans;
* @param <DATA>
*/
public interface DAO<TRANS extends Trans,DATA> extends DAO_RO<TRANS,DATA> {
- public Result<DATA> create(TRANS trans, DATA data);
- public Result<Void> update(TRANS trans, DATA data);
- // In many cases, the data has been correctly read first, so we shouldn't read again
- // Use reread=true if you are using DATA with only a Key
- public Result<Void> delete(TRANS trans, DATA data, boolean reread);
- public Object[] keyFrom(DATA data);
+ public Result<DATA> create(TRANS trans, DATA data);
+ public Result<Void> update(TRANS trans, DATA data);
+ // In many cases, the data has been correctly read first, so we shouldn't read again
+ // Use reread=true if you are using DATA with only a Key
+ public Result<Void> delete(TRANS trans, DATA data, boolean reread);
+ public Object[] keyFrom(DATA data);
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java
index 207576e4..b6c35f20 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAOException.java
@@ -23,10 +23,10 @@ package org.onap.aaf.auth.dao;
public class DAOException extends Exception {
- /**
- *
- */
- private static final long serialVersionUID = 1527904125585539823L;
+ /**
+ *
+ */
+ private static final long serialVersionUID = 1527904125585539823L;
// // TODO - enum in result class == is our intended design, currently the DAO layer does not use Result<RV> so we still use these for now
// public final static DAOException RoleNotFoundDAOException = new DAOException("RoleNotFound");
@@ -34,18 +34,18 @@ public class DAOException extends Exception {
// public final static DAOException UserNotFoundDAOException = new DAOException("UserNotFound");
public DAOException() {
- }
+ }
- public DAOException(String message) {
- super(message);
- }
+ public DAOException(String message) {
+ super(message);
+ }
- public DAOException(Throwable cause) {
- super(cause);
- }
+ public DAOException(Throwable cause) {
+ super(cause);
+ }
- public DAOException(String message, Throwable cause) {
- super(message, cause);
- }
+ public DAOException(String message, Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java
index 4bffb5f3..ca4277e1 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/DAO_RO.java
@@ -38,33 +38,33 @@ import org.onap.aaf.misc.env.Trans;
* @param <DATA>
*/
public interface DAO_RO<TRANS extends Trans,DATA> {
- /**
- * Get a List of Data given Key of Object Array
- * @param objs
- * @return
- * @throws DAOException
- */
- public Result<List<DATA>> read(TRANS trans, Object ... key);
+ /**
+ * Get a List of Data given Key of Object Array
+ * @param objs
+ * @return
+ * @throws DAOException
+ */
+ public Result<List<DATA>> read(TRANS trans, Object ... key);
- /**
- * Get a List of Data given Key of DATA Object
- * @param trans
- * @param key
- * @return
- * @throws DAOException
- */
- public Result<List<DATA>> read(TRANS trans, DATA key);
+ /**
+ * Get a List of Data given Key of DATA Object
+ * @param trans
+ * @param key
+ * @return
+ * @throws DAOException
+ */
+ public Result<List<DATA>> read(TRANS trans, DATA key);
- /**
- * close DAO
- */
- public void close(TRANS trans);
+ /**
+ * close DAO
+ */
+ public void close(TRANS trans);
- /**
- * Return name of referenced Data
- * @return
- */
- public String table();
+ /**
+ * Return name of referenced Data
+ * @return
+ */
+ public String table();
}
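[Editor's sketch, not part of this change] A generic helper consuming the read-only contract above; the method name and the null-on-miss convention are the editor's assumptions:

    // Returns the first row matching the key, or null when nothing is found or the read fails.
    public static <T extends Trans, D> D first(DAO_RO<T, D> dao, T trans, Object... key) {
        Result<List<D>> rld = dao.read(trans, key);
        if (rld.isOK() && !rld.isEmpty()) {
            return rld.value.get(0);
        }
        return null;
    }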
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java
index 00423161..cdfd0697 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Loader.java
@@ -36,179 +36,179 @@ import java.util.Set;
import com.datastax.driver.core.Row;
public abstract class Loader<DATA> {
- private int keylimit;
- public Loader(int keylimit) {
- this.keylimit = keylimit;
- }
-
- public int keylimit() {
- return keylimit;
- }
-
- protected abstract DATA load(DATA data, Row row);
- protected abstract void key(DATA data, int idx, Object[] obj);
- protected abstract void body(DATA data, int idx, Object[] obj);
+ private int keylimit;
+ public Loader(int keylimit) {
+ this.keylimit = keylimit;
+ }
+
+ public int keylimit() {
+ return keylimit;
+ }
+
+ protected abstract DATA load(DATA data, Row row);
+ protected abstract void key(DATA data, int idx, Object[] obj);
+ protected abstract void body(DATA data, int idx, Object[] obj);
- public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) {
- Object[] rv=null;
- switch(type) {
- case delete:
- rv = new Object[keylimit()];
- key(data,0,rv);
- break;
- case update:
- rv = new Object[size];
- body(data,0,rv);
- int body = size-keylimit();
- if(body>0) {
- key(data,body,rv);
- }
- break;
- default:
- rv = new Object[size];
- key(data,0,rv);
- if(size>keylimit()) {
- body(data,keylimit(),rv);
- }
- break;
- }
- return rv;
- }
-
- public static void writeString(DataOutputStream os, String s) throws IOException {
- if(s==null) {
- os.writeInt(-1);
- } else {
- switch(s.length()) {
- case 0:
- os.writeInt(0);
- break;
- default:
- byte[] bytes = s.getBytes();
- os.writeInt(bytes.length);
- os.write(bytes);
- }
- }
- }
-
-
- /**
- * We use bytes here to set a Maximum
- *
- * @param is
- * @param MAX
- * @return
- * @throws IOException
- */
- public static String readString(DataInputStream is, byte[] _buff) throws IOException {
- int l = is.readInt();
- byte[] buff = _buff;
- switch(l) {
- case -1: return null;
- case 0: return "";
- default:
- // Cover case where there is a large string, without always allocating a large buffer.
- if(l>buff.length) {
- buff = new byte[l];
- }
- is.read(buff,0,l);
- return new String(buff,0,l);
- }
- }
+ public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) {
+ Object[] rv=null;
+ switch(type) {
+ case delete:
+ rv = new Object[keylimit()];
+ key(data,0,rv);
+ break;
+ case update:
+ rv = new Object[size];
+ body(data,0,rv);
+ int body = size-keylimit();
+ if(body>0) {
+ key(data,body,rv);
+ }
+ break;
+ default:
+ rv = new Object[size];
+ key(data,0,rv);
+ if(size>keylimit()) {
+ body(data,keylimit(),rv);
+ }
+ break;
+ }
+ return rv;
+ }
+
+ public static void writeString(DataOutputStream os, String s) throws IOException {
+ if(s==null) {
+ os.writeInt(-1);
+ } else {
+ switch(s.length()) {
+ case 0:
+ os.writeInt(0);
+ break;
+ default:
+ byte[] bytes = s.getBytes();
+ os.writeInt(bytes.length);
+ os.write(bytes);
+ }
+ }
+ }
+
+
+ /**
+ * We use bytes here to set a Maximum
+ *
+ * @param is
+ * @param MAX
+ * @return
+ * @throws IOException
+ */
+ public static String readString(DataInputStream is, byte[] _buff) throws IOException {
+ int l = is.readInt();
+ byte[] buff = _buff;
+ switch(l) {
+ case -1: return null;
+ case 0: return "";
+ default:
+ // Cover case where there is a large string, without always allocating a large buffer.
+ if(l>buff.length) {
+ buff = new byte[l];
+ }
+ is.read(buff,0,l);
+ return new String(buff,0,l);
+ }
+ }
- /**
- * Write a set with proper sizing
- *
- * Note: at the moment, this is just String. Probably can develop system where types
- * are supported too... but not now.
- *
- * @param os
- * @param set
- * @throws IOException
- */
- public static void writeStringSet(DataOutputStream os, Collection<String> set) throws IOException {
- if(set==null) {
- os.writeInt(-1);
- } else {
- os.writeInt(set.size());
- for(String s : set) {
- writeString(os, s);
- }
- }
+ /**
+ * Write a set with proper sizing
+ *
+ * Note: at the moment, this is just String. Probably can develop system where types
+ * are supported too... but not now.
+ *
+ * @param os
+ * @param set
+ * @throws IOException
+ */
+ public static void writeStringSet(DataOutputStream os, Collection<String> set) throws IOException {
+ if(set==null) {
+ os.writeInt(-1);
+ } else {
+ os.writeInt(set.size());
+ for(String s : set) {
+ writeString(os, s);
+ }
+ }
- }
-
- public static Set<String> readStringSet(DataInputStream is, byte[] buff) throws IOException {
- int l = is.readInt();
- if(l<0) {
- return null;
- }
- Set<String> set = new HashSet<>(l);
- for(int i=0;i<l;++i) {
- set.add(readString(is,buff));
- }
- return set;
- }
-
- public static List<String> readStringList(DataInputStream is, byte[] buff) throws IOException {
- int l = is.readInt();
- if(l<0) {
- return null;
- }
- List<String> list = new ArrayList<>(l);
- for(int i=0;i<l;++i) {
- list.add(Loader.readString(is,buff));
- }
- return list;
- }
+ }
+
+ public static Set<String> readStringSet(DataInputStream is, byte[] buff) throws IOException {
+ int l = is.readInt();
+ if(l<0) {
+ return null;
+ }
+ Set<String> set = new HashSet<>(l);
+ for(int i=0;i<l;++i) {
+ set.add(readString(is,buff));
+ }
+ return set;
+ }
+
+ public static List<String> readStringList(DataInputStream is, byte[] buff) throws IOException {
+ int l = is.readInt();
+ if(l<0) {
+ return null;
+ }
+ List<String> list = new ArrayList<>(l);
+ for(int i=0;i<l;++i) {
+ list.add(Loader.readString(is,buff));
+ }
+ return list;
+ }
- /**
- * Write a map
- * @param os
- * @param map
- * @throws IOException
- */
- public static void writeStringMap(DataOutputStream os, Map<String,String> map) throws IOException {
- if(map==null) {
- os.writeInt(-1);
- } else {
- Set<Entry<String, String>> es = map.entrySet();
- os.writeInt(es.size());
- for(Entry<String,String> e : es) {
- writeString(os, e.getKey());
- writeString(os, e.getValue());
- }
- }
+ /**
+ * Write a map
+ * @param os
+ * @param map
+ * @throws IOException
+ */
+ public static void writeStringMap(DataOutputStream os, Map<String,String> map) throws IOException {
+ if(map==null) {
+ os.writeInt(-1);
+ } else {
+ Set<Entry<String, String>> es = map.entrySet();
+ os.writeInt(es.size());
+ for(Entry<String,String> e : es) {
+ writeString(os, e.getKey());
+ writeString(os, e.getValue());
+ }
+ }
- }
+ }
- public static Map<String,String> readStringMap(DataInputStream is, byte[] buff) throws IOException {
- int l = is.readInt();
- if(l<0) {
- return null;
- }
- Map<String,String> map = new HashMap<>(l);
- for(int i=0;i<l;++i) {
- String key = readString(is,buff);
- map.put(key,readString(is,buff));
- }
- return map;
- }
- public static void writeHeader(DataOutputStream os, int magic, int version) throws IOException {
- os.writeInt(magic);
- os.writeInt(version);
- }
-
- public static int readHeader(DataInputStream is, final int magic, final int version) throws IOException {
- if(is.readInt()!=magic) {
- throw new IOException("Corrupted Data Stream");
- }
- int v = is.readInt();
- if(version<0 || v>version) {
- throw new IOException("Unsupported Data Version: " + v);
- }
- return v;
- }
+ public static Map<String,String> readStringMap(DataInputStream is, byte[] buff) throws IOException {
+ int l = is.readInt();
+ if(l<0) {
+ return null;
+ }
+ Map<String,String> map = new HashMap<>(l);
+ for(int i=0;i<l;++i) {
+ String key = readString(is,buff);
+ map.put(key,readString(is,buff));
+ }
+ return map;
+ }
+ public static void writeHeader(DataOutputStream os, int magic, int version) throws IOException {
+ os.writeInt(magic);
+ os.writeInt(version);
+ }
+
+ public static int readHeader(DataInputStream is, final int magic, final int version) throws IOException {
+ if(is.readInt()!=magic) {
+ throw new IOException("Corrupted Data Stream");
+ }
+ int v = is.readInt();
+ if(version<0 || v>version) {
+ throw new IOException("Unsupported Data Version: " + v);
+ }
+ return v;
+ }
}
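[Editor's illustration, not part of this change] A round trip through the length-prefixed string helpers above, using in-memory streams (java.io imports assumed; the calls can throw IOException):

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    Loader.writeString(dos, "authz");   // writes length 5, then the bytes
    Loader.writeString(dos, null);      // writes -1 as the null marker

    DataInputStream dis = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    byte[] buff = new byte[256];        // reusable buffer; readString allocates a larger one only when needed
    String s1 = Loader.readString(dis, buff);   // "authz"
    String s2 = Loader.readString(dis, buff);   // null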
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java
index c40d74fa..af3567eb 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Streamer.java
@@ -26,6 +26,6 @@ import java.io.DataOutputStream;
import java.io.IOException;
public interface Streamer<DATA> {
- public abstract void marshal(DATA data, DataOutputStream os) throws IOException;
- public abstract void unmarshal(DATA data, DataInputStream is) throws IOException;
+ public abstract void marshal(DATA data, DataOutputStream os) throws IOException;
+ public abstract void unmarshal(DATA data, DataInputStream is) throws IOException;
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java
index c00c1048..0fcda19c 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/Touchable.java
@@ -22,5 +22,5 @@
package org.onap.aaf.auth.dao;
public interface Touchable {
- // Or make all DAOs accept list of CIDAOs...
+ // Or make all DAOs accept list of CIDAOs...
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java
index 9526bf28..c0a2c49c 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCertDAO.java
@@ -30,25 +30,25 @@ import org.onap.aaf.auth.env.AuthzTrans;
import org.onap.aaf.auth.layer.Result;
public class CachedCertDAO extends CachedDAO<AuthzTrans, CertDAO, CertDAO.Data> {
- public CachedCertDAO(CertDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, CertDAO.CACHE_SEG, expiresIn);
- }
-
- /**
- * Pass through Cert ID Lookup
- *
- * @param trans
- * @param ns
- * @return
- */
-
- public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {
- return dao().readID(trans, id);
- }
-
- public Result<List<CertDAO.Data>> readX500(AuthzTrans trans, final String x500) {
- return dao().readX500(trans, x500);
- }
+ public CachedCertDAO(CertDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, CertDAO.CACHE_SEG, expiresIn);
+ }
+
+ /**
+ * Pass through Cert ID Lookup
+ *
+ * @param trans
+ * @param ns
+ * @return
+ */
+
+ public Result<List<CertDAO.Data>> readID(AuthzTrans trans, final String id) {
+ return dao().readID(trans, id);
+ }
+
+ public Result<List<CertDAO.Data>> readX500(AuthzTrans trans, final String x500) {
+ return dao().readX500(trans, x500);
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java
index 76fd5530..eb5885f0 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedCredDAO.java
@@ -31,36 +31,36 @@ import org.onap.aaf.auth.env.AuthzTrans;
import org.onap.aaf.auth.layer.Result;
public class CachedCredDAO extends CachedDAO<AuthzTrans, CredDAO, CredDAO.Data> {
- public CachedCredDAO(CredDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, CredDAO.CACHE_SEG, expiresIn);
- }
-
- /**
- * Pass through Cred Lookup
- *
- * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete.
- *
- * @param trans
- * @param ns
- * @return
- */
- public Result<List<CredDAO.Data>> readNS(AuthzTrans trans, final String ns) {
-
- return dao().readNS(trans, ns);
- }
-
- public Result<List<CredDAO.Data>> readID(AuthzTrans trans, final String id) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<CredDAO.Data>> call() {
- return dao().readID(trans, id);
- }
- };
-
- Result<List<CredDAO.Data>> lurd = get(trans, id, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_UserNotFound,"No User Cred found");
- }
- return lurd;
- }
+ public CachedCredDAO(CredDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, CredDAO.CACHE_SEG, expiresIn);
+ }
+
+ /**
+ * Pass through Cred Lookup
+ *
+ * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete.
+ *
+ * @param trans
+ * @param ns
+ * @return
+ */
+ public Result<List<CredDAO.Data>> readNS(AuthzTrans trans, final String ns) {
+
+ return dao().readNS(trans, ns);
+ }
+
+ public Result<List<CredDAO.Data>> readID(AuthzTrans trans, final String id) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<CredDAO.Data>> call() {
+ return dao().readID(trans, id);
+ }
+ };
+
+ Result<List<CredDAO.Data>> lurd = get(trans, id, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_UserNotFound,"No User Cred found");
+ }
+ return lurd;
+ }
}
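[Editor's note, not part of this change] readID above caches by credential id; a cache miss falls through to dao().readID via the anonymous DAOGetter, and an empty result is normalized to ERR_UserNotFound. A hypothetical caller (the id value and the use of the status field are illustrative):

    Result<List<CredDAO.Data>> rc = cachedCredDAO.readID(trans, "demo@people.osaaf.org");
    if (rc.isOK()) {
        for (CredDAO.Data cred : rc.value) {
            // inspect each credential row (fields are not shown in this diff)
        }
    } else if (rc.status == Status.ERR_UserNotFound) {
        // no credential in cache or database for this id
    }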
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java
index be860488..e639767c 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedNSDAO.java
@@ -27,7 +27,7 @@ import org.onap.aaf.auth.dao.cass.NsDAO;
import org.onap.aaf.auth.env.AuthzTrans;
public class CachedNSDAO extends CachedDAO<AuthzTrans, NsDAO, NsDAO.Data> {
- public CachedNSDAO(NsDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, NsDAO.CACHE_SEG, expiresIn);
- }
+ public CachedNSDAO(NsDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, NsDAO.CACHE_SEG, expiresIn);
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java
index 4cb7cf2e..a18e6a67 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedPermDAO.java
@@ -34,91 +34,91 @@ import org.onap.aaf.auth.layer.Result;
public class CachedPermDAO extends CachedDAO<AuthzTrans,PermDAO, PermDAO.Data> {
- public CachedPermDAO(PermDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, PermDAO.CACHE_SEG, expiresIn);
- }
+ public CachedPermDAO(PermDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, PermDAO.CACHE_SEG, expiresIn);
+ }
- public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- return dao.readNS(trans, ns);
- }
- };
-
- Result<List<Data>> lurd = get(trans, ns, getter);
- if(lurd.isOKhasData()) {
- return lurd;
- } else {
-
- }
-// if(getter.result==null) {
-// if(lurd==null) {
- return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details);
-// } else {
-// return Result.ok(lurd);
-// }
-// }
-// return getter.result;
- }
+ public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ return dao.readNS(trans, ns);
+ }
+ };
+
+ Result<List<Data>> lurd = get(trans, ns, getter);
+ if(lurd.isOKhasData()) {
+ return lurd;
+ } else {
+
+ }
+// if(getter.result==null) {
+// if(lurd==null) {
+ return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details);
+// } else {
+// return Result.ok(lurd);
+// }
+// }
+// return getter.result;
+ }
- public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String type) {
- return dao().readChildren(trans,ns,type);
- }
+ public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String type) {
+ return dao().readChildren(trans,ns,type);
+ }
- /**
- *
- * @param trans
- * @param ns
- * @param type
- * @return
- */
- public Result<List<Data>> readByType(AuthzTrans trans, final String ns, final String type) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- return dao.readByType(trans, ns, type);
- }
- };
-
- // Note: Can reuse index1 here, because there is no name collision versus response
- Result<List<Data>> lurd = get(trans, ns+'|'+type, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_PermissionNotFound,"No Permission found");
- }
- return lurd;
- }
-
- /**
- * Add desciption to this permission
- *
- * @param trans
- * @param ns
- * @param type
- * @param instance
- * @param action
- * @param description
- * @return
- */
- public Result<Void> addDescription(AuthzTrans trans, String ns, String type,
- String instance, String action, String description) {
- //TODO Invalidate?
- return dao().addDescription(trans, ns, type, instance, action, description);
- }
-
- public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) {
- Result<Void> rv = dao().addRole(trans,perm,role.encode());
- if(trans.debug().isLoggable())
- trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole");
- invalidate(trans,perm);
- return rv;
- }
+ /**
+ *
+ * @param trans
+ * @param ns
+ * @param type
+ * @return
+ */
+ public Result<List<Data>> readByType(AuthzTrans trans, final String ns, final String type) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ return dao.readByType(trans, ns, type);
+ }
+ };
+
+ // Note: Can reuse index1 here, because there is no name collision versus response
+ Result<List<Data>> lurd = get(trans, ns+'|'+type, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_PermissionNotFound,"No Permission found");
+ }
+ return lurd;
+ }
+
+ /**
+     * Add description to this permission
+ *
+ * @param trans
+ * @param ns
+ * @param type
+ * @param instance
+ * @param action
+ * @param description
+ * @return
+ */
+ public Result<Void> addDescription(AuthzTrans trans, String ns, String type,
+ String instance, String action, String description) {
+ //TODO Invalidate?
+ return dao().addDescription(trans, ns, type, instance, action, description);
+ }
+
+ public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) {
+ Result<Void> rv = dao().addRole(trans,perm,role.encode());
+ if(trans.debug().isLoggable())
+ trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole");
+ invalidate(trans,perm);
+ return rv;
+ }
- public Result<Void> delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) {
- Result<Void> rv = dao().delRole(trans,perm,role.encode());
- if(trans.debug().isLoggable())
- trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole");
- invalidate(trans,perm);
- return rv;
- }
+ public Result<Void> delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) {
+ Result<Void> rv = dao().delRole(trans,perm,role.encode());
+ if(trans.debug().isLoggable())
+ trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole");
+ invalidate(trans,perm);
+ return rv;
+ }
}
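[Editor's note, not part of this change] addRole and delRole above write through the wrapped PermDAO and then invalidate the cached permission, so the next readNS/readByType repopulates the cache segment. A hypothetical grant, assuming fully populated perm and role Data objects:

    Result<Void> rv = cachedPermDAO.addRole(trans, perm, role);
    if (rv.notOK()) {
        trans.error().log("Grant failed:", rv.errorString());
    }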
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java
index 5fac680c..1d8e6709 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedRoleDAO.java
@@ -33,74 +33,74 @@ import org.onap.aaf.auth.env.AuthzTrans;
import org.onap.aaf.auth.layer.Result;
public class CachedRoleDAO extends CachedDAO<AuthzTrans,RoleDAO, RoleDAO.Data> {
- public CachedRoleDAO(RoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, RoleDAO.CACHE_SEG, expiresIn);
- }
+ public CachedRoleDAO(RoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, RoleDAO.CACHE_SEG, expiresIn);
+ }
- public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- return dao.readNS(trans, ns);
- }
- };
-
- Result<List<Data>> lurd = get(trans, ns, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_RoleNotFound,"No Role found");
- }
- return lurd;
- }
+ public Result<List<Data>> readNS(AuthzTrans trans, final String ns) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ return dao.readNS(trans, ns);
+ }
+ };
+
+ Result<List<Data>> lurd = get(trans, ns, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_RoleNotFound,"No Role found");
+ }
+ return lurd;
+ }
- public Result<List<Data>> readName(AuthzTrans trans, final String name) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- return dao().readName(trans, name);
- }
- };
-
- Result<List<Data>> lurd = get(trans, name, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_RoleNotFound,"No Role found");
- }
- return lurd;
- }
+ public Result<List<Data>> readName(AuthzTrans trans, final String name) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ return dao().readName(trans, name);
+ }
+ };
+
+ Result<List<Data>> lurd = get(trans, name, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_RoleNotFound,"No Role found");
+ }
+ return lurd;
+ }
- public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String name) {
- // At this point, I'm thinking it's better not to try to cache "*" results
- // Data probably won't be accurate, and adding it makes every update invalidate most of the cache
- // Jonathan 2/4/2014
- return dao().readChildren(trans,ns,name);
- }
+ public Result<List<Data>> readChildren(AuthzTrans trans, final String ns, final String name) {
+ // At this point, I'm thinking it's better not to try to cache "*" results
+ // Data probably won't be accurate, and adding it makes every update invalidate most of the cache
+ // Jonathan 2/4/2014
+ return dao().readChildren(trans,ns,name);
+ }
- public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
- Result<Void> rv = dao().addPerm(trans,rd,perm);
- if(trans.debug().isLoggable())
- trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm");
- invalidate(trans, rd);
- return rv;
- }
+ public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
+ Result<Void> rv = dao().addPerm(trans,rd,perm);
+ if(trans.debug().isLoggable())
+ trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm");
+ invalidate(trans, rd);
+ return rv;
+ }
- public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
- Result<Void> rv = dao().delPerm(trans,rd,perm);
- if(trans.debug().isLoggable())
- trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.addPerm");
- invalidate(trans, rd);
- return rv;
- }
-
- /**
- * Add description to this role
- *
- * @param trans
- * @param ns
- * @param name
- * @param description
- * @return
- */
- public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
- //TODO Invalidate?
- return dao().addDescription(trans, ns, name, description);
+ public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) {
+ Result<Void> rv = dao().delPerm(trans,rd,perm);
+ if(trans.debug().isLoggable())
+            trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.delPerm");
+ invalidate(trans, rd);
+ return rv;
+ }
+
+ /**
+ * Add description to this role
+ *
+ * @param trans
+ * @param ns
+ * @param name
+ * @param description
+ * @return
+ */
+ public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
+ //TODO Invalidate?
+ return dao().addDescription(trans, ns, name, description);
- }
+ }
}
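
An illustrative aside, not part of the patch above: CachedRoleDAO reads roles through a cache (the DAOGetter/get calls) and invalidates after writes such as addPerm and delPerm so the next read refetches. The self-contained Java sketch below shows only that read-through and invalidate shape; the class and method names are invented for the example, and the segmenting and expiry handled by the real CachedDAO are deliberately left out.

    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    public class ReadThroughCacheSketch {
        private final Map<String, List<String>> cache = new ConcurrentHashMap<>();
        private final Function<String, List<String>> backing;   // stands in for dao().readNS(...)

        ReadThroughCacheSketch(Function<String, List<String>> backing) {
            this.backing = backing;
        }

        List<String> get(String key) {
            // computeIfAbsent plays the role of get(trans, key, getter)
            return cache.computeIfAbsent(key, backing);
        }

        void invalidate(String key) {
            // mirrors invalidate(trans, rd) after a write such as addPerm/delPerm
            cache.remove(key);
        }

        public static void main(String[] args) {
            ReadThroughCacheSketch roles = new ReadThroughCacheSketch(ns -> List.of(ns + ".admin"));
            System.out.println(roles.get("org.onap"));   // fetched from the backing store
            roles.invalidate("org.onap");                // a write happened; drop the cached entry
            System.out.println(roles.get("org.onap"));   // refetched on the next read
        }
    }
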
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java
index 100c81d5..2121b136 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cached/CachedUserRoleDAO.java
@@ -34,82 +34,82 @@ import org.onap.aaf.auth.layer.Result;
import org.onap.aaf.misc.env.Slot;
public class CachedUserRoleDAO extends CachedDAO<AuthzTrans,UserRoleDAO, UserRoleDAO.Data> {
- private Slot transURSlot;
+ private Slot transURSlot;
- public CachedUserRoleDAO(UserRoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
- super(dao, info, UserRoleDAO.CACHE_SEG, expiresIn);
- transURSlot = dao.transURSlot;
- }
+ public CachedUserRoleDAO(UserRoleDAO dao, CIDAO<AuthzTrans> info, long expiresIn) {
+ super(dao, info, UserRoleDAO.CACHE_SEG, expiresIn);
+ transURSlot = dao.transURSlot;
+ }
- /**
- * Special Case.
- * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..."
- * Pull result, and make accessible by the Trans, which is always keyed by User.
- * @param trans
- * @param user
- * @return
- */
- public Result<List<Data>> readByUser(AuthzTrans trans, final String user) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- // If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not.
- if(user!=null && user.equals(trans.user())) {
- Result<List<Data>> transLD = trans.get(transURSlot,null);
- if(transLD==null ) {
- transLD = dao.readByUser(trans, user);
- }
- return transLD;
- } else {
- return dao.readByUser(trans, user);
- }
- }
- };
- Result<List<Data>> lurd = get(trans, user, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user);
- }
- return lurd;
- }
+ /**
+ * Special Case.
+ * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..."
+ * Pull result, and make accessible by the Trans, which is always keyed by User.
+ * @param trans
+ * @param user
+ * @return
+ */
+ public Result<List<Data>> readByUser(AuthzTrans trans, final String user) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ // If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not.
+ if(user!=null && user.equals(trans.user())) {
+ Result<List<Data>> transLD = trans.get(transURSlot,null);
+ if(transLD==null ) {
+ transLD = dao.readByUser(trans, user);
+ }
+ return transLD;
+ } else {
+ return dao.readByUser(trans, user);
+ }
+ }
+ };
+ Result<List<Data>> lurd = get(trans, user, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user);
+ }
+ return lurd;
+ }
-
- public Result<List<Data>> readByRole(AuthzTrans trans, final String role) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- return dao.readByRole(trans, role);
- }
- };
- Result<List<Data>> lurd = get(trans, role, getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role);
- }
- return lurd;
- }
+
+ public Result<List<Data>> readByRole(AuthzTrans trans, final String role) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ return dao.readByRole(trans, role);
+ }
+ };
+ Result<List<Data>> lurd = get(trans, role, getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role);
+ }
+ return lurd;
+ }
- public Result<List<UserRoleDAO.Data>> readUserInRole(final AuthzTrans trans, final String user, final String role) {
- DAOGetter getter = new DAOGetter(trans,dao()) {
- public Result<List<Data>> call() {
- if(user.equals(trans.user())) {
- Result<List<Data>> rrbu = readByUser(trans, user);
- if(rrbu.isOK()) {
- List<Data> ld = new ArrayList<>(1);
- for(Data d : rrbu.value) {
- if(d.role.equals(role)) {
- ld.add(d);
- break;
- }
- }
- return Result.ok(ld).emptyList(ld.isEmpty());
- } else {
- return rrbu;
- }
- }
- return dao.readByUserRole(trans, user, role);
- }
- };
- Result<List<Data>> lurd = get(trans, keyFromObjs(user,role), getter);
- if(lurd.isOK() && lurd.isEmpty()) {
- return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user);
- }
- return lurd;
- }
+ public Result<List<UserRoleDAO.Data>> readUserInRole(final AuthzTrans trans, final String user, final String role) {
+ DAOGetter getter = new DAOGetter(trans,dao()) {
+ public Result<List<Data>> call() {
+ if(user.equals(trans.user())) {
+ Result<List<Data>> rrbu = readByUser(trans, user);
+ if(rrbu.isOK()) {
+ List<Data> ld = new ArrayList<>(1);
+ for(Data d : rrbu.value) {
+ if(d.role.equals(role)) {
+ ld.add(d);
+ break;
+ }
+ }
+ return Result.ok(ld).emptyList(ld.isEmpty());
+ } else {
+ return rrbu;
+ }
+ }
+ return dao.readByUserRole(trans, user, role);
+ }
+ };
+ Result<List<Data>> lurd = get(trans, keyFromObjs(user,role), getter);
+ if(lurd.isOK() && lurd.isEmpty()) {
+ return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user);
+ }
+ return lurd;
+ }
}
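
Another illustrative aside, on CachedUserRoleDAO.readByUser above: lookups for the transaction's own user are served from a per-transaction slot, so repeated "may user do X" checks within one request avoid extra reads. The sketch below mimics that with a plain map standing in for the env Slot; unlike the lines shown, it also stores the fetched result back into the slot, which the surrounding comment implies but the visible code leaves elsewhere. All names here are hypothetical.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Supplier;

    public class TransScopedLookup {
        // Hypothetical minimal transaction: one per request, keyed by user.
        static class Trans {
            final String user;
            final Map<String, Object> slots = new HashMap<>();
            Trans(String user) { this.user = user; }
        }

        // Fetch roles for a user; if the lookup is for the transaction's own user,
        // reuse (or populate) the per-transaction slot instead of re-reading.
        static List<String> rolesByUser(Trans trans, String user, Supplier<List<String>> dbRead) {
            if (user != null && user.equals(trans.user)) {
                @SuppressWarnings("unchecked")
                List<String> cached = (List<String>) trans.slots.get("userRoles");
                if (cached == null) {
                    cached = dbRead.get();
                    trans.slots.put("userRoles", cached);
                }
                return cached;
            }
            return dbRead.get();   // other users: always go to the store
        }

        public static void main(String[] args) {
            Trans trans = new Trans("alice");
            Supplier<List<String>> dbRead = () -> {
                System.out.println("hitting store");
                return new ArrayList<>(List.of("ns.admin", "ns.owner"));
            };
            System.out.println(rolesByUser(trans, "alice", dbRead)); // hits the store once
            System.out.println(rolesByUser(trans, "alice", dbRead)); // served from the slot
        }
    }
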
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java
index 284d0a84..1948ac65 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ApprovalDAO.java
@@ -43,212 +43,212 @@ import com.datastax.driver.core.exceptions.DriverException;
public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {
- public static final String PENDING = "pending";
- public static final String DENIED = "denied";
- public static final String APPROVED = "approved";
-
- private static final String TABLE = "approval";
- private static final String TABLELOG = "approved";
- private HistoryDAO historyDAO;
- private PSInfo psByUser, psByApprover, psByTicket, psByStatus;
+ public static final String PENDING = "pending";
+ public static final String DENIED = "denied";
+ public static final String APPROVED = "approved";
+
+ private static final String TABLE = "approval";
+ private static final String TABLELOG = "approved";
+ private HistoryDAO historyDAO;
+ private PSInfo psByUser, psByApprover, psByTicket, psByStatus;
-
- public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
- super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+
+ public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+ super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
historyDAO = new HistoryDAO(trans, this);
- init(trans);
- }
+ init(trans);
+ }
- public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) {
- super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- historyDAO=hDAO;
- init(trans);
- }
+ public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) {
+ super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ historyDAO=hDAO;
+ init(trans);
+ }
- private static final int KEYLIMIT = 1;
- public static class Data {
- public UUID id;
+ private static final int KEYLIMIT = 1;
+ public static class Data {
+ public UUID id;
public UUID ticket;
- public String user;
- public String approver;
- public String type;
- public String status;
- public String memo;
- public String operation;
- public Date last_notified;
- public Date updated;
- }
-
- private static class ApprovalLoader extends Loader<Data> {
- public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT);
-
- public ApprovalLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- data.id = row.getUUID(0);
- data.ticket = row.getUUID(1);
- data.user = row.getString(2);
- data.approver = row.getString(3);
- data.type = row.getString(4);
- data.status = row.getString(5);
- data.memo = row.getString(6);
- data.operation = row.getString(7);
- data.last_notified = row.getTimestamp(8);
- // This is used to get "WRITETIME(STATUS)" from Approval, which gives us an "updated"
- if(row.getColumnDefinitions().size()>9) {
- // Rows reported in MicroSeconds
- data.updated = new Date(row.getLong(9)/1000);
- }
- return data;
- }
+ public String user;
+ public String approver;
+ public String type;
+ public String status;
+ public String memo;
+ public String operation;
+ public Date last_notified;
+ public Date updated;
+ }
+
+ private static class ApprovalLoader extends Loader<Data> {
+ public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT);
+
+ public ApprovalLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+ data.id = row.getUUID(0);
+ data.ticket = row.getUUID(1);
+ data.user = row.getString(2);
+ data.approver = row.getString(3);
+ data.type = row.getString(4);
+ data.status = row.getString(5);
+ data.memo = row.getString(6);
+ data.operation = row.getString(7);
+ data.last_notified = row.getTimestamp(8);
+ // This is used to get "WRITETIME(STATUS)" from Approval, which gives us an "updated"
+ if(row.getColumnDefinitions().size()>9) {
+ // Rows reported in MicroSeconds
+ data.updated = new Date(row.getLong(9)/1000);
+ }
+ return data;
+ }
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.id;
- }
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.id;
+ }
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.ticket;
- obj[++idx]=data.user;
- obj[++idx]=data.approver;
- obj[++idx]=data.type;
- obj[++idx]=data.status;
- obj[++idx]=data.memo;
- obj[++idx]=data.operation;
- obj[++idx]=data.last_notified;
- }
- }
-
- private void init(AuthzTrans trans) {
- String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,9);
- psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
- " WHERE user = ?", new ApprovalLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.user;
- }
- }, readConsistency);
-
- psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
- " WHERE approver = ?", new ApprovalLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.approver;
- }
- }, readConsistency);
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.ticket;
+ obj[++idx]=data.user;
+ obj[++idx]=data.approver;
+ obj[++idx]=data.type;
+ obj[++idx]=data.status;
+ obj[++idx]=data.memo;
+ obj[++idx]=data.operation;
+ obj[++idx]=data.last_notified;
+ }
+ }
+
+ private void init(AuthzTrans trans) {
+ String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,9);
+ psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
+ " WHERE user = ?", new ApprovalLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.user;
+ }
+ }, readConsistency);
+
+ psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
+ " WHERE approver = ?", new ApprovalLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.approver;
+ }
+ }, readConsistency);
- psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
- " WHERE ticket = ?", new ApprovalLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.ticket;
- }
- }, readConsistency);
+ psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
+ " WHERE ticket = ?", new ApprovalLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.ticket;
+ }
+ }, readConsistency);
- psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
- " WHERE status = ?", new ApprovalLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.status;
- }
- }, readConsistency);
+ psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE +
+ " WHERE status = ?", new ApprovalLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.status;
+ }
+ }, readConsistency);
- }
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.CassDAOImpl#create(com.att.inno.env.TransStore, java.lang.Object)
- */
- @Override
- public Result<Data> create(AuthzTrans trans, Data data) {
- // If ID is not set (typical), create one.
- if(data.id==null) {
- data.id = Chrono.dateToUUID(System.currentTimeMillis());
- }
- Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- return Result.ok(data);
- }
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.CassDAOImpl#create(com.att.inno.env.TransStore, java.lang.Object)
+ */
+ @Override
+ public Result<Data> create(AuthzTrans trans, Data data) {
+ // If ID is not set (typical), create one.
+ if(data.id==null) {
+ data.id = Chrono.dateToUUID(System.currentTimeMillis());
+ }
+ Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ return Result.ok(data);
+ }
- public Result<List<ApprovalDAO.Data>> readByUser(AuthzTrans trans, String user) {
- return psByUser.read(trans, R_TEXT, new Object[]{user});
- }
+ public Result<List<ApprovalDAO.Data>> readByUser(AuthzTrans trans, String user) {
+ return psByUser.read(trans, R_TEXT, new Object[]{user});
+ }
- public Result<List<ApprovalDAO.Data>> readByApprover(AuthzTrans trans, String approver) {
- return psByApprover.read(trans, R_TEXT, new Object[]{approver});
- }
+ public Result<List<ApprovalDAO.Data>> readByApprover(AuthzTrans trans, String approver) {
+ return psByApprover.read(trans, R_TEXT, new Object[]{approver});
+ }
- public Result<List<ApprovalDAO.Data>> readByTicket(AuthzTrans trans, UUID ticket) {
- return psByTicket.read(trans, R_TEXT, new Object[]{ticket});
- }
+ public Result<List<ApprovalDAO.Data>> readByTicket(AuthzTrans trans, UUID ticket) {
+ return psByTicket.read(trans, R_TEXT, new Object[]{ticket});
+ }
- public Result<List<ApprovalDAO.Data>> readByStatus(AuthzTrans trans, String status) {
- return psByStatus.read(trans, R_TEXT, new Object[]{status});
- }
+ public Result<List<ApprovalDAO.Data>> readByStatus(AuthzTrans trans, String status) {
+ return psByStatus.read(trans, R_TEXT, new Object[]{status});
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.CassDAOImpl#delete(com.att.inno.env.TransStore, java.lang.Object, boolean)
- */
- @Override
- public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
- if(reread || data.status == null) { // if Memo is empty, likely not full record
- Result<ResultSet> rd = readPS.exec(trans, R_TEXT, data);
- if(rd.notOK()) {
- return Result.err(rd);
- }
- ApprovalLoader.deflt.load(data, rd.value.one());
- }
- if("approved".equals(data.status) || "denied".equals(data.status)) {
- StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
- sb.append("INSERT INTO ");
- sb.append(TABLELOG);
- sb.append(" (id,user,approver,type,status,memo,operation) VALUES (");
- sb.append(data.id);
- sb.append(",'"); sb.append(data.user);
- sb.append("','"); sb.append(data.approver);
- sb.append("','"); sb.append(data.type);
- sb.append("','"); sb.append(data.status);
- sb.append("','"); sb.append(data.memo.replace("'", "''"));
- sb.append("','"); sb.append(data.operation);
- sb.append("');\n");
- sb.append("DELETE FROM ");
- sb.append(TABLE);
- sb.append(" WHERE id=");
- sb.append(data.id);
- sb.append(";\n");
- sb.append("APPLY BATCH;\n");
- TimeTaken tt = trans.start("DELETE APPROVAL",Env.REMOTE);
- try {
- if(async) {
- getSession(trans).executeAsync(sb.toString());
- return Result.ok();
- } else {
- getSession(trans).execute(sb.toString());
- return Result.ok();
- }
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- } finally {
- tt.done();
- }
- } else {
- return super.delete(trans, data, false);
- }
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.CassDAOImpl#delete(com.att.inno.env.TransStore, java.lang.Object, boolean)
+ */
+ @Override
+ public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
+        if(reread || data.status == null) { // if status is empty, likely not a full record
+ Result<ResultSet> rd = readPS.exec(trans, R_TEXT, data);
+ if(rd.notOK()) {
+ return Result.err(rd);
+ }
+ ApprovalLoader.deflt.load(data, rd.value.one());
+ }
+ if("approved".equals(data.status) || "denied".equals(data.status)) {
+ StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
+ sb.append("INSERT INTO ");
+ sb.append(TABLELOG);
+ sb.append(" (id,user,approver,type,status,memo,operation) VALUES (");
+ sb.append(data.id);
+ sb.append(",'"); sb.append(data.user);
+ sb.append("','"); sb.append(data.approver);
+ sb.append("','"); sb.append(data.type);
+ sb.append("','"); sb.append(data.status);
+ sb.append("','"); sb.append(data.memo.replace("'", "''"));
+ sb.append("','"); sb.append(data.operation);
+ sb.append("');\n");
+ sb.append("DELETE FROM ");
+ sb.append(TABLE);
+ sb.append(" WHERE id=");
+ sb.append(data.id);
+ sb.append(";\n");
+ sb.append("APPLY BATCH;\n");
+ TimeTaken tt = trans.start("DELETE APPROVAL",Env.REMOTE);
+ try {
+ if(async) {
+ getSession(trans).executeAsync(sb.toString());
+ return Result.ok();
+ } else {
+ getSession(trans).execute(sb.toString());
+ return Result.ok();
+ }
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ } finally {
+ tt.done();
+ }
+ } else {
+ return super.delete(trans, data, false);
+ }
- }
+ }
- /**
+ /**
* Log Modification statements to History
*
* @param modified which CRUD action was done
@@ -257,8 +257,8 @@ public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
hd.user = trans.user();
@@ -271,7 +271,7 @@ public class ApprovalDAO extends CassDAOImpl<AuthzTrans,ApprovalDAO.Data> {
// Detail?
// Reconstruct?
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
}
}
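
Aside on ApprovalDAO.delete above: once an approval is approved or denied, the row is copied into the log table and removed from the live table in a single hand-built CQL batch, with single quotes in the free-text memo doubled for escaping. The trimmed sketch below reproduces just that string-building shape; the table and column names are placeholders, not the real schema.

    import java.util.UUID;

    public class ApprovalBatchSketch {
        static String cqlQuote(String s) {
            // CQL string literals escape ' by doubling it, as data.memo.replace("'", "''") does.
            return s.replace("'", "''");
        }

        static String logAndDelete(UUID id, String user, String status, String memo) {
            StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
            sb.append("INSERT INTO approved (id,user,status,memo) VALUES (")
              .append(id).append(",'").append(user)
              .append("','").append(status)
              .append("','").append(cqlQuote(memo))
              .append("');\n");
            sb.append("DELETE FROM approval WHERE id=").append(id).append(";\n");
            sb.append("APPLY BATCH;\n");
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(logAndDelete(UUID.randomUUID(), "alice", "approved", "ok'd by admin"));
        }
    }
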
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java
index a6fbecaf..6702476a 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ArtiDAO.java
@@ -52,7 +52,7 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
private HistoryDAO historyDAO;
private PSInfo psByMechID,psByMachine, psByNs;
-
+
public ArtiDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
super(trans, ArtiDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
init(trans);
@@ -65,67 +65,67 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
}
public static final int KEYLIMIT = 2;
- public static class Data implements Bytification {
- public String mechid;
- public String machine;
- private Set<String> type;
- public String sponsor;
- public String ca;
- public String dir;
- public String ns;
- public String os_user;
- public String notify;
- public Date expires;
- public int renewDays;
- public Set<String> sans;
+ public static class Data implements Bytification {
+ public String mechid;
+ public String machine;
+ private Set<String> type;
+ public String sponsor;
+ public String ca;
+ public String dir;
+ public String ns;
+ public String os_user;
+ public String notify;
+ public Date expires;
+ public int renewDays;
+ public Set<String> sans;
// // Getters
- public Set<String> type(boolean mutable) {
- if (type == null) {
- type = new HashSet<>();
- } else if (mutable && !(type instanceof HashSet)) {
- type = new HashSet<>(type);
- }
- return type;
- }
+ public Set<String> type(boolean mutable) {
+ if (type == null) {
+ type = new HashSet<>();
+ } else if (mutable && !(type instanceof HashSet)) {
+ type = new HashSet<>(type);
+ }
+ return type;
+ }
- public Set<String> sans(boolean mutable) {
- if (sans == null) {
- sans = new HashSet<>();
- } else if (mutable && !(sans instanceof HashSet)) {
- sans = new HashSet<>(sans);
- }
- return sans;
- }
+ public Set<String> sans(boolean mutable) {
+ if (sans == null) {
+ sans = new HashSet<>();
+ } else if (mutable && !(sans instanceof HashSet)) {
+ sans = new HashSet<>(sans);
+ }
+ return sans;
+ }
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- ArtifactLoader.deflt.unmarshal(this, toDIS(bb));
- }
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ ArtifactLoader.deflt.unmarshal(this, toDIS(bb));
+ }
- public String toString() {
- return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires);
- }
+ public String toString() {
+ return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires);
+ }
}
private static class ArtifactLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=95829343;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48; // Note:
+ public static final int MAGIC=95829343;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48; // Note:
- public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT);
- public ArtifactLoader(int keylimit) {
+ public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT);
+ public ArtifactLoader(int keylimit) {
super(keylimit);
}
- @Override
+ @Override
public Data load(Data data, Row row) {
data.mechid = row.getString(0);
data.machine = row.getString(1);
@@ -144,7 +144,7 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
@Override
protected void key(final Data data, final int idx, Object[] obj) {
- int i;
+ int i;
obj[i=idx] = data.mechid;
obj[++i] = data.machine;
}
@@ -164,110 +164,110 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
obj[++i] = data.sans;
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.mechid);
- writeString(os, data.machine);
- os.writeInt(data.type.size());
- for(String s : data.type) {
- writeString(os, s);
- }
- writeString(os, data.sponsor);
- writeString(os, data.ca);
- writeString(os, data.dir);
- writeString(os, data.ns);
- writeString(os, data.os_user);
- writeString(os, data.notify);
- os.writeLong(data.expires==null?-1:data.expires.getTime());
- os.writeInt(data.renewDays);
- if(data.sans!=null) {
- os.writeInt(data.sans.size());
- for(String s : data.sans) {
- writeString(os, s);
- }
- } else {
- os.writeInt(0);
- }
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.mechid);
+ writeString(os, data.machine);
+ os.writeInt(data.type.size());
+ for(String s : data.type) {
+ writeString(os, s);
+ }
+ writeString(os, data.sponsor);
+ writeString(os, data.ca);
+ writeString(os, data.dir);
+ writeString(os, data.ns);
+ writeString(os, data.os_user);
+ writeString(os, data.notify);
+ os.writeLong(data.expires==null?-1:data.expires.getTime());
+ os.writeInt(data.renewDays);
+ if(data.sans!=null) {
+ os.writeInt(data.sans.size());
+ for(String s : data.sans) {
+ writeString(os, s);
+ }
+ } else {
+ os.writeInt(0);
+ }
+ }
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.mechid = readString(is,buff);
- data.machine = readString(is,buff);
- int size = is.readInt();
- data.type = new HashSet<>(size);
- for(int i=0;i<size;++i) {
- data.type.add(readString(is,buff));
- }
- data.sponsor = readString(is,buff);
- data.ca = readString(is,buff);
- data.dir = readString(is,buff);
- data.ns = readString(is,buff);
- data.os_user = readString(is,buff);
- data.notify = readString(is,buff);
- long l = is.readLong();
- data.expires = l<0?null:new Date(l);
- data.renewDays = is.readInt();
- size = is.readInt();
- data.sans = new HashSet<>(size);
- for(int i=0;i<size;++i) {
- data.sans.add(readString(is,buff));
- }
- }
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.mechid = readString(is,buff);
+ data.machine = readString(is,buff);
+ int size = is.readInt();
+ data.type = new HashSet<>(size);
+ for(int i=0;i<size;++i) {
+ data.type.add(readString(is,buff));
+ }
+ data.sponsor = readString(is,buff);
+ data.ca = readString(is,buff);
+ data.dir = readString(is,buff);
+ data.ns = readString(is,buff);
+ data.os_user = readString(is,buff);
+ data.notify = readString(is,buff);
+ long l = is.readLong();
+ data.expires = l<0?null:new Date(l);
+ data.renewDays = is.readInt();
+ size = is.readInt();
+ data.sans = new HashSet<>(size);
+ for(int i=0;i<size;++i) {
+ data.sans.add(readString(is,buff));
+ }
+ }
}
private void init(AuthzTrans trans) {
// Set up sub-DAOs
if(historyDAO==null) {
- historyDAO = new HistoryDAO(trans,this);
+ historyDAO = new HistoryDAO(trans,this);
}
String[] helpers = setCRUD(trans, TABLE, Data.class, ArtifactLoader.deflt);
- psByMechID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE mechid = ?", new ArtifactLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.type;
- }
- },readConsistency);
+ psByMechID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE mechid = ?", new ArtifactLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.type;
+ }
+ },readConsistency);
- psByMachine = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE machine = ?", new ArtifactLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.type;
- }
- },readConsistency);
+ psByMachine = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE machine = ?", new ArtifactLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.type;
+ }
+ },readConsistency);
- psByNs = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns = ?", new ArtifactLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.type;
- }
- },readConsistency);
+ psByNs = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns = ?", new ArtifactLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.type;
+ }
+ },readConsistency);
}
-
+
public Result<List<Data>> readByMechID(AuthzTrans trans, String mechid) {
- return psByMechID.read(trans, R_TEXT, new Object[]{mechid});
- }
+ return psByMechID.read(trans, R_TEXT, new Object[]{mechid});
+ }
- public Result<List<ArtiDAO.Data>> readByMachine(AuthzTrans trans, String machine) {
- return psByMachine.read(trans, R_TEXT, new Object[]{machine});
- }
+ public Result<List<ArtiDAO.Data>> readByMachine(AuthzTrans trans, String machine) {
+ return psByMachine.read(trans, R_TEXT, new Object[]{machine});
+ }
- public Result<List<org.onap.aaf.auth.dao.cass.ArtiDAO.Data>> readByNs(AuthzTrans trans, String ns) {
- return psByNs.read(trans, R_TEXT, new Object[]{ns});
- }
+ public Result<List<org.onap.aaf.auth.dao.cass.ArtiDAO.Data>> readByNs(AuthzTrans trans, String ns) {
+ return psByNs.read(trans, R_TEXT, new Object[]{ns});
+ }
- /**
+ /**
* Log Modification statements to History
*
* @param modified which CRUD action was done
@@ -276,8 +276,8 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
hd.user = trans.user();
@@ -288,16 +288,16 @@ public class ArtiDAO extends CassDAOImpl<AuthzTrans,ArtiDAO.Data> {
? String.format("%s by %s", override[0], hd.user)
: String.format("%sd %s for %s",modified.name(),data.mechid,data.machine);
// Detail?
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize CredDAO.Data");
- }
- }
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+                trans.error().log(e,"Could not serialize ArtiDAO.Data");
+ }
+ }
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
}
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java
index 6d9900b9..e3f994c0 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheInfoDAO.java
@@ -62,405 +62,405 @@ import com.datastax.driver.core.exceptions.DriverException;
public class CacheInfoDAO extends CassDAOImpl<AuthzTrans,CacheInfoDAO.Data> implements CIDAO<AuthzTrans> {
- private static final String TABLE = "cache";
- public static final Map<String,Date[]> info = new ConcurrentHashMap<>();
+ private static final String TABLE = "cache";
+ public static final Map<String,Date[]> info = new ConcurrentHashMap<>();
- private static CacheUpdate cacheUpdate;
-
- // Hold current time stamps from Tables
- private final Date startTime;
- private PreparedStatement psCheck;
-
- public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
- super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- startTime = new Date();
- init(trans);
- }
+ private static CacheUpdate cacheUpdate;
+
+ // Hold current time stamps from Tables
+ private final Date startTime;
+ private PreparedStatement psCheck;
+
+ public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+ super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ startTime = new Date();
+ init(trans);
+ }
- public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {
- super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- startTime = new Date();
- init(trans);
- }
+ public CacheInfoDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) throws APIException, IOException {
+ super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ startTime = new Date();
+ init(trans);
+ }
//////////////////////////////////////////
// Data Definition, matches Cassandra DM
//////////////////////////////////////////
private static final int KEYLIMIT = 2;
- /**
+ /**
* @author Jonathan
*/
- public static class Data {
- public Data() {
- name = null;
- touched = null;
- }
- public Data(String name, int seg) {
- this.name = name;
- this.seg = seg;
- touched = null;
- }
-
- public String name;
- public int seg;
- public Date touched;
+ public static class Data {
+ public Data() {
+ name = null;
+ touched = null;
+ }
+ public Data(String name, int seg) {
+ this.name = name;
+ this.seg = seg;
+ touched = null;
+ }
+
+ public String name;
+ public int seg;
+ public Date touched;
}
private static class InfoLoader extends Loader<Data> {
- public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);
-
- public InfoLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- // Int more efficient
- data.name = row.getString(0);
- data.seg = row.getInt(1);
- data.touched = row.getTimestamp(2);
- return data;
- }
+ public static final InfoLoader dflt = new InfoLoader(KEYLIMIT);
+
+ public InfoLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+ // Int more efficient
+ data.name = row.getString(0);
+ data.seg = row.getInt(1);
+ data.touched = row.getTimestamp(2);
+ return data;
+ }
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
- obj[idx]=data.name;
- obj[++idx]=data.seg;
- }
+ obj[idx]=data.name;
+ obj[++idx]=data.seg;
+ }
- @Override
- protected void body(Data data, int idx, Object[] obj) {
- obj[idx]=data.touched;
- }
+ @Override
+ protected void body(Data data, int idx, Object[] obj) {
+ obj[idx]=data.touched;
+ }
}
- public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
- if(cacheUpdate==null) {
- Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");
- t.setDaemon(true);
- t.start();
- }
- }
+ public static<T extends Trans> void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
+ if(cacheUpdate==null) {
+ Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread");
+ t.setDaemon(true);
+ t.start();
+ }
+ }
- public static<T extends Trans> void stopUpdate() {
- if(cacheUpdate!=null) {
- cacheUpdate.go=false;
- }
- }
+ public static<T extends Trans> void stopUpdate() {
+ if(cacheUpdate!=null) {
+ cacheUpdate.go=false;
+ }
+ }
- private final static class CacheUpdate extends Thread {
- public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);
+ private final static class CacheUpdate extends Thread {
+ public static BlockingQueue<Transfer> notifyDQ = new LinkedBlockingQueue<Transfer>(2000);
- private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";
- private AuthzEnv env;
- private HMangr hman;
- private SecuritySetter<HttpURLConnection> ss;
- private final String authority;
- public boolean go = true;
-
- public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
- this.env = env;
- this.hman = hman;
- this.ss = ss;
-
- this.authority = ip+':'+port;
- }
-
- private static class Transfer {
- public String table;
- public int segs[];
- public Transfer(String table, int[] segs) {
- this.table = table;
- this.segs = segs;
- }
- }
- private class CacheClear extends Retryable<Integer> {
- public int total=0;
- private AuthzTrans trans;
- private String type;
- private String segs;
-
- public CacheClear(AuthzTrans trans) {
- this.trans = trans;
- }
+ private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0";
+ private AuthzEnv env;
+ private HMangr hman;
+ private SecuritySetter<HttpURLConnection> ss;
+ private final String authority;
+ public boolean go = true;
+
+ public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter<HttpURLConnection> ss, String ip, int port) {
+ this.env = env;
+ this.hman = hman;
+ this.ss = ss;
+
+ this.authority = ip+':'+port;
+ }
+
+ private static class Transfer {
+ public String table;
+ public int segs[];
+ public Transfer(String table, int[] segs) {
+ this.table = table;
+ this.segs = segs;
+ }
+ }
+ private class CacheClear extends Retryable<Integer> {
+ public int total=0;
+ private AuthzTrans trans;
+ private String type;
+ private String segs;
+
+ public CacheClear(AuthzTrans trans) {
+ this.trans = trans;
+ }
- public void set(Entry<String, IntHolder> es) {
- type = es.getKey();
- segs = es.getValue().toString();
- }
-
- @Override
- public Integer code(Rcli<?> client) throws APIException, CadiException {
- URI to = client.getURI();
- if(!to.getAuthority().equals(authority)) {
- Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);
- if(f.get(hman.readTimeout())) {
- ++total;
- } else {
- trans.error().log("Error During AAF Peer Notify",f.code(),f.body());
- }
- }
- return total;
- }
- }
-
- private class IntHolder {
- private int[] raw;
- HashSet<Integer> set;
-
- public IntHolder(int ints[]) {
- raw = ints;
- set = null;
- }
- public void add(int[] ints) {
- if(set==null) {
- set = new HashSet<>();
-
- for(int i=0;i<raw.length;++i) {
- set.add(raw[i]);
- }
- }
- for(int i=0;i<ints.length;++i) {
- set.add(ints[i]);
- }
- }
+ public void set(Entry<String, IntHolder> es) {
+ type = es.getKey();
+ segs = es.getValue().toString();
+ }
+
+ @Override
+ public Integer code(Rcli<?> client) throws APIException, CadiException {
+ URI to = client.getURI();
+ if(!to.getAuthority().equals(authority)) {
+ Future<Void> f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT);
+ if(f.get(hman.readTimeout())) {
+ ++total;
+ } else {
+ trans.error().log("Error During AAF Peer Notify",f.code(),f.body());
+ }
+ }
+ return total;
+ }
+ }
+
+ private class IntHolder {
+ private int[] raw;
+ HashSet<Integer> set;
+
+ public IntHolder(int ints[]) {
+ raw = ints;
+ set = null;
+ }
+ public void add(int[] ints) {
+ if(set==null) {
+ set = new HashSet<>();
+
+ for(int i=0;i<raw.length;++i) {
+ set.add(raw[i]);
+ }
+ }
+ for(int i=0;i<ints.length;++i) {
+ set.add(ints[i]);
+ }
+ }
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder();
- boolean first = true;
- if(set==null) {
- for(int i : raw) {
- if(first) {
- first=false;
- } else {
- sb.append(',');
- }
- sb.append(i);
- }
- } else {
- for(Integer i : set) {
- if(first) {
- first=false;
- } else {
- sb.append(',');
- }
- sb.append(i);
- }
- }
- return sb.toString();
- }
- }
-
- @Override
- public void run() {
- do {
- try {
- Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);
- if(data==null) {
- continue;
- }
-
- int count = 0;
- CacheClear cc = null;
- Map<String,IntHolder> gather = null;
- AuthzTrans trans = null;
- long start=0;
- // Do a block poll first
- do {
- if(gather==null) {
- start = System.nanoTime();
- trans = env.newTransNoAvg();
- cc = new CacheClear(trans);
- gather = new HashMap<>();
- }
- IntHolder prev = gather.get(data.table);
- if(prev==null) {
- gather.put(data.table,new IntHolder(data.segs));
- } else {
- prev.add(data.segs);
- }
- // continue while there is data
- } while((data = notifyDQ.poll())!=null);
- if(gather!=null) {
- for(Entry<String, IntHolder> es : gather.entrySet()) {
- cc.set(es);
- try {
- if(hman.all(ss, cc, false)!=null) {
- ++count;
- }
- } catch (Exception e) {
- trans.error().log(e, "Error on Cache Update");
- }
- }
- if(env.debug().isLoggable()) {
- float millis = (System.nanoTime()-start)/1000000f;
- StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");
- sb.append("Updated ");
- sb.append(count);
- if(count==1) {
- sb.append(" entry for ");
- } else {
- sb.append(" entries for ");
- }
- int peers = count<=0?0:cc.total/count;
- sb.append(peers);
- sb.append(" client");
- if(peers!=1) {
- sb.append('s');
- }
- sb.append(" in ");
- sb.append(millis);
- sb.append("ms");
- trans.auditTrail(0, sb, Env.REMOTE);
- env.debug().log(sb);
- }
- }
- } catch (InterruptedException e1) {
- go = false;
- Thread.currentThread().interrupt();
- }
- } while(go);
- }
- }
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ if(set==null) {
+ for(int i : raw) {
+ if(first) {
+ first=false;
+ } else {
+ sb.append(',');
+ }
+ sb.append(i);
+ }
+ } else {
+ for(Integer i : set) {
+ if(first) {
+ first=false;
+ } else {
+ sb.append(',');
+ }
+ sb.append(i);
+ }
+ }
+ return sb.toString();
+ }
+ }
+
+ @Override
+ public void run() {
+ do {
+ try {
+ Transfer data = notifyDQ.poll(4,TimeUnit.SECONDS);
+ if(data==null) {
+ continue;
+ }
+
+ int count = 0;
+ CacheClear cc = null;
+ Map<String,IntHolder> gather = null;
+ AuthzTrans trans = null;
+ long start=0;
+ // Do a block poll first
+ do {
+ if(gather==null) {
+ start = System.nanoTime();
+ trans = env.newTransNoAvg();
+ cc = new CacheClear(trans);
+ gather = new HashMap<>();
+ }
+ IntHolder prev = gather.get(data.table);
+ if(prev==null) {
+ gather.put(data.table,new IntHolder(data.segs));
+ } else {
+ prev.add(data.segs);
+ }
+ // continue while there is data
+ } while((data = notifyDQ.poll())!=null);
+ if(gather!=null) {
+ for(Entry<String, IntHolder> es : gather.entrySet()) {
+ cc.set(es);
+ try {
+ if(hman.all(ss, cc, false)!=null) {
+ ++count;
+ }
+ } catch (Exception e) {
+ trans.error().log(e, "Error on Cache Update");
+ }
+ }
+ if(env.debug().isLoggable()) {
+ float millis = (System.nanoTime()-start)/1000000f;
+ StringBuilder sb = new StringBuilder("Direct Cache Refresh: ");
+ sb.append("Updated ");
+ sb.append(count);
+ if(count==1) {
+ sb.append(" entry for ");
+ } else {
+ sb.append(" entries for ");
+ }
+ int peers = count<=0?0:cc.total/count;
+ sb.append(peers);
+ sb.append(" client");
+ if(peers!=1) {
+ sb.append('s');
+ }
+ sb.append(" in ");
+ sb.append(millis);
+ sb.append("ms");
+ trans.auditTrail(0, sb, Env.REMOTE);
+ env.debug().log(sb);
+ }
+ }
+ } catch (InterruptedException e1) {
+ go = false;
+ Thread.currentThread().interrupt();
+ }
+ } while(go);
+ }
+ }
- private void init(AuthzTrans trans) throws APIException, IOException {
-
- String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);
- psCheck = getSession(trans).prepare(SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE);
+ private void init(AuthzTrans trans) throws APIException, IOException {
+
+ String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt);
+ psCheck = getSession(trans).prepare(SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE);
- disable(CRUD.create);
- disable(CRUD.delete);
- }
+ disable(CRUD.create);
+ disable(CRUD.delete);
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.cass.CIDAO#touch(org.onap.aaf.auth.env.test.AuthzTrans, java.lang.String, int)
- */
-
- @Override
- public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {
- /////////////
- // Direct Service Cache Invalidation
- /////////////
- // ConcurrentQueues are open-ended. We don't want any Memory leaks
- // Note: we keep a separate counter, because "size()" on a Linked Queue is expensive
- if(cacheUpdate!=null) {
- try {
- if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {
- trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );
- }
- } catch (InterruptedException e) {
- trans.error().log("Cache Notify Queue posting was interrupted" );
- Thread.currentThread().interrupt();
- }
- }
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.cass.CIDAO#touch(org.onap.aaf.auth.env.test.AuthzTrans, java.lang.String, int)
+ */
+
+ @Override
+ public Result<Void> touch(AuthzTrans trans, String name, int ... seg) {
+ /////////////
+ // Direct Service Cache Invalidation
+ /////////////
+ // ConcurrentQueues are open-ended. We don't want any Memory leaks
+ // Note: we keep a separate counter, because "size()" on a Linked Queue is expensive
+ if(cacheUpdate!=null) {
+ try {
+ if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) {
+ trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" );
+ }
+ } catch (InterruptedException e) {
+ trans.error().log("Cache Notify Queue posting was interrupted" );
+ Thread.currentThread().interrupt();
+ }
+ }
- /////////////
- // Table Based Cache Invalidation (original)
- /////////////
- // Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN
- StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");
- start.append(name);
- start.append(": ");
- StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
- boolean first = true;
- for(int s : seg) {
- sb.append(UPDATE_SP);
- sb.append(TABLE);
- sb.append(" SET touched=dateof(now()) WHERE name = '");
- sb.append(name);
- sb.append("' AND seg = ");
- sb.append(s);
- sb.append(";\n");
- if(first) {
- first =false;
- } else {
- start.append(',');
- }
- start.append(s);
- }
- sb.append("APPLY BATCH;");
- TimeTaken tt = trans.start(start.toString(),Env.REMOTE);
- try {
- getSession(trans).executeAsync(sb.toString());
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- } finally {
- tt.done();
- }
- return Result.ok();
- }
+ /////////////
+ // Table Based Cache Invalidation (original)
+ /////////////
+ // Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN
+ StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments ");
+ start.append(name);
+ start.append(": ");
+ StringBuilder sb = new StringBuilder("BEGIN BATCH\n");
+ boolean first = true;
+ for(int s : seg) {
+ sb.append(UPDATE_SP);
+ sb.append(TABLE);
+ sb.append(" SET touched=dateof(now()) WHERE name = '");
+ sb.append(name);
+ sb.append("' AND seg = ");
+ sb.append(s);
+ sb.append(";\n");
+ if(first) {
+ first =false;
+ } else {
+ start.append(',');
+ }
+ start.append(s);
+ }
+ sb.append("APPLY BATCH;");
+ TimeTaken tt = trans.start(start.toString(),Env.REMOTE);
+ try {
+ getSession(trans).executeAsync(sb.toString());
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ } finally {
+ tt.done();
+ }
+ return Result.ok();
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.cass.CIDAO#check(org.onap.aaf.auth.env.test.AuthzTrans)
- */
- @Override
- public Result<Void> check(AuthzTrans trans) {
- ResultSet rs;
- TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);
- try {
- rs = getSession(trans).execute(new BoundStatement(psCheck));
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- } finally {
- tt.done();
- }
-
- String lastName = null;
- Date[] dates = null;
- for(Row row : rs.all()) {
- String name = row.getString(0);
- int seg = row.getInt(1);
- if(!name.equals(lastName)) {
- dates = info.get(name);
- lastName=name;
- }
- if(dates==null) {
- dates=new Date[seg+1];
- info.put(name,dates);
- } else if(dates.length<=seg) {
- Date[] temp = new Date[seg+1];
- System.arraycopy(dates, 0, temp, 0, dates.length);
- dates = temp;
- info.put(name, dates);
- }
- Date temp = row.getTimestamp(2);
- if(dates[seg]==null || dates[seg].before(temp)) {
- dates[seg]=temp;
- }
- }
- return Result.ok();
- }
-
/* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.cass.CIDAO#get(java.lang.String, int)
- */
+ * @see org.onap.aaf.auth.dao.cass.CIDAO#check(org.onap.aaf.auth.env.test.AuthzTrans)
+ */
@Override
- public Date get(AuthzTrans trans, String table, int seg) {
- Date[] dates = info.get(table);
- if(dates==null) {
- dates = new Date[seg+1];
- touch(trans,table, seg);
- } else if(dates.length<=seg) {
- Date[] temp = new Date[seg+1];
- System.arraycopy(dates, 0, temp, 0, dates.length);
- dates = temp;
- }
- Date rv = dates[seg];
- if(rv==null) {
- rv=dates[seg]=startTime;
- }
- return rv;
- }
+ public Result<Void> check(AuthzTrans trans) {
+ ResultSet rs;
+ TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE);
+ try {
+ rs = getSession(trans).execute(new BoundStatement(psCheck));
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ } finally {
+ tt.done();
+ }
+
+ String lastName = null;
+ Date[] dates = null;
+ for(Row row : rs.all()) {
+ String name = row.getString(0);
+ int seg = row.getInt(1);
+ if(!name.equals(lastName)) {
+ dates = info.get(name);
+ lastName=name;
+ }
+ if(dates==null) {
+ dates=new Date[seg+1];
+ info.put(name,dates);
+ } else if(dates.length<=seg) {
+ Date[] temp = new Date[seg+1];
+ System.arraycopy(dates, 0, temp, 0, dates.length);
+ dates = temp;
+ info.put(name, dates);
+ }
+ Date temp = row.getTimestamp(2);
+ if(dates[seg]==null || dates[seg].before(temp)) {
+ dates[seg]=temp;
+ }
+ }
+ return Result.ok();
+ }
+
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.cass.CIDAO#get(java.lang.String, int)
+ */
+ @Override
+ public Date get(AuthzTrans trans, String table, int seg) {
+ Date[] dates = info.get(table);
+ if(dates==null) {
+ dates = new Date[seg+1];
+ touch(trans,table, seg);
+ } else if(dates.length<=seg) {
+ Date[] temp = new Date[seg+1];
+ System.arraycopy(dates, 0, temp, 0, dates.length);
+ dates = temp;
+ }
+ Date rv = dates[seg];
+ if(rv==null) {
+ rv=dates[seg]=startTime;
+ }
+ return rv;
+ }
- @Override
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- // Do nothing
- }
+ @Override
+ protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+ // Do nothing
+ }
} \ No newline at end of file
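
Aside on CacheInfoDAO above: check() and get() keep one Date[] per table, indexed by cache segment, growing the array when a higher segment appears and keeping only the newest "touched" timestamp per segment. The stand-alone sketch below mirrors that bookkeeping without the Cassandra read; the table name and segment numbers in main are made up for the demo.

    import java.util.Date;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class SegmentTouchTracker {
        private final Map<String, Date[]> info = new ConcurrentHashMap<>();

        // Record the latest "touched" timestamp seen for (table, seg).
        void record(String table, int seg, Date touched) {
            Date[] dates = info.get(table);
            if (dates == null) {
                dates = new Date[seg + 1];
            } else if (dates.length <= seg) {
                Date[] grown = new Date[seg + 1];          // grow to hold the new segment
                System.arraycopy(dates, 0, grown, 0, dates.length);
                dates = grown;
            }
            if (dates[seg] == null || dates[seg].before(touched)) {
                dates[seg] = touched;                      // keep only the newest timestamp
            }
            info.put(table, dates);
        }

        Date latest(String table, int seg) {
            Date[] dates = info.get(table);
            return (dates == null || dates.length <= seg) ? null : dates[seg];
        }

        public static void main(String[] args) {
            SegmentTouchTracker t = new SegmentTouchTracker();
            t.record("role", 3, new Date(1000L));
            t.record("role", 7, new Date(2000L));          // forces the array to grow
            System.out.println(t.latest("role", 3) + " / " + t.latest("role", 7));
        }
    }
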
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java
index af4b2302..be7c44ae 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CacheableData.java
@@ -26,10 +26,10 @@ import org.onap.aaf.auth.dao.Cached;
import org.onap.aaf.auth.dao.CachedDAO;
public abstract class CacheableData implements Cacheable {
- // WARNING: DON'T attempt to add any members here, as it will
- // be treated by system as fields expected in Tables
- protected int seg(Cached<?,?> cache, Object ... fields) {
- return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields));
- }
-
+ // WARNING: DON'T attempt to add any members here, as it will
+ // be treated by system as fields expected in Tables
+ protected int seg(Cached<?,?> cache, Object ... fields) {
+ return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields));
+ }
+
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java
index 28e27497..00c9ae90 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CertDAO.java
@@ -52,9 +52,9 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
private HistoryDAO historyDAO;
- private CIDAO<AuthzTrans> infoDAO;
- private PSInfo psX500,psID;
-
+ private CIDAO<AuthzTrans> infoDAO;
+ private PSInfo psX500,psID;
+
public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
init(trans);
@@ -68,47 +68,47 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
}
public static final int KEYLIMIT = 2;
- public static class Data extends CacheableData implements Bytification {
-
- public String ca;
- public BigInteger serial;
- public String id;
- public String x500;
- public String x509;
+ public static class Data extends CacheableData implements Bytification {
+
+ public String ca;
+ public BigInteger serial;
+ public String id;
+ public String x500;
+ public String x509;
@Override
- public int[] invalidate(Cached<?,?> cache) {
- return new int[] {
- seg(cache,ca,serial)
- };
- }
+ public int[] invalidate(Cached<?,?> cache) {
+ return new int[] {
+ seg(cache,ca,serial)
+ };
+ }
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- CertLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- CertLoader.deflt.unmarshal(this, toDIS(bb));
- }
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CertLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ CertLoader.deflt.unmarshal(this, toDIS(bb));
+ }
}
private static class CertLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=85102934;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48; // Note:
+ public static final int MAGIC=85102934;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48; // Note:
- public static final CertLoader deflt = new CertLoader(KEYLIMIT);
- public CertLoader(int keylimit) {
+ public static final CertLoader deflt = new CertLoader(KEYLIMIT);
+ public CertLoader(int keylimit) {
super(keylimit);
}
- @Override
+ @Override
public Data load(Data data, Row row) {
- data.ca = row.getString(0);
+ data.ca = row.getString(0);
ByteBuffer bb = row.getBytesUnsafe(1);
byte[] bytes = new byte[bb.remaining()];
bb.get(bytes);
@@ -127,7 +127,7 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
@Override
protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
+ int idx = _idx;
obj[idx] = data.id;
obj[++idx] = data.x500;
@@ -136,74 +136,74 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.id);
- writeString(os, data.x500);
- writeString(os, data.x509);
- writeString(os, data.ca);
- if(data.serial==null) {
- os.writeInt(-1);
- } else {
- byte[] dsba = data.serial.toByteArray();
- int l = dsba.length;
- os.writeInt(l);
- os.write(dsba,0,l);
- }
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.id = readString(is,buff);
- data.x500 = readString(is,buff);
- data.x509 = readString(is,buff);
- data.ca = readString(is,buff);
- int i = is.readInt();
- data.serial=null;
- if(i>=0) {
- byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
- if(is.read(bytes)>0) {
- data.serial = new BigInteger(bytes);
- }
- }
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.id);
+ writeString(os, data.x500);
+ writeString(os, data.x509);
+ writeString(os, data.ca);
+ if(data.serial==null) {
+ os.writeInt(-1);
+ } else {
+ byte[] dsba = data.serial.toByteArray();
+ int l = dsba.length;
+ os.writeInt(l);
+ os.write(dsba,0,l);
+ }
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.id = readString(is,buff);
+ data.x500 = readString(is,buff);
+ data.x509 = readString(is,buff);
+ data.ca = readString(is,buff);
+ int i = is.readInt();
+ data.serial=null;
+ if(i>=0) {
+ byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
+ if(is.read(bytes)>0) {
+ data.serial = new BigInteger(bytes);
+ }
+ }
+ }
}
public Result<List<CertDAO.Data>> read(AuthzTrans trans, Object ... key) {
- // Translate BigInteger to Byte array for lookup
- return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));
+ // Translate BigInteger to Byte array for lookup
+ return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray()));
}
private void init(AuthzTrans trans) throws APIException, IOException {
// Set up sub-DAOs
if(historyDAO==null) {
- historyDAO = new HistoryDAO(trans,this);
+ historyDAO = new HistoryDAO(trans,this);
+ }
+ if(infoDAO==null) {
+ infoDAO = new CacheInfoDAO(trans,this);
}
- if(infoDAO==null) {
- infoDAO = new CacheInfoDAO(trans,this);
- }
- String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);
+ String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt);
- psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE id = ?", CertLoader.deflt,readConsistency);
+ psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE id = ?", CertLoader.deflt,readConsistency);
- psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE x500 = ?", CertLoader.deflt,readConsistency);
-
+ psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE x500 = ?", CertLoader.deflt,readConsistency);
+
}
- public Result<List<Data>> readX500(AuthzTrans trans, String x500) {
- return psX500.read(trans, R_TEXT, new Object[]{x500});
- }
+ public Result<List<Data>> readX500(AuthzTrans trans, String x500) {
+ return psX500.read(trans, R_TEXT, new Object[]{x500});
+ }
- public Result<List<Data>> readID(AuthzTrans trans, String id) {
- return psID.read(trans, R_TEXT, new Object[]{id});
- }
+ public Result<List<Data>> readID(AuthzTrans trans, String id) {
+ return psID.read(trans, R_TEXT, new Object[]{id});
+ }
/**
* Log Modification statements to History
@@ -214,8 +214,8 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
hd.user = trans.user();
@@ -226,19 +226,19 @@ public class CertDAO extends CassDAOImpl<AuthzTrans,CertDAO.Data> {
? String.format("%s by %s", override[0], hd.user)
: (modified.name() + "d certificate info for " + data.id);
// Detail?
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize CertDAO.Data");
- }
- }
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.error().log(e,"Could not serialize CertDAO.Data");
+ }
+ }
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
- trans.error().log("Cannot touch Cert");
+ trans.error().log("Cannot touch Cert");
}
}
}
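
Note on the serialization above: CertLoader length-prefixes the nullable BigInteger serial, writing -1 for null and otherwise the byte count followed by the raw toByteArray() bytes. A minimal standalone round trip of that convention, using only the JDK (class and method names here are illustrative, not part of AAF):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.math.BigInteger;

    public class SerialCodecSketch {
        // Write -1 for null, otherwise the length followed by the raw bytes.
        static void writeSerial(DataOutputStream os, BigInteger serial) throws IOException {
            if (serial == null) {
                os.writeInt(-1);
            } else {
                byte[] dsba = serial.toByteArray();
                os.writeInt(dsba.length);
                os.write(dsba, 0, dsba.length);
            }
        }

        // Read the length prefix; a negative value means the serial was null.
        static BigInteger readSerial(DataInputStream is) throws IOException {
            int len = is.readInt();
            if (len < 0) {
                return null;
            }
            byte[] bytes = new byte[len];
            is.readFully(bytes); // readFully avoids the short-read risk of a bare is.read(bytes)
            return new BigInteger(bytes);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            writeSerial(new DataOutputStream(baos), new BigInteger("123456789012345678901234567890"));
            BigInteger back = readSerial(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
            System.out.println(back); // prints the same value that was written
        }
    }
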
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ConfigDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ConfigDAO.java
index df284044..398e7323 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ConfigDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/ConfigDAO.java
@@ -45,7 +45,7 @@ import com.datastax.driver.core.Row;
public class ConfigDAO extends CassDAOImpl<AuthzTrans,ConfigDAO.Data> {
public static final String TABLE = "config";
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
- private PSInfo psName;
+ private PSInfo psName;
public ConfigDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
super(trans, ConfigDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
@@ -58,25 +58,25 @@ public class ConfigDAO extends CassDAOImpl<AuthzTrans,ConfigDAO.Data> {
}
public static final int KEYLIMIT = 2;
- public static class Data {
- public String name;
- public String tag;
- public String value;
+ public static class Data {
+ public String name;
+ public String tag;
+ public String value;
}
private static class ConfigLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=2673849;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48;
+ public static final int MAGIC=2673849;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48;
- public static final ConfigLoader deflt = new ConfigLoader(KEYLIMIT);
- public ConfigLoader(int keylimit) {
+ public static final ConfigLoader deflt = new ConfigLoader(KEYLIMIT);
+ public ConfigLoader(int keylimit) {
super(keylimit);
}
- @Override
+ @Override
public Data load(Data data, Row row) {
- data.name = row.getString(0);
+ data.name = row.getString(0);
data.tag = row.getString(1);
data.value = row.getString(2);
return data;
@@ -93,30 +93,30 @@ public class ConfigDAO extends CassDAOImpl<AuthzTrans,ConfigDAO.Data> {
obj[_idx] = data.value;
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.name);
- writeString(os, data.tag);
- writeString(os, data.value);
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.name = readString(is,buff);
- data.tag = readString(is,buff);
- data.value = readString(is,buff);
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.name);
+ writeString(os, data.tag);
+ writeString(os, data.value);
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.name = readString(is,buff);
+ data.tag = readString(is,buff);
+ data.value = readString(is,buff);
+ }
}
private void init(AuthzTrans trans) throws APIException, IOException {
- String[] helpers = setCRUD(trans, TABLE, Data.class, ConfigLoader.deflt);
+ String[] helpers = setCRUD(trans, TABLE, Data.class, ConfigLoader.deflt);
- psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE name = ?", ConfigLoader.deflt,readConsistency);
+ psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE name = ?", ConfigLoader.deflt,readConsistency);
}
@@ -129,12 +129,12 @@ public class ConfigDAO extends CassDAOImpl<AuthzTrans,ConfigDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- // not an auditable table.
+ // not an auditable table.
}
- public Result<List<Data>> readName(AuthzTrans trans, String name) {
- return psName.read(trans, R_TEXT, new Object[]{name});
- }
+ public Result<List<Data>> readName(AuthzTrans trans, String name) {
+ return psName.read(trans, R_TEXT, new Object[]{name});
+ }
}
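
Note: ConfigLoader, like the other streamers touched by this change, starts each record with a MAGIC/VERSION header so unmarshal can reject foreign bytes and branch on the version if the layout ever changes between production runs. A simplified, self-contained sketch of that header convention (the real helpers live in Loader; only the MAGIC value 2673849 is taken from the code above):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class HeaderSketch {
        static final int MAGIC = 2673849; // ConfigLoader's magic number
        static final int VERSION = 1;

        static void writeHeader(DataOutputStream os) throws IOException {
            os.writeInt(MAGIC);
            os.writeInt(VERSION);
        }

        // Returns the version so callers can switch on it when the layout evolves.
        static int readHeader(DataInputStream is) throws IOException {
            if (is.readInt() != MAGIC) {
                throw new IOException("Corrupted or foreign data: bad magic");
            }
            return is.readInt();
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            writeHeader(new DataOutputStream(baos));
            int version = readHeader(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
            System.out.println("version " + version);
        }
    }
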
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java
index 76e3b424..5bcba9d9 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/CredDAO.java
@@ -51,16 +51,16 @@ import com.datastax.driver.core.Row;
public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
public static final String TABLE = "cred";
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
- public static final int RAW = -1;
+ public static final int RAW = -1;
public static final int BASIC_AUTH = 1;
public static final int BASIC_AUTH_SHA256 = 2;
public static final int CERT_SHA256_RSA =200;
private HistoryDAO historyDAO;
- private CIDAO<AuthzTrans> infoDAO;
- private PSInfo psNS;
- private PSInfo psID;
-
+ private CIDAO<AuthzTrans> infoDAO;
+ private PSInfo psNS;
+ private PSInfo psID;
+
public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
init(trans);
@@ -74,52 +74,52 @@ public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
}
public static final int KEYLIMIT = 3;
- public static class Data extends CacheableData implements Bytification {
-
- public String id;
- public Integer type;
- public Date expires;
- public Integer other;
- public String ns;
- public String notes;
- public ByteBuffer cred; // this is a blob in cassandra
+ public static class Data extends CacheableData implements Bytification {
+
+ public String id;
+ public Integer type;
+ public Date expires;
+ public Integer other;
+ public String ns;
+ public String notes;
+ public ByteBuffer cred; // this is a blob in cassandra
@Override
- public int[] invalidate(Cached<?,?> cache) {
- return new int[] {
- seg(cache,id) // cache is for all entities
- };
- }
+ public int[] invalidate(Cached<?,?> cache) {
+ return new int[] {
+ seg(cache,id) // cache is for all entities
+ };
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ CredLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- CredLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- CredLoader.deflt.unmarshal(this, toDIS(bb));
- }
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ CredLoader.deflt.unmarshal(this, toDIS(bb));
+ }
- public String toString() {
- return id + ' ' + type + ' ' + Chrono.dateTime(expires);
- }
+ public String toString() {
+ return id + ' ' + type + ' ' + Chrono.dateTime(expires);
+ }
}
private static class CredLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=153323443;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48; // Note:
+ public static final int MAGIC=153323443;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48; // Note:
- public static final CredLoader deflt = new CredLoader(KEYLIMIT);
- public CredLoader(int keylimit) {
+ public static final CredLoader deflt = new CredLoader(KEYLIMIT);
+ public CredLoader(int keylimit) {
super(keylimit);
}
- @Override
+ @Override
public Data load(Data data, Row row) {
data.id = row.getString(0);
data.type = row.getInt(1); // NOTE: in datastax driver, If the int value is NULL, 0 is returned!
@@ -133,7 +133,7 @@ public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
@Override
protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
+ int idx = _idx;
obj[idx] = data.id;
obj[++idx] = data.type;
@@ -149,77 +149,77 @@ public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
obj[++i] = data.cred;
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.id);
- os.writeInt(data.type);
- os.writeLong(data.expires==null?-1:data.expires.getTime());
- os.writeInt(data.other==null?0:data.other);
- writeString(os, data.ns);
- writeString(os, data.notes);
- if(data.cred==null) {
- os.writeInt(-1);
- } else {
- int l = data.cred.limit()-data.cred.position();
- os.writeInt(l);
- os.write(data.cred.array(),data.cred.position(),l);
- }
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.id);
+ os.writeInt(data.type);
+ os.writeLong(data.expires==null?-1:data.expires.getTime());
+ os.writeInt(data.other==null?0:data.other);
+ writeString(os, data.ns);
+ writeString(os, data.notes);
+ if(data.cred==null) {
+ os.writeInt(-1);
+ } else {
+ int l = data.cred.limit()-data.cred.position();
+ os.writeInt(l);
+ os.write(data.cred.array(),data.cred.position(),l);
+ }
+ }
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.id = readString(is,buff);
- data.type = is.readInt();
-
- long l = is.readLong();
- data.expires = l<0?null:new Date(l);
- data.other = is.readInt();
- data.ns = readString(is,buff);
- data.notes = readString(is,buff);
-
- int i = is.readInt();
- data.cred=null;
- if(i>=0) {
- byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
- int read = is.read(bytes);
- if(read>0) {
- data.cred = ByteBuffer.wrap(bytes);
- }
- }
- }
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.id = readString(is,buff);
+ data.type = is.readInt();
+
+ long l = is.readLong();
+ data.expires = l<0?null:new Date(l);
+ data.other = is.readInt();
+ data.ns = readString(is,buff);
+ data.notes = readString(is,buff);
+
+ int i = is.readInt();
+ data.cred=null;
+ if(i>=0) {
+ byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads
+ int read = is.read(bytes);
+ if(read>0) {
+ data.cred = ByteBuffer.wrap(bytes);
+ }
+ }
+ }
}
private void init(AuthzTrans trans) throws APIException, IOException {
// Set up sub-DAOs
if(historyDAO==null) {
- historyDAO = new HistoryDAO(trans,this);
+ historyDAO = new HistoryDAO(trans,this);
}
- if(infoDAO==null) {
- infoDAO = new CacheInfoDAO(trans,this);
- }
-
+ if(infoDAO==null) {
+ infoDAO = new CacheInfoDAO(trans,this);
+ }
+
- String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);
-
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns = ?", CredLoader.deflt,readConsistency);
-
- psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE id = ?", CredLoader.deflt,readConsistency);
+ String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt);
+
+ psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns = ?", CredLoader.deflt,readConsistency);
+
+ psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE id = ?", CredLoader.deflt,readConsistency);
+ }
+
+ public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+ return psNS.read(trans, R_TEXT, new Object[]{ns});
+ }
+
+ public Result<List<Data>> readID(AuthzTrans trans, String id) {
+ return psID.read(trans, R_TEXT, new Object[]{id});
}
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
- return psNS.read(trans, R_TEXT, new Object[]{ns});
- }
-
- public Result<List<Data>> readID(AuthzTrans trans, String id) {
- return psID.read(trans, R_TEXT, new Object[]{id});
- }
-
/**
* Log Modification statements to History
*
@@ -229,8 +229,8 @@ public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
hd.user = trans.user();
@@ -241,19 +241,19 @@ public class CredDAO extends CassDAOImpl<AuthzTrans,CredDAO.Data> {
? String.format("%s by %s", override[0], hd.user)
: (modified.name() + "d credential for " + data.id);
// Detail?
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize CredDAO.Data");
- }
- }
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.error().log(e,"Could not serialize CredDAO.Data");
+ }
+ }
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) {
- trans.error().log("Cannot touch Cred");
+ trans.error().log("Cannot touch Cred");
}
}
}
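
Note: CredLoader encodes the nullable expires Date as a long (-1 for null) and the cred blob as a length-prefixed byte run taken from the ByteBuffer's position..limit window. A standalone sketch of those two conventions (JDK-only; the names are illustrative):

    import java.io.*;
    import java.nio.ByteBuffer;
    import java.util.Date;

    public class CredCodecSketch {
        static void write(DataOutputStream os, Date expires, ByteBuffer cred) throws IOException {
            os.writeLong(expires == null ? -1 : expires.getTime());
            if (cred == null) {
                os.writeInt(-1);
            } else {
                int len = cred.limit() - cred.position(); // only the readable window
                os.writeInt(len);
                os.write(cred.array(), cred.position(), len);
            }
        }

        static Date readExpires(DataInputStream is) throws IOException {
            long l = is.readLong();
            return l < 0 ? null : new Date(l);
        }

        static ByteBuffer readCred(DataInputStream is) throws IOException {
            int len = is.readInt();
            if (len < 0) {
                return null;
            }
            byte[] bytes = new byte[len];
            is.readFully(bytes);
            return ByteBuffer.wrap(bytes);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            write(new DataOutputStream(baos), new Date(), ByteBuffer.wrap("secret".getBytes()));
            DataInputStream is = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
            System.out.println(readExpires(is) + " / " + readCred(is).remaining() + " cred bytes");
        }
    }
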
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java
index 78a98e1d..b137b640 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/DelegateDAO.java
@@ -42,97 +42,97 @@ import com.datastax.driver.core.Row;
public class DelegateDAO extends CassDAOImpl<AuthzTrans, DelegateDAO.Data> {
- public static final String TABLE = "delegate";
- private PSInfo psByDelegate;
-
- public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
- super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- init(trans);
- }
-
- public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
- super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- init(trans);
- }
-
- private static final int KEYLIMIT = 1;
- public static class Data implements Bytification {
- public String user;
- public String delegate;
- public Date expires;
-
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- DelegateLoader.dflt.unmarshal(this, toDIS(bb));
- }
- }
-
- private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=0xD823ACF2;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48;
-
- public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);
-
- public DelegateLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- data.user = row.getString(0);
- data.delegate = row.getString(1);
- data.expires = row.getTimestamp(2);
- return data;
- }
-
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.user;
- }
-
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
-
- obj[idx]=data.delegate;
- obj[++idx]=data.expires;
- }
-
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.user);
- writeString(os, data.delegate);
- os.writeLong(data.expires.getTime());
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.user = readString(is, buff);
- data.delegate = readString(is,buff);
- data.expires = new Date(is.readLong());
- }
- }
-
- private void init(AuthzTrans trans) {
- String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);
- psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE delegate = ?", new DelegateLoader(1),readConsistency);
-
- }
-
- public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {
- return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});
- }
+ public static final String TABLE = "delegate";
+ private PSInfo psByDelegate;
+
+ public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+ super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ init(trans);
+ }
+
+ public DelegateDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
+ super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ init(trans);
+ }
+
+ private static final int KEYLIMIT = 1;
+ public static class Data implements Bytification {
+ public String user;
+ public String delegate;
+ public Date expires;
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DelegateLoader.dflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ DelegateLoader.dflt.unmarshal(this, toDIS(bb));
+ }
+ }
+
+ private static class DelegateLoader extends Loader<Data> implements Streamer<Data>{
+ public static final int MAGIC=0xD823ACF2;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48;
+
+ public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT);
+
+ public DelegateLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+ data.user = row.getString(0);
+ data.delegate = row.getString(1);
+ data.expires = row.getTimestamp(2);
+ return data;
+ }
+
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.user;
+ }
+
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+
+ obj[idx]=data.delegate;
+ obj[++idx]=data.expires;
+ }
+
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.user);
+ writeString(os, data.delegate);
+ os.writeLong(data.expires.getTime());
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.user = readString(is, buff);
+ data.delegate = readString(is,buff);
+ data.expires = new Date(is.readLong());
+ }
+ }
+
+ private void init(AuthzTrans trans) {
+ String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt);
+ psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE delegate = ?", new DelegateLoader(1),readConsistency);
+
+ }
+
+ public Result<List<DelegateDAO.Data>> readByDelegate(AuthzTrans trans, String delegate) {
+ return psByDelegate.read(trans, R_TEXT, new Object[]{delegate});
+ }
}
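
Note: psByDelegate is built with a DelegateLoader whose keylimit is 1, so only the delegate value is bound to the single "?" in "WHERE delegate = ?". The following is a rough, hypothetical illustration of the keylimit idea only (the first keylimit bind slots come from key(), the rest from body()); it is not the real AAF Loader and its binding internals are assumed:

    import java.util.Arrays;
    import java.util.Date;

    public class KeylimitSketch {
        static class DelegateData {
            String user;
            String delegate;
            Date expires;
        }

        abstract static class MiniLoader<T> {
            final int keylimit;
            MiniLoader(int keylimit) { this.keylimit = keylimit; }
            abstract void key(T data, int idx, Object[] obj);
            abstract void body(T data, int idx, Object[] obj);

            Object[] bind(T data, int totalSlots) {
                Object[] obj = new Object[totalSlots];
                key(data, 0, obj);           // slots [0, keylimit)
                body(data, keylimit, obj);   // slots [keylimit, totalSlots)
                return obj;
            }
        }

        public static void main(String[] args) {
            MiniLoader<DelegateData> loader = new MiniLoader<DelegateData>(1) {
                @Override void key(DelegateData d, int idx, Object[] obj) { obj[idx] = d.user; }
                @Override void body(DelegateData d, int _idx, Object[] obj) {
                    int idx = _idx;
                    obj[idx] = d.delegate;
                    obj[++idx] = d.expires;
                }
            };
            DelegateData d = new DelegateData();
            d.user = "user@example.com";
            d.delegate = "delegate@example.com";
            d.expires = new Date();
            System.out.println(Arrays.toString(loader.bind(d, 3)));
        }
    }
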
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java
index 0263e009..6b0ea2df 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/FutureDAO.java
@@ -45,13 +45,13 @@ import com.datastax.driver.core.Row;
*/
public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
private static final String TABLE = "future";
- private final HistoryDAO historyDAO;
-// private static String createString;
- private PSInfo psByStartAndTarget;
-
+ private final HistoryDAO historyDAO;
+// private static String createString;
+ private PSInfo psByStartAndTarget;
+
public FutureDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
super(trans, FutureDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- historyDAO = new HistoryDAO(trans, this);
+ historyDAO = new HistoryDAO(trans, this);
init(trans);
}
@@ -64,11 +64,11 @@ public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
public static final int KEYLIMIT = 1;
public static class Data {
public UUID id;
- public String target;
- public String memo;
- public Date start;
- public Date expires;
- public ByteBuffer construct; // this is a blob in cassandra
+ public String target;
+ public String memo;
+ public Date start;
+ public Date expires;
+ public ByteBuffer construct; // this is a blob in cassandra
}
private static class FLoader extends Loader<Data> {
@@ -81,13 +81,13 @@ public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
}
@Override
- public Data load(Data data, Row row) {
- data.id = row.getUUID(0);
- data.target = row.getString(1);
+ public Data load(Data data, Row row) {
+ data.id = row.getUUID(0);
+ data.target = row.getString(1);
data.memo = row.getString(2);
- data.start = row.getTimestamp(3);
- data.expires = row.getTimestamp(4);
- data.construct = row.getBytes(5);
+ data.start = row.getTimestamp(3);
+ data.expires = row.getTimestamp(4);
+ data.construct = row.getBytes(5);
return data;
}
@@ -98,7 +98,7 @@ public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
@Override
protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
+ int idx = _idx;
obj[idx] = data.target;
obj[++idx] = data.memo;
@@ -113,71 +113,71 @@ public class FutureDAO extends CassDAOImpl<AuthzTrans,FutureDAO.Data> {
String[] helpers = setCRUD(trans, TABLE, Data.class, new FLoader(KEYLIMIT));
// Uh, oh. Can't use "now()" in Prepared Statements (at least at this level)
-// createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),";
+// createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),";
//
-// // Need a specialty Creator to handle the "now()"
-// replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" + helpers[FIELD_COMMAS] +
-// ") VALUES(now(),?,?,?,?,?)",new FLoader(0)));
-
- // Other SELECT style statements... match with a local Method
- psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
- " FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) {
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
-
- obj[idx]=data.start;
- obj[++idx]=data.target;
- }
- },readConsistency);
-
+// // Need a specialty Creator to handle the "now()"
+// replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" + helpers[FIELD_COMMAS] +
+// ") VALUES(now(),?,?,?,?,?)",new FLoader(0)));
+
+ // Other SELECT style statements... match with a local Method
+ psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+ " FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) {
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+
+ obj[idx]=data.start;
+ obj[++idx]=data.target;
+ }
+ },readConsistency);
+
}
public Result<List<Data>> readByStartAndTarget(AuthzTrans trans, Date start, String target) throws DAOException {
- return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target});
- }
+ return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target});
+ }
/**
- * Override create to add secondary ID to Subject in History, and create Data.ID, if it is null
+ * Override create to add secondary ID to Subject in History, and create Data.ID, if it is null
*/
- public Result<FutureDAO.Data> create(AuthzTrans trans, FutureDAO.Data data, String id) {
- // If ID is not set (typical), create one.
- if(data.id==null) {
- StringBuilder sb = new StringBuilder(trans.user());
- sb.append(data.target);
- sb.append(System.currentTimeMillis());
- data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes());
- }
- Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- wasModified(trans, CRUD.create, data, null, id);
- return Result.ok(data);
- }
-
- /**
- * Log Modification statements to History
- *
- * @param modified which CRUD action was done
- * @param data entity data that needs a log entry
- * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
- */
- @Override
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
- HistoryDAO.Data hd = HistoryDAO.newInitedData();
- hd.user = trans.user();
- hd.action = modified.name();
- hd.target = TABLE;
- hd.subject = subject?override[1]:"";
- hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo;
-
- if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
- }
- }
+ public Result<FutureDAO.Data> create(AuthzTrans trans, FutureDAO.Data data, String id) {
+ // If ID is not set (typical), create one.
+ if(data.id==null) {
+ StringBuilder sb = new StringBuilder(trans.user());
+ sb.append(data.target);
+ sb.append(System.currentTimeMillis());
+ data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes());
+ }
+ Result<ResultSet> rs = createPS.exec(trans, C_TEXT, data);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ wasModified(trans, CRUD.create, data, null, id);
+ return Result.ok(data);
+ }
+
+ /**
+ * Log Modification statements to History
+ *
+ * @param modified which CRUD action was done
+ * @param data entity data that needs a log entry
+ * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
+ */
+ @Override
+ protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
+ HistoryDAO.Data hd = HistoryDAO.newInitedData();
+ hd.user = trans.user();
+ hd.action = modified.name();
+ hd.target = TABLE;
+ hd.subject = subject?override[1]:"";
+ hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo;
+
+ if(historyDAO.create(trans, hd).status!=Status.OK) {
+ trans.error().log("Cannot log to History");
+ }
+ }
}
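
Note: create() above synthesizes an id when data.id is null by concatenating the transaction user, the target and the current time, then hashing through UUID.nameUUIDFromBytes, which yields a name-based (version 3) UUID rather than a random one. A standalone sketch of that derivation (the example values are placeholders):

    import java.util.UUID;

    public class FutureIdSketch {
        // Mirrors the derivation in FutureDAO.create(): user + target + current millis,
        // hashed into a name-based UUID (getBytes() uses the platform charset, as in the original).
        static UUID deriveId(String user, String target) {
            StringBuilder sb = new StringBuilder(user);
            sb.append(target);
            sb.append(System.currentTimeMillis());
            return UUID.nameUUIDFromBytes(sb.toString().getBytes());
        }

        public static void main(String[] args) {
            System.out.println(deriveId("someone@example.com", "some.target"));
        }
    }
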
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java
index 13af8795..ce7e5477 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/HistoryDAO.java
@@ -54,175 +54,175 @@ import com.datastax.driver.core.Row;
*
*/
public class HistoryDAO extends CassDAOImpl<AuthzTrans, HistoryDAO.Data> {
- private static final String TABLE = "history";
-
- private String[] helpers;
-
- private HistLoader defLoader;
-
- private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;
-
- public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
- super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
- init(trans);
- }
-
- public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
- super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
- init(trans);
- }
-
-
- private static final int KEYLIMIT = 1;
- public static class Data {
- public UUID id;
- public int yr_mon;
- public String user;
- public String action;
- public String target;
- public String subject;
- public String memo;
- public ByteBuffer reconstruct;
- }
-
- private static class HistLoader extends Loader<Data> {
- public HistLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- data.id = row.getUUID(0);
- data.yr_mon = row.getInt(1);
- data.user = row.getString(2);
- data.action = row.getString(3);
- data.target = row.getString(4);
- data.subject = row.getString(5);
- data.memo = row.getString(6);
- data.reconstruct = row.getBytes(7);
- return data;
- }
-
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.id;
- }
-
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.yr_mon;
- obj[++idx]=data.user;
- obj[++idx]=data.action;
- obj[++idx]=data.target;
- obj[++idx]=data.subject;
- obj[++idx]=data.memo;
-// obj[++idx]=data.detail;
- obj[++idx]=data.reconstruct;
- }
- };
-
- private void init(AuthzTrans trans) {
- // Loader must match fields order
- defLoader = new HistLoader(KEYLIMIT);
- helpers = setCRUD(trans, TABLE, Data.class, defLoader);
-
- // Need a specialty Creator to handle the "now()"
- // 9/9/2013 - Jonathan - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?
- // Unless this is fixed, we're putting in non-prepared statement
- // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616
- replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] +
- ") VALUES(now(),?,?,?,?,?,?,?)",
- new HistLoader(0) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- }
- },writeConsistency)
- );
-// disable(CRUD.Create);
-
- replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
- " FROM history WHERE id = ?", defLoader,readConsistency)
-// new HistLoader(2) {
-// @Override
-// protected void key(Data data, int idx, Object[] obj) {
-// obj[idx]=data.yr_mon;
-// obj[++idx]=data.id;
-// }
-// })
- );
- disable(CRUD.update);
- disable(CRUD.delete);
-
- readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
- " FROM history WHERE user = ?", defLoader,readConsistency);
- readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
- " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);
- readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
- " FROM history WHERE yr_mon = ?", defLoader,readConsistency);
- async(true); //TODO dropping messages with Async
- }
-
- public static Data newInitedData() {
- Data data = new Data();
- Date now = new Date();
- // Sonar claims that SimpleDateFormat is not thread safe, so we can't be static
- data.yr_mon = Integer.parseInt(new SimpleDateFormat("yyyyMM").format(now));
- // data.day_time = Integer.parseInt(dayTimeFormat.format(now));
- return data;
- }
-
- public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {
- Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- return extract(defLoader,rs.value,null,dflt);
- }
-
- /**
- * Gets the history for a user in the specified year and month
- * year - the year in yyyy format
- * month - the month in a year ...values 1 - 12
- **/
- public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {
- if(yyyymm.length==0) {
- return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
- }
- Result<ResultSet> rs = readByUser.exec(trans, "user", user);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
- }
-
- public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {
- if(yyyymm.length==0) {
- return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
- }
- Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);
- if(rs.notOK()) {
- return Result.err(rs);
- }
- return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
- }
-
- private class YYYYMM implements Accept<Data> {
- private int[] yyyymm;
- public YYYYMM(int yyyymm[]) {
- this.yyyymm = yyyymm;
- }
- @Override
- public boolean ok(Data data) {
- int dym = data.yr_mon;
- for(int ym:yyyymm) {
- if(dym==ym) {
- return true;
- }
- }
- return false;
- }
-
- };
-
+ private static final String TABLE = "history";
+
+ private String[] helpers;
+
+ private HistLoader defLoader;
+
+ private AbsCassDAO<AuthzTrans, Data>.PSInfo readByUser, readBySubject, readByYRMN;
+
+ public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
+ super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
+ init(trans);
+ }
+
+ public HistoryDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
+ super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY);
+ init(trans);
+ }
+
+
+ private static final int KEYLIMIT = 1;
+ public static class Data {
+ public UUID id;
+ public int yr_mon;
+ public String user;
+ public String action;
+ public String target;
+ public String subject;
+ public String memo;
+ public ByteBuffer reconstruct;
+ }
+
+ private static class HistLoader extends Loader<Data> {
+ public HistLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+ data.id = row.getUUID(0);
+ data.yr_mon = row.getInt(1);
+ data.user = row.getString(2);
+ data.action = row.getString(3);
+ data.target = row.getString(4);
+ data.subject = row.getString(5);
+ data.memo = row.getString(6);
+ data.reconstruct = row.getBytes(7);
+ return data;
+ }
+
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.id;
+ }
+
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.yr_mon;
+ obj[++idx]=data.user;
+ obj[++idx]=data.action;
+ obj[++idx]=data.target;
+ obj[++idx]=data.subject;
+ obj[++idx]=data.memo;
+// obj[++idx]=data.detail;
+ obj[++idx]=data.reconstruct;
+ }
+ };
+
+ private void init(AuthzTrans trans) {
+ // Loader must match fields order
+ defLoader = new HistLoader(KEYLIMIT);
+ helpers = setCRUD(trans, TABLE, Data.class, defLoader);
+
+ // Need a specialty Creator to handle the "now()"
+ // 9/9/2013 - Jonathan - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past?
+ // Unless this is fixed, we're putting in non-prepared statement
+ // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. https://issues.apache.org/jira/browse/CASSANDRA-5616
+ replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] +
+ ") VALUES(now(),?,?,?,?,?,?,?)",
+ new HistLoader(0) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ }
+ },writeConsistency)
+ );
+// disable(CRUD.Create);
+
+ replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+ " FROM history WHERE id = ?", defLoader,readConsistency)
+// new HistLoader(2) {
+// @Override
+// protected void key(Data data, int idx, Object[] obj) {
+// obj[idx]=data.yr_mon;
+// obj[++idx]=data.id;
+// }
+// })
+ );
+ disable(CRUD.update);
+ disable(CRUD.delete);
+
+ readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+ " FROM history WHERE user = ?", defLoader,readConsistency);
+ readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+ " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency);
+ readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] +
+ " FROM history WHERE yr_mon = ?", defLoader,readConsistency);
+ async(true); //TODO dropping messages with Async
+ }
+
+ public static Data newInitedData() {
+ Data data = new Data();
+ Date now = new Date();
+ // Sonar claims that SimpleDateFormat is not thread safe, so we can't be static
+ data.yr_mon = Integer.parseInt(new SimpleDateFormat("yyyyMM").format(now));
+ // data.day_time = Integer.parseInt(dayTimeFormat.format(now));
+ return data;
+ }
+
+ public Result<List<Data>> readByYYYYMM(AuthzTrans trans, int yyyymm) {
+ Result<ResultSet> rs = readByYRMN.exec(trans, "yr_mon", yyyymm);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ return extract(defLoader,rs.value,null,dflt);
+ }
+
+ /**
+ * Gets the history for a user in the specified year and month
+ * year - the year in yyyy format
+ * month - the month in a year ...values 1 - 12
+ **/
+ public Result<List<Data>> readByUser(AuthzTrans trans, String user, int ... yyyymm) {
+ if(yyyymm.length==0) {
+ return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
+ }
+ Result<ResultSet> rs = readByUser.exec(trans, "user", user);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
+ }
+
+ public Result<List<Data>> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) {
+ if(yyyymm.length==0) {
+ return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified");
+ }
+ Result<ResultSet> rs = readBySubject.exec(trans, "subject", subject, target);
+ if(rs.notOK()) {
+ return Result.err(rs);
+ }
+ return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt);
+ }
+
+ private class YYYYMM implements Accept<Data> {
+ private int[] yyyymm;
+ public YYYYMM(int yyyymm[]) {
+ this.yyyymm = yyyymm;
+ }
+ @Override
+ public boolean ok(Data data) {
+ int dym = data.yr_mon;
+ for(int ym:yyyymm) {
+ if(dym==ym) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ };
+
}
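
Note: history rows are bucketed by a numeric yr_mon (yyyyMM, e.g. 201809 for September 2018), computed in newInitedData() with a per-call SimpleDateFormat because the formatter is not thread-safe, and the YYYYMM Accept filter keeps only rows whose bucket matches one of the requested months. A standalone sketch of the bucket computation and the filter test:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class YrMonSketch {
        // Per-call formatter, as in HistoryDAO.newInitedData(): SimpleDateFormat is not thread-safe.
        static int yrMon(Date when) {
            return Integer.parseInt(new SimpleDateFormat("yyyyMM").format(when));
        }

        // Same membership test the YYYYMM filter applies to each row's yr_mon bucket.
        static boolean accepted(int rowYrMon, int... requested) {
            for (int ym : requested) {
                if (rowYrMon == ym) {
                    return true;
                }
            }
            return false;
        }

        public static void main(String[] args) {
            int bucket = yrMon(new Date());
            System.out.println(bucket + " accepted? " + accepted(bucket, bucket, 201809));
        }
    }
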
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java
index 4778331b..5d7aebed 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/LocateDAO.java
@@ -50,7 +50,7 @@ import com.datastax.driver.core.Row;
*/
public class LocateDAO extends CassDAOImpl<AuthzTrans,LocateDAO.Data> {
public static final String TABLE = "locate";
- private AbsCassDAO<AuthzTrans, Data>.PSInfo psName;
+ private AbsCassDAO<AuthzTrans, Data>.PSInfo psName;
public LocateDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
super(trans, LocateDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
@@ -63,68 +63,68 @@ public class LocateDAO extends CassDAOImpl<AuthzTrans,LocateDAO.Data> {
}
public static final int KEYLIMIT = 3;
- public static class Data implements Bytification {
-
- public String name;
- public String hostname;
- public int port;
- public int major;
- public int minor;
- public int patch;
- public int pkg;
- public float latitude;
- public float longitude;
- public String protocol;
- private Set<String> subprotocol;
- public UUID port_key; // Note: Keep Port_key LAST at all times, because we shorten the UPDATE to leave Port_key Alone during reregistration.
+ public static class Data implements Bytification {
+
+ public String name;
+ public String hostname;
+ public int port;
+ public int major;
+ public int minor;
+ public int patch;
+ public int pkg;
+ public float latitude;
+ public float longitude;
+ public String protocol;
+ private Set<String> subprotocol;
+ public UUID port_key; // Note: Keep Port_key LAST at all times, because we shorten the UPDATE to leave Port_key Alone during reregistration.
- // Getters
- public Set<String> subprotocol(boolean mutable) {
- if (subprotocol == null) {
- subprotocol = new HashSet<>();
- } else if (mutable && !(subprotocol instanceof HashSet)) {
- subprotocol = new HashSet<>(subprotocol);
- }
- return subprotocol;
- }
-
+ // Getters
+ public Set<String> subprotocol(boolean mutable) {
+ if (subprotocol == null) {
+ subprotocol = new HashSet<>();
+ } else if (mutable && !(subprotocol instanceof HashSet)) {
+ subprotocol = new HashSet<>(subprotocol);
+ }
+ return subprotocol;
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ LocateLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
@Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- LocateLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- LocateLoader.deflt.unmarshal(this, toDIS(bb));
- }
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ LocateLoader.deflt.unmarshal(this, toDIS(bb));
+ }
}
private static class LocateLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=85102934;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48; // Note:
-
- public static final LocateLoader deflt = new LocateLoader(KEYLIMIT);
- public LocateLoader(int keylimit) {
- super(keylimit);
+ public static final int MAGIC=85102934;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48; // Note:
+
+ public static final LocateLoader deflt = new LocateLoader(KEYLIMIT);
+ public LocateLoader(int keylimit) {
+ super(keylimit);
}
- @Override
+ @Override
public Data load(Data data, Row row) {
- data.name = row.getString(0);
- data.hostname = row.getString(1);
- data.port = row.getInt(2);
- data.major = row.getInt(3);
- data.minor = row.getInt(4);
- data.patch = row.getInt(5);
- data.pkg = row.getInt(6);
- data.latitude = row.getFloat(7);
- data.longitude = row.getFloat(8);
- data.protocol = row.getString(9);
- data.subprotocol = row.getSet(10,String.class);
- data.port_key = row.getUUID(11);
+ data.name = row.getString(0);
+ data.hostname = row.getString(1);
+ data.port = row.getInt(2);
+ data.major = row.getInt(3);
+ data.minor = row.getInt(4);
+ data.patch = row.getInt(5);
+ data.pkg = row.getInt(6);
+ data.latitude = row.getFloat(7);
+ data.longitude = row.getFloat(8);
+ data.protocol = row.getString(9);
+ data.subprotocol = row.getSet(10,String.class);
+ data.port_key = row.getUUID(11);
return data;
}
@@ -137,7 +137,7 @@ public class LocateDAO extends CassDAOImpl<AuthzTrans,LocateDAO.Data> {
@Override
protected void body(final Data data, final int _idx, final Object[] obj) {
- int idx = _idx;
+ int idx = _idx;
obj[idx] = data.major;
obj[++idx] = data.minor;
obj[++idx] = data.patch;
@@ -149,73 +149,73 @@ public class LocateDAO extends CassDAOImpl<AuthzTrans,LocateDAO.Data> {
obj[++idx] = data.port_key;
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.name);
- writeString(os, data.hostname);
- os.writeInt(data.port);
- os.writeInt(data.major);
- os.writeInt(data.minor);
- os.writeInt(data.patch);
- os.writeInt(data.pkg);
- os.writeFloat(data.latitude);
- os.writeFloat(data.longitude);
- writeString(os, data.protocol);
- if(data.subprotocol==null) {
- os.writeInt(0);
- } else {
- os.writeInt(data.subprotocol.size());
- for(String s: data.subprotocol) {
- writeString(os,s);
- }
- }
-
- writeString(os,data.port_key==null?"":data.port_key.toString());
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.name);
+ writeString(os, data.hostname);
+ os.writeInt(data.port);
+ os.writeInt(data.major);
+ os.writeInt(data.minor);
+ os.writeInt(data.patch);
+ os.writeInt(data.pkg);
+ os.writeFloat(data.latitude);
+ os.writeFloat(data.longitude);
+ writeString(os, data.protocol);
+ if(data.subprotocol==null) {
+ os.writeInt(0);
+ } else {
+ os.writeInt(data.subprotocol.size());
+ for(String s: data.subprotocol) {
+ writeString(os,s);
+ }
+ }
+
+ writeString(os,data.port_key==null?"":data.port_key.toString());
+ }
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.name = readString(is,buff);
- data.hostname = readString(is,buff);
- data.port = is.readInt();
- data.major = is.readInt();
- data.minor = is.readInt();
- data.patch = is.readInt();
- data.pkg = is.readInt();
- data.latitude = is.readFloat();
- data.longitude = is.readFloat();
- data.protocol = readString(is,buff);
-
- int size = is.readInt();
- data.subprotocol = new HashSet<>(size);
- for(int i=0;i<size;++i) {
- data.subprotocol.add(readString(is,buff));
- }
- String port_key = readString(is,buff);
- if(port_key.length()>0) {
- data.port_key=UUID.fromString(port_key);
- } else {
- data.port_key = null;
- }
- }
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.name = readString(is,buff);
+ data.hostname = readString(is,buff);
+ data.port = is.readInt();
+ data.major = is.readInt();
+ data.minor = is.readInt();
+ data.patch = is.readInt();
+ data.pkg = is.readInt();
+ data.latitude = is.readFloat();
+ data.longitude = is.readFloat();
+ data.protocol = readString(is,buff);
+
+ int size = is.readInt();
+ data.subprotocol = new HashSet<>(size);
+ for(int i=0;i<size;++i) {
+ data.subprotocol.add(readString(is,buff));
+ }
+ String port_key = readString(is,buff);
+ if(port_key.length()>0) {
+ data.port_key=UUID.fromString(port_key);
+ } else {
+ data.port_key = null;
+ }
+ }
}
public Result<List<LocateDAO.Data>> readByName(AuthzTrans trans, String service) {
- return psName.read(trans, "Read By Name", new Object[] {service});
+ return psName.read(trans, "Read By Name", new Object[] {service});
}
private void init(AuthzTrans trans) throws APIException, IOException {
// Set up sub-DAOs
- String[] helpers = setCRUD(trans, TABLE, Data.class, LocateLoader.deflt);
-// int lastComma = helpers[ASSIGNMENT_COMMAS].lastIndexOf(',');
-// replace(CRUD.update,new PSInfo(trans,"UPDATE LOCATE SET " + helpers[ASSIGNMENT_COMMAS].substring(0, lastComma) +
-// " WHERE name=? AND hostname=? AND port=?;", new LocateLoader(3),writeConsistency));
- psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE name = ?", new LocateLoader(1),readConsistency);
+ String[] helpers = setCRUD(trans, TABLE, Data.class, LocateLoader.deflt);
+// int lastComma = helpers[ASSIGNMENT_COMMAS].lastIndexOf(',');
+// replace(CRUD.update,new PSInfo(trans,"UPDATE LOCATE SET " + helpers[ASSIGNMENT_COMMAS].substring(0, lastComma) +
+// " WHERE name=? AND hostname=? AND port=?;", new LocateLoader(3),writeConsistency));
+ psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE name = ?", new LocateLoader(1),readConsistency);
}
/**
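
Note: LocateLoader writes the subprotocol set as a count followed by each string, and the optional port_key UUID as its string form with an empty string standing in for null. A standalone round trip of those two conventions (JDK-only; writeUTF/readUTF are used here as a stand-in for the real Loader.writeString/readString helpers):

    import java.io.*;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.UUID;

    public class LocateCodecSketch {
        static void writeString(DataOutputStream os, String s) throws IOException {
            os.writeUTF(s == null ? "" : s); // stand-in for Loader.writeString
        }

        static void write(DataOutputStream os, Set<String> subprotocol, UUID portKey) throws IOException {
            if (subprotocol == null) {
                os.writeInt(0);
            } else {
                os.writeInt(subprotocol.size());
                for (String s : subprotocol) {
                    writeString(os, s);
                }
            }
            writeString(os, portKey == null ? "" : portKey.toString());
        }

        static void read(DataInputStream is) throws IOException {
            int size = is.readInt();
            Set<String> subprotocol = new HashSet<>(size);
            for (int i = 0; i < size; ++i) {
                subprotocol.add(is.readUTF());
            }
            String portKey = is.readUTF();
            UUID key = portKey.isEmpty() ? null : UUID.fromString(portKey);
            System.out.println(subprotocol + " / " + key);
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            Set<String> sp = new HashSet<>();
            sp.add("TLSv1.2");
            write(new DataOutputStream(baos), sp, UUID.randomUUID());
            read(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
        }
    }
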
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java
index 11ee4bcb..29ac379b 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Namespace.java
@@ -37,114 +37,114 @@ import org.onap.aaf.auth.rserv.Pair;
public class Namespace implements Bytification {
- public static final int MAGIC=250935515;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48;
+ public static final int MAGIC=250935515;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48;
- public String name;
- public List<String> owner;
- public List<String> admin;
- public List<Pair<String,String>> attrib;
- public String description;
- public Integer type;
- public String parent;
- public Namespace() {}
-
- public Namespace(NsDAO.Data ndd) {
- name = ndd.name;
- description = ndd.description;
- type = ndd.type;
- parent = ndd.parent;
- if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
- attrib = new ArrayList<>();
- for( Entry<String, String> entry : ndd.attrib.entrySet()) {
- attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
- }
- }
- }
-
- public Namespace(NsDAO.Data ndd,List<String> owner, List<String> admin) {
- name = ndd.name;
- this.owner = owner;
- this.admin = admin;
- description = ndd.description;
- type = ndd.type;
- parent = ndd.parent;
- if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
- attrib = new ArrayList<>();
- for( Entry<String, String> entry : ndd.attrib.entrySet()) {
- attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
- }
- }
- }
+ public String name;
+ public List<String> owner;
+ public List<String> admin;
+ public List<Pair<String,String>> attrib;
+ public String description;
+ public Integer type;
+ public String parent;
+ public Namespace() {}
+
+ public Namespace(NsDAO.Data ndd) {
+ name = ndd.name;
+ description = ndd.description;
+ type = ndd.type;
+ parent = ndd.parent;
+ if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
+ attrib = new ArrayList<>();
+ for( Entry<String, String> entry : ndd.attrib.entrySet()) {
+ attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
+ }
+ }
+ }
+
+ public Namespace(NsDAO.Data ndd,List<String> owner, List<String> admin) {
+ name = ndd.name;
+ this.owner = owner;
+ this.admin = admin;
+ description = ndd.description;
+ type = ndd.type;
+ parent = ndd.parent;
+ if(ndd.attrib!=null && !ndd.attrib.isEmpty()) {
+ attrib = new ArrayList<>();
+ for( Entry<String, String> entry : ndd.attrib.entrySet()) {
+ attrib.add(new Pair<String,String>(entry.getKey(),entry.getValue()));
+ }
+ }
+ }
- public NsDAO.Data data() {
- NsDAO.Data ndd = new NsDAO.Data();
- ndd.name = name;
- ndd.description = description;
- ndd.parent = parent;
- ndd.type = type;
- return ndd;
- }
+ public NsDAO.Data data() {
+ NsDAO.Data ndd = new NsDAO.Data();
+ ndd.name = name;
+ ndd.description = description;
+ ndd.parent = parent;
+ ndd.type = type;
+ return ndd;
+ }
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- DataOutputStream os = new DataOutputStream(baos);
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutputStream os = new DataOutputStream(baos);
- Loader.writeHeader(os,MAGIC,VERSION);
- Loader.writeString(os, name);
- os.writeInt(type);
- Loader.writeStringSet(os,admin);
- Loader.writeStringSet(os,owner);
- Loader.writeString(os,description);
- Loader.writeString(os,parent);
+ Loader.writeHeader(os,MAGIC,VERSION);
+ Loader.writeString(os, name);
+ os.writeInt(type);
+ Loader.writeStringSet(os,admin);
+ Loader.writeStringSet(os,owner);
+ Loader.writeString(os,description);
+ Loader.writeString(os,parent);
- return ByteBuffer.wrap(baos.toByteArray());
- }
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- DataInputStream is = CassDAOImpl.toDIS(bb);
- /*int version = */Loader.readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
-
- byte[] buff = new byte[BUFF_SIZE];
- name = Loader.readString(is, buff);
- type = is.readInt();
- admin = Loader.readStringList(is,buff);
- owner = Loader.readStringList(is,buff);
- description = Loader.readString(is,buff);
- parent = Loader.readString(is,buff);
-
- }
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ DataInputStream is = CassDAOImpl.toDIS(bb);
+ /*int version = */Loader.readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+
+ byte[] buff = new byte[BUFF_SIZE];
+ name = Loader.readString(is, buff);
+ type = is.readInt();
+ admin = Loader.readStringList(is,buff);
+ owner = Loader.readStringList(is,buff);
+ description = Loader.readString(is,buff);
+ parent = Loader.readString(is,buff);
+
+ }
- /* (non-Javadoc)
- * @see java.lang.Object#hashCode()
- */
- @Override
- public int hashCode() {
- return name.hashCode();
- }
-
+ /* (non-Javadoc)
+ * @see java.lang.Object#hashCode()
+ */
+ @Override
+ public int hashCode() {
+ return name.hashCode();
+ }
+
- /* (non-Javadoc)
- * @see java.lang.Object#toString()
- */
- @Override
- public String toString() {
- return name.toString();
- }
+ /* (non-Javadoc)
+ * @see java.lang.Object#toString()
+ */
+ @Override
+ public String toString() {
+ return name.toString();
+ }
- /* (non-Javadoc)
- * @see java.lang.Object#equals(java.lang.Object)
- */
- @Override
- public boolean equals(Object arg0) {
- if(arg0==null || !(arg0 instanceof Namespace)) {
- return false;
- }
- return name.equals(((Namespace)arg0).name);
- }
+ /* (non-Javadoc)
+ * @see java.lang.Object#equals(java.lang.Object)
+ */
+ @Override
+ public boolean equals(Object arg0) {
+ if(arg0==null || !(arg0 instanceof Namespace)) {
+ return false;
+ }
+ return name.equals(((Namespace)arg0).name);
+ }
}
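
The Namespace holder above serializes itself through the Loader helpers, so a bytify()/reconstitute() pair should round-trip every field written in that order. Below is a minimal sketch, assuming the auth-cass classes shown in this diff are on the classpath; the namespace values and the owner/admin lists are hypothetical, and the lists are supplied explicitly so writeStringSet has non-null input.

import java.nio.ByteBuffer;
import java.util.Arrays;

import org.onap.aaf.auth.dao.cass.Namespace;
import org.onap.aaf.auth.dao.cass.NsDAO;

public class NamespaceRoundTripSketch {
    public static void main(String[] args) throws Exception {
        NsDAO.Data ndd = new NsDAO.Data();
        ndd.name = "org.example.app";          // hypothetical namespace
        ndd.description = "Example namespace";
        ndd.type = NsDAO.APP;                  // APP == 3, per the constants above
        ndd.parent = "org.example";

        Namespace ns = new Namespace(ndd,
                Arrays.asList("owner@example.org"),
                Arrays.asList("admin@example.org"));

        ByteBuffer bb = ns.bytify();           // writes MAGIC/VERSION header, then the fields

        Namespace copy = new Namespace();
        copy.reconstitute(bb);                 // reads the same fields back in the same order
        System.out.println(copy + " equals original: " + ns.equals(copy)); // equality is by name only
    }
}
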
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java
index 07890544..30f0d6b5 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsDAO.java
@@ -61,33 +61,33 @@ import com.datastax.driver.core.exceptions.DriverException;
*
*/
public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
- public static final String TABLE = "ns";
- public static final String TABLE_ATTRIB = "ns_attrib";
+ public static final String TABLE = "ns";
+ public static final String TABLE_ATTRIB = "ns_attrib";
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
public static final int ROOT = 1;
public static final int COMPANY=2;
public static final int APP = 3;
- private static final String BEGIN_BATCH = "BEGIN BATCH\n";
- private static final String APPLY_BATCH = "\nAPPLY BATCH;\n";
- private static final String SQSCCR = "';\n";
- private static final String SQCSQ = "','";
+ private static final String BEGIN_BATCH = "BEGIN BATCH\n";
+ private static final String APPLY_BATCH = "\nAPPLY BATCH;\n";
+ private static final String SQSCCR = "';\n";
+ private static final String SQCSQ = "','";
- private HistoryDAO historyDAO;
- private CacheInfoDAO infoDAO;
- private PSInfo psNS;
+ private HistoryDAO historyDAO;
+ private CacheInfoDAO infoDAO;
+ private PSInfo psNS;
- public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
- super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- init(trans);
- }
+ public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+ super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ init(trans);
+ }
- public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {
- super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- historyDAO=hDAO;
- infoDAO = iDAO;
- init(trans);
- }
+ public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException {
+ super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ historyDAO=hDAO;
+ infoDAO = iDAO;
+ init(trans);
+ }
//////////////////////////////////////////
@@ -99,428 +99,428 @@ public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
*
* @author Jonathan
*/
- public static class Data extends CacheableData implements Bytification {
- public String name;
- public int type;
- public String description;
- public String parent;
- public Map<String,String> attrib;
-
-// ////////////////////////////////////////
+ public static class Data extends CacheableData implements Bytification {
+ public String name;
+ public int type;
+ public String description;
+ public String parent;
+ public Map<String,String> attrib;
+
+// ////////////////////////////////////////
// // Getters
- public Map<String,String> attrib(boolean mutable) {
- if (attrib == null) {
- attrib = new HashMap<>();
- } else if (mutable && !(attrib instanceof HashMap)) {
- attrib = new HashMap<>(attrib);
- }
- return attrib;
- }
-
- @Override
- public int[] invalidate(Cached<?,?> cache) {
- return new int[] {
- seg(cache,name)
- };
- }
-
- public NsSplit split(String name) {
- return new NsSplit(this,name);
- }
-
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- NSLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- NSLoader.deflt.unmarshal(this,toDIS(bb));
- }
-
- @Override
- public String toString() {
- return name;
- }
-
+ public Map<String,String> attrib(boolean mutable) {
+ if (attrib == null) {
+ attrib = new HashMap<>();
+ } else if (mutable && !(attrib instanceof HashMap)) {
+ attrib = new HashMap<>(attrib);
+ }
+ return attrib;
+ }
+
+ @Override
+ public int[] invalidate(Cached<?,?> cache) {
+ return new int[] {
+ seg(cache,name)
+ };
+ }
+
+ public NsSplit split(String name) {
+ return new NsSplit(this,name);
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ NSLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ NSLoader.deflt.unmarshal(this,toDIS(bb));
+ }
+
+ @Override
+ public String toString() {
+ return name;
+ }
+
}
private void init(AuthzTrans trans) throws APIException, IOException {
// Set up sub-DAOs
if(historyDAO==null) {
- historyDAO = new HistoryDAO(trans, this);
- }
+ historyDAO = new HistoryDAO(trans, this);
+ }
if(infoDAO==null) {
- infoDAO = new CacheInfoDAO(trans,this);
- }
+ infoDAO = new CacheInfoDAO(trans,this);
+ }
- String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);
-
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE parent = ?", new NSLoader(1),readConsistency);
+ String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */);
+
+ psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE parent = ?", new NSLoader(1),readConsistency);
- }
-
+ }
+
private static final class NSLoader extends Loader<Data> implements Streamer<Data> {
- public static final int MAGIC=250935515;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48;
-
- public static final NSLoader deflt = new NSLoader(KEYLIMIT);
-
- public NSLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- // Int more efficient
- data.name = row.getString(0);
- data.type = row.getInt(1);
- data.description = row.getString(2);
- data.parent = row.getString(3);
- return data;
- }
-
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.name;
- }
-
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
-
- obj[idx]=data.type;
- obj[++idx]=data.description;
- obj[++idx]=data.parent;
- }
-
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.name);
- os.writeInt(data.type);
- writeString(os,data.description);
- writeString(os,data.parent);
- if(data.attrib==null) {
- os.writeInt(-1);
- } else {
- os.writeInt(data.attrib.size());
- for(Entry<String, String> es : data.attrib(false).entrySet()) {
- writeString(os,es.getKey());
- writeString(os,es.getValue());
- }
- }
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
-
- byte[] buff = new byte[BUFF_SIZE];
- data.name = readString(is, buff);
- data.type = is.readInt();
- data.description = readString(is,buff);
- data.parent = readString(is,buff);
- int count = is.readInt();
- if(count>0) {
- Map<String, String> da = data.attrib(true);
- for(int i=0;i<count;++i) {
- da.put(readString(is,buff), readString(is,buff));
- }
- }
- }
+ public static final int MAGIC=250935515;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48;
+
+ public static final NSLoader deflt = new NSLoader(KEYLIMIT);
+
+ public NSLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+ // Int more efficient
+ data.name = row.getString(0);
+ data.type = row.getInt(1);
+ data.description = row.getString(2);
+ data.parent = row.getString(3);
+ return data;
+ }
+
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.name;
+ }
+
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+
+ obj[idx]=data.type;
+ obj[++idx]=data.description;
+ obj[++idx]=data.parent;
+ }
+
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.name);
+ os.writeInt(data.type);
+ writeString(os,data.description);
+ writeString(os,data.parent);
+ if(data.attrib==null) {
+ os.writeInt(-1);
+ } else {
+ os.writeInt(data.attrib.size());
+ for(Entry<String, String> es : data.attrib(false).entrySet()) {
+ writeString(os,es.getKey());
+ writeString(os,es.getValue());
+ }
+ }
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+
+ byte[] buff = new byte[BUFF_SIZE];
+ data.name = readString(is, buff);
+ data.type = is.readInt();
+ data.description = readString(is,buff);
+ data.parent = readString(is,buff);
+ int count = is.readInt();
+ if(count>0) {
+ Map<String, String> da = data.attrib(true);
+ for(int i=0;i<count;++i) {
+ da.put(readString(is,buff), readString(is,buff));
+ }
+ }
+ }
}
- @Override
- public Result<Data> create(AuthzTrans trans, Data data) {
- String ns = data.name;
- // Ensure Parent is set
- if(data.parent==null) {
- return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
- }
-
- // insert Attributes
- StringBuilder stmt = new StringBuilder();
- stmt.append(BEGIN_BATCH);
- attribInsertStmts(stmt, data);
- stmt.append(APPLY_BATCH);
- try {
- getSession(trans).execute(stmt.toString());
-//// TEST CODE for Exception
-// boolean force = true;
-// if(force) {
-// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<>());
-//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");
-// }
+ @Override
+ public Result<Data> create(AuthzTrans trans, Data data) {
+ String ns = data.name;
+ // Ensure Parent is set
+ if(data.parent==null) {
+ return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
+ }
+
+ // insert Attributes
+ StringBuilder stmt = new StringBuilder();
+ stmt.append(BEGIN_BATCH);
+ attribInsertStmts(stmt, data);
+ stmt.append(APPLY_BATCH);
+ try {
+ getSession(trans).execute(stmt.toString());
+//// TEST CODE for Exception
+// boolean force = true;
+// if(force) {
+// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap<>());
+//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message");
+// }
////END TEST CODE
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- trans.info().log(stmt);
- return Result.err(Result.ERR_Backend, "Backend Access");
- }
- return super.create(trans, data);
- }
-
- @Override
- public Result<Void> update(AuthzTrans trans, Data data) {
- String ns = data.name;
- // Ensure Parent is set
- if(data.parent==null) {
- return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
- }
-
- StringBuilder stmt = new StringBuilder();
- stmt.append(BEGIN_BATCH);
- try {
- Map<String, String> localAttr = data.attrib;
- Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);
- if(rremoteAttr.notOK()) {
- return Result.err(rremoteAttr);
- }
- // update Attributes
- String str;
- for(Entry<String, String> es : localAttr.entrySet()) {
- str = rremoteAttr.value.get(es.getKey());
- if(str==null || !str.equals(es.getValue())) {
- attribUpdateStmt(stmt, ns, es.getKey(),es.getValue());
- }
- }
-
- // No point in deleting... insert overwrites...
-// for(Entry<String, String> es : remoteAttr.entrySet()) {
-// str = localAttr.get(es.getKey());
-// if(str==null || !str.equals(es.getValue())) {
-// attribDeleteStmt(stmt, ns, es.getKey());
-// }
-// }
- if(stmt.length()>BEGIN_BATCH.length()) {
- stmt.append(APPLY_BATCH);
- getSession(trans).execute(stmt.toString());
- }
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- trans.info().log(stmt);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- return super.update(trans,data);
- }
-
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object)
- */
- @Override
- public Result<List<Data>> read(AuthzTrans trans, Data data) {
- Result<List<Data>> rld = super.read(trans, data);
-
- if(rld.isOKhasData()) {
- for(Data d : rld.value) {
- // Note: Map is null at this point, save time/mem by assignment
- Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
- if(rabn.isOK()) {
- d.attrib = rabn.value;
- } else {
- return Result.err(rabn);
- }
- }
- }
- return rld;
- }
-
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object[])
- */
- @Override
- public Result<List<Data>> read(AuthzTrans trans, Object... key) {
- Result<List<Data>> rld = super.read(trans, key);
-
- if(rld.isOKhasData()) {
- for(Data d : rld.value) {
- // Note: Map is null at this point, save time/mem by assignment
- Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
- if(rabn.isOK()) {
- d.attrib = rabn.value;
- } else {
- return Result.err(rabn);
- }
- }
- }
- return rld;
- }
-
- @Override
- public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
- TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);
- try {
- StringBuilder stmt = new StringBuilder();
- attribDeleteAllStmt(stmt, data);
- try {
- getSession(trans).execute(stmt.toString());
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- trans.info().log(stmt);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
- } finally {
- tt.done();
- }
- return super.delete(trans, data, reread);
-
- }
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ trans.info().log(stmt);
+ return Result.err(Result.ERR_Backend, "Backend Access");
+ }
+ return super.create(trans, data);
+ }
+
+ @Override
+ public Result<Void> update(AuthzTrans trans, Data data) {
+ String ns = data.name;
+ // Ensure Parent is set
+ if(data.parent==null) {
+ return Result.err(Result.ERR_BadData, "Need parent for %s", ns);
+ }
+
+ StringBuilder stmt = new StringBuilder();
+ stmt.append(BEGIN_BATCH);
+ try {
+ Map<String, String> localAttr = data.attrib;
+ Result<Map<String, String>> rremoteAttr = readAttribByNS(trans,ns);
+ if(rremoteAttr.notOK()) {
+ return Result.err(rremoteAttr);
+ }
+ // update Attributes
+ String str;
+ for(Entry<String, String> es : localAttr.entrySet()) {
+ str = rremoteAttr.value.get(es.getKey());
+ if(str==null || !str.equals(es.getValue())) {
+ attribUpdateStmt(stmt, ns, es.getKey(),es.getValue());
+ }
+ }
+
+ // No point in deleting... insert overwrites...
+// for(Entry<String, String> es : remoteAttr.entrySet()) {
+// str = localAttr.get(es.getKey());
+// if(str==null || !str.equals(es.getValue())) {
+// attribDeleteStmt(stmt, ns, es.getKey());
+// }
+// }
+ if(stmt.length()>BEGIN_BATCH.length()) {
+ stmt.append(APPLY_BATCH);
+ getSession(trans).execute(stmt.toString());
+ }
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ trans.info().log(stmt);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ return super.update(trans,data);
+ }
+
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object)
+ */
+ @Override
+ public Result<List<Data>> read(AuthzTrans trans, Data data) {
+ Result<List<Data>> rld = super.read(trans, data);
+
+ if(rld.isOKhasData()) {
+ for(Data d : rld.value) {
+ // Note: Map is null at this point, save time/mem by assignment
+ Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
+ if(rabn.isOK()) {
+ d.attrib = rabn.value;
+ } else {
+ return Result.err(rabn);
+ }
+ }
+ }
+ return rld;
+ }
+
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.dao.CassDAOImpl#read(com.att.inno.env.TransStore, java.lang.Object[])
+ */
+ @Override
+ public Result<List<Data>> read(AuthzTrans trans, Object... key) {
+ Result<List<Data>> rld = super.read(trans, key);
+
+ if(rld.isOKhasData()) {
+ for(Data d : rld.value) {
+ // Note: Map is null at this point, save time/mem by assignment
+ Result<Map<String, String>> rabn = readAttribByNS(trans,d.name);
+ if(rabn.isOK()) {
+ d.attrib = rabn.value;
+ } else {
+ return Result.err(rabn);
+ }
+ }
+ }
+ return rld;
+ }
+
+ @Override
+ public Result<Void> delete(AuthzTrans trans, Data data, boolean reread) {
+ TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE);
+ try {
+ StringBuilder stmt = new StringBuilder();
+ attribDeleteAllStmt(stmt, data);
+ try {
+ getSession(trans).execute(stmt.toString());
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ trans.info().log(stmt);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+ } finally {
+ tt.done();
+ }
+ return super.delete(trans, data, reread);
+
+ }
- public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {
- Map<String,String> map = new HashMap<>();
- TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);
- try {
- ResultSet rs = getSession(trans).execute("SELECT key,value FROM "
- + TABLE_ATTRIB
- + " WHERE ns='"
- + ns
- + "';");
-
- for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
- Row r = iter.next();
- map.put(r.getString(0), r.getString(1));
- }
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- } finally {
- tt.done();
- }
- return Result.ok(map);
- }
-
- public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {
- Set<String> set = new HashSet<>();
- TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);
- try {
- ResultSet rs = getSession(trans).execute("SELECT ns FROM "
- + TABLE_ATTRIB
- + " WHERE key='"
- + key
- + "';");
-
- for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
- Row r = iter.next();
- set.add(r.getString(0));
- }
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- } finally {
- tt.done();
- }
- return Result.ok(set);
- }
-
- public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {
- try {
- getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());
- return Result.ok();
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
- }
-
- private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {
- sb.append("INSERT INTO ");
- sb.append(TABLE_ATTRIB);
- sb.append(" (ns,key,value) VALUES ('");
- sb.append(ns);
- sb.append(SQCSQ);
- sb.append(key);
- sb.append(SQCSQ);
- sb.append(value);
- sb.append("');");
- return sb;
- }
-
- private StringBuilder attribUpdateStmt(StringBuilder sb, String ns, String key, String value) {
- sb.append("UPDATE ");
- sb.append(TABLE_ATTRIB);
- sb.append(" set value='");
- sb.append(value);
- sb.append("' where ns='");
- sb.append(ns);
- sb.append("' AND key='");
- sb.append(key);
- sb.append("';");
- return sb;
- }
-
-
- public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {
- try {
- getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());
- return Result.ok();
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
- }
-
- private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {
- stmt.append("DELETE FROM ");
- stmt.append(TABLE_ATTRIB);
- stmt.append(" WHERE ns='");
- stmt.append(ns);
- stmt.append("' AND key='");
- stmt.append(key);
- stmt.append("';");
- return stmt;
- }
-
- private void attribDeleteAllStmt(StringBuilder stmt, Data data) {
- stmt.append(" DELETE FROM ");
- stmt.append(TABLE_ATTRIB);
- stmt.append(" WHERE ns='");
- stmt.append(data.name);
- stmt.append(SQSCCR);
- }
-
- private void attribInsertStmts(StringBuilder stmt, Data data) {
- // INSERT new Attrib
- for(Entry<String,String> es : data.attrib(false).entrySet() ) {
- stmt.append(" ");
- attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());
- }
- }
-
- /**
- * Add description to Namespace
- * @param trans
- * @param ns
- * @param description
- * @return
- */
- public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
- + description.replace("'", "''") + "' WHERE name = '" + ns + "';");
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- Data data = new Data();
- data.name=ns;
- wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );
- return Result.ok();
- }
-
- public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {
- return psNS.read(trans, R_TEXT, new Object[]{parent});
- }
-
+ public Result<Map<String,String>> readAttribByNS(AuthzTrans trans, String ns) {
+ Map<String,String> map = new HashMap<>();
+ TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE);
+ try {
+ ResultSet rs = getSession(trans).execute("SELECT key,value FROM "
+ + TABLE_ATTRIB
+ + " WHERE ns='"
+ + ns
+ + "';");
+
+ for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
+ Row r = iter.next();
+ map.put(r.getString(0), r.getString(1));
+ }
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ } finally {
+ tt.done();
+ }
+ return Result.ok(map);
+ }
+
+ public Result<Set<String>> readNsByAttrib(AuthzTrans trans, String key) {
+ Set<String> set = new HashSet<>();
+ TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE);
+ try {
+ ResultSet rs = getSession(trans).execute("SELECT ns FROM "
+ + TABLE_ATTRIB
+ + " WHERE key='"
+ + key
+ + "';");
+
+ for(Iterator<Row> iter = rs.iterator();iter.hasNext(); ) {
+ Row r = iter.next();
+ set.add(r.getString(0));
+ }
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ } finally {
+ tt.done();
+ }
+ return Result.ok(set);
+ }
+
+ public Result<Void> attribAdd(AuthzTrans trans, String ns, String key, String value) {
+ try {
+ getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString());
+ return Result.ok();
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+ }
+
+ private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) {
+ sb.append("INSERT INTO ");
+ sb.append(TABLE_ATTRIB);
+ sb.append(" (ns,key,value) VALUES ('");
+ sb.append(ns);
+ sb.append(SQCSQ);
+ sb.append(key);
+ sb.append(SQCSQ);
+ sb.append(value);
+ sb.append("');");
+ return sb;
+ }
+
+ private StringBuilder attribUpdateStmt(StringBuilder sb, String ns, String key, String value) {
+ sb.append("UPDATE ");
+ sb.append(TABLE_ATTRIB);
+ sb.append(" set value='");
+ sb.append(value);
+ sb.append("' where ns='");
+ sb.append(ns);
+ sb.append("' AND key='");
+ sb.append(key);
+ sb.append("';");
+ return sb;
+ }
+
+
+ public Result<Void> attribRemove(AuthzTrans trans, String ns, String key) {
+ try {
+ getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString());
+ return Result.ok();
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+ }
+
+ private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) {
+ stmt.append("DELETE FROM ");
+ stmt.append(TABLE_ATTRIB);
+ stmt.append(" WHERE ns='");
+ stmt.append(ns);
+ stmt.append("' AND key='");
+ stmt.append(key);
+ stmt.append("';");
+ return stmt;
+ }
+
+ private void attribDeleteAllStmt(StringBuilder stmt, Data data) {
+ stmt.append(" DELETE FROM ");
+ stmt.append(TABLE_ATTRIB);
+ stmt.append(" WHERE ns='");
+ stmt.append(data.name);
+ stmt.append(SQSCCR);
+ }
+
+ private void attribInsertStmts(StringBuilder stmt, Data data) {
+ // INSERT new Attrib
+ for(Entry<String,String> es : data.attrib(false).entrySet() ) {
+ stmt.append(" ");
+ attribInsertStmt(stmt,data.name,es.getKey(),es.getValue());
+ }
+ }
+
+ /**
+ * Add description to Namespace
+ * @param trans
+ * @param ns
+ * @param description
+ * @return
+ */
+ public Result<Void> addDescription(AuthzTrans trans, String ns, String description) {
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
+ + description.replace("'", "''") + "' WHERE name = '" + ns + "';");
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ Data data = new Data();
+ data.name=ns;
+ wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null );
+ return Result.ok();
+ }
+
+ public Result<List<Data>> getChildren(AuthzTrans trans, String parent) {
+ return psNS.read(trans, R_TEXT, new Object[]{parent});
+ }
+
/**
* Log Modification statements to History
@@ -531,8 +531,8 @@ public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
//TODO Must log history
HistoryDAO.Data hd = HistoryDAO.newInitedData();
@@ -541,20 +541,20 @@ public class NsDAO extends CassDAOImpl<AuthzTrans,NsDAO.Data> {
hd.target = TABLE;
hd.subject = subject ? override[1] : data.name;
hd.memo = memo ? override[0] : (data.name + " was " + modified.name() + 'd' );
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize NsDAO.Data");
- }
- }
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.error().log(e,"Could not serialize NsDAO.Data");
+ }
+ }
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
- }
+ trans.error().log("Cannot log to History");
+ }
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
- trans.error().log("Cannot touch CacheInfo");
- }
+ trans.error().log("Cannot touch CacheInfo");
+ }
}
} \ No newline at end of file
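
NsDAO.create() above does not bind parameters for the attribute rows; it concatenates CQL into a BEGIN BATCH / APPLY BATCH string and executes it as one statement. The standalone sketch below mirrors the private attribInsertStmt/attribInsertStmts helpers (it is not the AAF API itself) purely to show the text such a batch ends up containing; the namespace and attribute values are hypothetical.

import java.util.LinkedHashMap;
import java.util.Map;

// Standalone mirror of NsDAO's private attribute-insert helpers, for illustration only.
public class NsAttribBatchSketch {
    private static final String TABLE_ATTRIB = "ns_attrib";

    private static void attribInsertStmt(StringBuilder sb, String ns, String key, String value) {
        sb.append("INSERT INTO ").append(TABLE_ATTRIB)
          .append(" (ns,key,value) VALUES ('")
          .append(ns).append("','").append(key).append("','").append(value)
          .append("');");
    }

    public static void main(String[] args) {
        Map<String, String> attrib = new LinkedHashMap<>();
        attrib.put("swm", "v1");               // hypothetical attribute rows
        attrib.put("deployer", "ops-team");

        StringBuilder stmt = new StringBuilder("BEGIN BATCH\n");
        for (Map.Entry<String, String> es : attrib.entrySet()) {
            stmt.append("  ");
            attribInsertStmt(stmt, "org.example.app", es.getKey(), es.getValue());
            stmt.append('\n');
        }
        stmt.append("APPLY BATCH;\n");
        System.out.println(stmt);              // the string a create() would hand to the session
    }
}
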
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java
index 2694c6c8..a64c0a94 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsSplit.java
@@ -22,40 +22,40 @@
package org.onap.aaf.auth.dao.cass;
public class NsSplit {
- public final String ns;
- public final String name;
- public final NsDAO.Data nsd;
-
- public NsSplit(NsDAO.Data nsd, String child) {
- this.nsd = nsd;
- if(child.startsWith(nsd.name)) {
- ns = nsd.name;
- int dot = ns.length();
- if(dot<child.length() && child.charAt(dot)=='.') {
- name = child.substring(dot+1);
- } else {
- name="";
- }
- } else {
- name=null;
- ns = null;
- }
- }
-
- public NsSplit(String ns, String name) {
- this.ns = ns;
- this.name = name;
- this.nsd = new NsDAO.Data();
- nsd.name = ns;
- int dot = ns.lastIndexOf('.');
- if(dot>=0) {
- nsd.parent = ns.substring(0, dot);
- } else {
- nsd.parent = ".";
- }
- }
+ public final String ns;
+ public final String name;
+ public final NsDAO.Data nsd;
+
+ public NsSplit(NsDAO.Data nsd, String child) {
+ this.nsd = nsd;
+ if(child.startsWith(nsd.name)) {
+ ns = nsd.name;
+ int dot = ns.length();
+ if(dot<child.length() && child.charAt(dot)=='.') {
+ name = child.substring(dot+1);
+ } else {
+ name="";
+ }
+ } else {
+ name=null;
+ ns = null;
+ }
+ }
+
+ public NsSplit(String ns, String name) {
+ this.ns = ns;
+ this.name = name;
+ this.nsd = new NsDAO.Data();
+ nsd.name = ns;
+ int dot = ns.lastIndexOf('.');
+ if(dot>=0) {
+ nsd.parent = ns.substring(0, dot);
+ } else {
+ nsd.parent = ".";
+ }
+ }
- public boolean isOK() {
- return ns!=null && name !=null;
- }
+ public boolean isOK() {
+ return ns!=null && name !=null;
+ }
} \ No newline at end of file
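
NsSplit, shown above, is a small value object that either splits a child name under its owning namespace or marks the split as invalid. A short usage sketch with hypothetical names:

import org.onap.aaf.auth.dao.cass.NsDAO;
import org.onap.aaf.auth.dao.cass.NsSplit;

public class NsSplitSketch {
    public static void main(String[] args) {
        NsDAO.Data ns = new NsDAO.Data();
        ns.name = "org.example.app";                          // hypothetical namespace

        NsSplit hit = new NsSplit(ns, "org.example.app.admin");
        System.out.println(hit.isOK());                       // true
        System.out.println(hit.ns + " / " + hit.name);        // org.example.app / admin

        NsSplit miss = new NsSplit(ns, "com.other.thing");    // child not under the namespace
        System.out.println(miss.isOK());                      // false: ns and name stay null

        NsSplit byName = new NsSplit("org.example.app", "admin");
        System.out.println(byName.nsd.parent);                // org.example (derived from the last dot)
    }
}
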
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java
index 18d5eeec..59e18ae9 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/NsType.java
@@ -27,48 +27,48 @@ package org.onap.aaf.auth.dao.cass;
*
*/
public enum NsType {
- UNKNOWN (-1),
- DOT (0),
- ROOT (1),
- COMPANY (2),
- APP (3),
- STACKED_APP (10),
- STACK (11);
-
- public final int type;
- private NsType(int t) {
- type = t;
- }
- /**
- * This is not the Ordinal, but the Type that is stored in NS Tables
- *
- * @param t
- * @return
- */
- public static NsType fromType(int t) {
- for(NsType nst : values()) {
- if(t==nst.type) {
- return nst;
- }
- }
- return UNKNOWN;
- }
-
- /**
- * Use this one rather than "valueOf" to avoid Exception
- * @param s
- * @return
- */
- public static NsType fromString(String s) {
- if(s!=null) {
- for(NsType nst : values()) {
- if(nst.name().equals(s)) {
- return nst;
- }
- }
- }
- return UNKNOWN;
- }
+ UNKNOWN (-1),
+ DOT (0),
+ ROOT (1),
+ COMPANY (2),
+ APP (3),
+ STACKED_APP (10),
+ STACK (11);
+
+ public final int type;
+ private NsType(int t) {
+ type = t;
+ }
+ /**
+ * This is not the Ordinal, but the Type that is stored in NS Tables
+ *
+ * @param t
+ * @return
+ */
+ public static NsType fromType(int t) {
+ for(NsType nst : values()) {
+ if(t==nst.type) {
+ return nst;
+ }
+ }
+ return UNKNOWN;
+ }
+
+ /**
+ * Use this one rather than "valueOf" to avoid Exception
+ * @param s
+ * @return
+ */
+ public static NsType fromString(String s) {
+ if(s!=null) {
+ for(NsType nst : values()) {
+ if(nst.name().equals(s)) {
+ return nst;
+ }
+ }
+ }
+ return UNKNOWN;
+ }
-
+
}
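
NsType.fromType() keys off the integer stored in the NS tables rather than the enum ordinal, and fromString() is the lookup that never throws. A quick sketch:

import org.onap.aaf.auth.dao.cass.NsType;

public class NsTypeSketch {
    public static void main(String[] args) {
        System.out.println(NsType.fromType(2));          // COMPANY (stored type, not ordinal)
        System.out.println(NsType.fromType(99));         // UNKNOWN for any unmapped value

        System.out.println(NsType.fromString("APP"));    // APP
        System.out.println(NsType.fromString("bogus"));  // UNKNOWN instead of IllegalArgumentException
        System.out.println(NsType.fromString(null));     // UNKNOWN; null is tolerated
    }
}
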
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java
index 4fe3aaab..5d0f084b 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/OAuthTokenDAO.java
@@ -50,7 +50,7 @@ import com.datastax.driver.core.Row;
*/
public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
public static final String TABLE = "oauth_token";
- private AbsCassDAO<AuthzTrans, Data>.PSInfo psByUser;
+ private AbsCassDAO<AuthzTrans, Data>.PSInfo psByUser;
public OAuthTokenDAO(AuthzTrans trans, Cluster cluster, String keyspace) {
super(trans, OAuthTokenDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
@@ -58,63 +58,63 @@ public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
}
public OAuthTokenDAO(AuthzTrans trans, AbsCassDAO<AuthzTrans,?> aDao) {
- super(trans, OAuthTokenDAO.class.getSimpleName(),aDao, Data.class, TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- init(trans);
+ super(trans, OAuthTokenDAO.class.getSimpleName(),aDao, Data.class, TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ init(trans);
}
public static final int KEYLIMIT = 1;
- public static class Data implements Bytification {
- public String id;
- public String client_id;
- public String user;
- public boolean active;
- public int type;
- public String refresh;
- public Date expires;
- public long exp_sec;
- public String content;
- public Set<String> scopes;
- public String state;
- public String req_ip; // requesting
-
- public Set<String> scopes(boolean mutable) {
- if (scopes == null) {
- scopes = new HashSet<>();
- } else if (mutable && !(scopes instanceof HashSet)) {
- scopes = new HashSet<>(scopes);
- }
- return scopes;
- }
-
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- OAuthLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- OAuthLoader.deflt.unmarshal(this, toDIS(bb));
- }
-
- public String toString() {
- return user.toString() + ' ' + id.toString() + ' ' + Chrono.dateTime(expires) + (active?"":"in") + "active";
- }
+ public static class Data implements Bytification {
+ public String id;
+ public String client_id;
+ public String user;
+ public boolean active;
+ public int type;
+ public String refresh;
+ public Date expires;
+ public long exp_sec;
+ public String content;
+ public Set<String> scopes;
+ public String state;
+ public String req_ip; // requesting
+
+ public Set<String> scopes(boolean mutable) {
+ if (scopes == null) {
+ scopes = new HashSet<>();
+ } else if (mutable && !(scopes instanceof HashSet)) {
+ scopes = new HashSet<>(scopes);
+ }
+ return scopes;
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ OAuthLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ OAuthLoader.deflt.unmarshal(this, toDIS(bb));
+ }
+
+ public String toString() {
+ return user.toString() + ' ' + id.toString() + ' ' + Chrono.dateTime(expires) + (active?"":"in") + "active";
+ }
}
private static class OAuthLoader extends Loader<Data> implements Streamer<Data>{
- public static final int MAGIC=235677843;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=96; // Note: only used when
-
- public static final OAuthLoader deflt = new OAuthLoader(KEYLIMIT);
- public OAuthLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
+ public static final int MAGIC=235677843;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=96; // Note: only used when
+
+ public static final OAuthLoader deflt = new OAuthLoader(KEYLIMIT);
+ public OAuthLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
public Data load(Data data, Row row) {
data.id = row.getString(0);
data.client_id = row.getString(1);
@@ -152,43 +152,43 @@ public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
obj[++i] = data.req_ip;
}
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.id);
- writeString(os, data.client_id);
- writeString(os, data.user);
- os.writeBoolean(data.active);
- os.writeInt(data.type);
- writeString(os, data.refresh);
- os.writeLong(data.expires==null?-1:data.expires.getTime());
- os.writeLong(data.exp_sec);
- writeString(os, data.content);
- writeStringSet(os,data.scopes);
- writeString(os, data.state);
- writeString(os, data.req_ip);
- }
-
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE]; // used only if fits
- data.id = readString(is,buff);
- data.client_id = readString(is,buff);
- data.user = readString(is,buff);
- data.active = is.readBoolean();
- data.type = is.readInt();
- data.refresh = readString(is,buff);
- long l = is.readLong();
- data.expires = l<0?null:new Date(l);
- data.exp_sec = is.readLong();
- data.content = readString(is,buff); // note, large strings still ok with small buffer
- data.scopes = readStringSet(is,buff);
- data.state = readString(is,buff);
- data.req_ip = readString(is,buff);
- }
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.id);
+ writeString(os, data.client_id);
+ writeString(os, data.user);
+ os.writeBoolean(data.active);
+ os.writeInt(data.type);
+ writeString(os, data.refresh);
+ os.writeLong(data.expires==null?-1:data.expires.getTime());
+ os.writeLong(data.exp_sec);
+ writeString(os, data.content);
+ writeStringSet(os,data.scopes);
+ writeString(os, data.state);
+ writeString(os, data.req_ip);
+ }
+
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE]; // used only if fits
+ data.id = readString(is,buff);
+ data.client_id = readString(is,buff);
+ data.user = readString(is,buff);
+ data.active = is.readBoolean();
+ data.type = is.readInt();
+ data.refresh = readString(is,buff);
+ long l = is.readLong();
+ data.expires = l<0?null:new Date(l);
+ data.exp_sec = is.readLong();
+ data.content = readString(is,buff); // note, large strings still ok with small buffer
+ data.scopes = readStringSet(is,buff);
+ data.state = readString(is,buff);
+ data.req_ip = readString(is,buff);
+ }
}
private void init(AuthzTrans trans) {
@@ -196,7 +196,7 @@ public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
psByUser = new PSInfo(trans, "SELECT " + helpers[0] + " from " + TABLE + " WHERE user=?",OAuthLoader.deflt,readConsistency);
}
- /**
+ /**
* Log Modification statements to History
*
* @param modified which CRUD action was done
@@ -207,7 +207,7 @@ public class OAuthTokenDAO extends CassDAOImpl<AuthzTrans,OAuthTokenDAO.Data> {
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
}
- public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
- return psByUser.read(trans, "Read By User", new Object[]{user});
- }
+ public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
+ return psByUser.read(trans, "Read By User", new Object[]{user});
+ }
}
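
OAuthTokenDAO.Data above delegates bytify()/reconstitute() to the private OAuthLoader, which writes the expiry as -1 when it is null and reads strings through a reusable buffer. A minimal round-trip sketch follows; the token values are hypothetical, and scopes is populated through scopes(true) so the serialized set is non-null.

import java.nio.ByteBuffer;
import java.util.Date;

import org.onap.aaf.auth.dao.cass.OAuthTokenDAO;

public class OAuthTokenRoundTripSketch {
    public static void main(String[] args) throws Exception {
        OAuthTokenDAO.Data d = new OAuthTokenDAO.Data();
        d.id = "token-123";                        // hypothetical token fields
        d.client_id = "client-abc";
        d.user = "demo@people.osaaf.org";
        d.active = true;
        d.type = 1;
        d.refresh = "refresh-456";
        d.expires = new Date();
        d.exp_sec = d.expires.getTime() / 1000L;
        d.content = "{\"sample\":true}";
        d.scopes(true).add("org.example.app");
        d.state = "state-xyz";
        d.req_ip = "10.0.0.1";

        ByteBuffer bb = d.bytify();                // OAuthLoader.marshal under the covers

        OAuthTokenDAO.Data copy = new OAuthTokenDAO.Data();
        copy.reconstitute(bb);                     // OAuthLoader.unmarshal reads fields in the same order
        System.out.println(copy);                  // prints user, id, expiry and the active flag
    }
}
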
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java
index 0ecdd98d..c02387b1 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/PermDAO.java
@@ -49,169 +49,169 @@ import com.datastax.driver.core.exceptions.DriverException;
public class PermDAO extends CassDAOImpl<AuthzTrans,PermDAO.Data> {
- public static final String TABLE = "perm";
+ public static final String TABLE = "perm";
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
- private static final String STAR = "*";
-
- private final HistoryDAO historyDAO;
- private final CacheInfoDAO infoDAO;
-
- private PSInfo psNS, psChildren, psByType;
-
- public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
- super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- init(trans);
- historyDAO = new HistoryDAO(trans, this);
- infoDAO = new CacheInfoDAO(trans,this);
- }
-
- public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
- super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- historyDAO = hDAO;
- infoDAO=ciDAO;
- init(trans);
- }
-
-
- private static final int KEYLIMIT = 4;
- public static class Data extends CacheableData implements Bytification {
- public String ns;
- public String type;
- public String instance;
- public String action;
- public Set<String> roles;
- public String description;
-
- public Data() {}
-
- public Data(NsSplit nss, String instance, String action) {
- ns = nss.ns;
- type = nss.name;
- this.instance = instance;
- this.action = action;
- }
-
- public String fullType() {
- return ns + '.' + type;
- }
-
- public String fullPerm() {
- return ns + '.' + type + '|' + instance + '|' + action;
- }
-
- public String encode() {
- return ns + '|' + type + '|' + instance + '|' + action;
- }
-
- /**
- * Decode Perm String, including breaking into appropriate Namespace
- *
- * @param trans
- * @param q
- * @param p
- * @return
- */
- public static Result<Data> decode(AuthzTrans trans, Question q, String p) {
- String[] ss = Split.splitTrim('|', p,4);
- if(ss[2]==null) {
- return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
- }
- Data data = new Data();
- if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
- if(nss.notOK()) {
- return Result.err(nss);
- }
- data.ns=nss.value.ns;
- data.type=nss.value.name;
- data.instance=ss[1];
- data.action=ss[2];
- } else { // new 4 part encoding
- data.ns=ss[0];
- data.type=ss[1];
- data.instance=ss[2];
- data.action=ss[3];
- }
- return Result.ok(data);
- }
-
- /**
- * Decode Perm String, including breaking into appropriate Namespace
- *
- * @param trans
- * @param q
- * @param p
- * @return
- */
- public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
- String[] ss = Split.splitTrim('|', p,4);
- if(ss[2]==null) {
- return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
- }
-
- if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
- ss[3] = ss[2];
- ss[2] = ss[1];
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
- if(nss.notOK()) {
- return Result.err(nss);
- }
- ss[1] = nss.value.name;
- ss[0] = nss.value.ns;
- }
- return Result.ok(ss);
- }
-
- public static Data create(NsDAO.Data ns, String name) {
- NsSplit nss = new NsSplit(ns,name);
- Data rv = new Data();
- rv.ns = nss.ns;
- String[] s = nss.name.split("\\|");
- switch(s.length) {
- case 3:
- rv.type=s[0];
- rv.instance=s[1];
- rv.action=s[2];
- break;
- case 2:
- rv.type=s[0];
- rv.instance=s[1];
- rv.action=STAR;
- break;
- default:
- rv.type=s[0];
- rv.instance = STAR;
- rv.action = STAR;
- }
- return rv;
- }
-
- public static Data create(AuthzTrans trans, Question q, String name) {
- String[] s = name.split("\\|");
- Result<NsSplit> rdns = q.deriveNsSplit(trans, s[0]);
- Data rv = new PermDAO.Data();
- if(rdns.isOKhasData()) {
- switch(s.length) {
- case 3:
- rv.type=s[1];
- rv.instance=s[2];
- rv.action=s[3];
- break;
- case 2:
- rv.type=s[1];
- rv.instance=s[2];
- rv.action=STAR;
- break;
- default:
- rv.type=s[1];
- rv.instance = STAR;
- rv.action = STAR;
- }
- }
- return rv;
- }
-
+ private static final String STAR = "*";
+
+ private final HistoryDAO historyDAO;
+ private final CacheInfoDAO infoDAO;
+
+ private PSInfo psNS, psChildren, psByType;
+
+ public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+ super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ init(trans);
+ historyDAO = new HistoryDAO(trans, this);
+ infoDAO = new CacheInfoDAO(trans,this);
+ }
+
+ public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+ super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ historyDAO = hDAO;
+ infoDAO=ciDAO;
+ init(trans);
+ }
+
+
+ private static final int KEYLIMIT = 4;
+ public static class Data extends CacheableData implements Bytification {
+ public String ns;
+ public String type;
+ public String instance;
+ public String action;
+ public Set<String> roles;
+ public String description;
+
+ public Data() {}
+
+ public Data(NsSplit nss, String instance, String action) {
+ ns = nss.ns;
+ type = nss.name;
+ this.instance = instance;
+ this.action = action;
+ }
+
+ public String fullType() {
+ return ns + '.' + type;
+ }
+
+ public String fullPerm() {
+ return ns + '.' + type + '|' + instance + '|' + action;
+ }
+
+ public String encode() {
+ return ns + '|' + type + '|' + instance + '|' + action;
+ }
+
+ /**
+ * Decode Perm String, including breaking into appropriate Namespace
+ *
+ * @param trans
+ * @param q
+ * @param p
+ * @return
+ */
+ public static Result<Data> decode(AuthzTrans trans, Question q, String p) {
+ String[] ss = Split.splitTrim('|', p,4);
+ if(ss[2]==null) {
+ return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
+ }
+ Data data = new Data();
+ if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
+ Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+ if(nss.notOK()) {
+ return Result.err(nss);
+ }
+ data.ns=nss.value.ns;
+ data.type=nss.value.name;
+ data.instance=ss[1];
+ data.action=ss[2];
+ } else { // new 4 part encoding
+ data.ns=ss[0];
+ data.type=ss[1];
+ data.instance=ss[2];
+ data.action=ss[3];
+ }
+ return Result.ok(data);
+ }
+
+ /**
+ * Decode Perm String, including breaking into appropriate Namespace
+ *
+ * @param trans
+ * @param q
+ * @param p
+ * @return
+ */
+ public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
+ String[] ss = Split.splitTrim('|', p,4);
+ if(ss[2]==null) {
+ return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'");
+ }
+
+ if(ss[3]==null) { // older 3 part encoding must be evaluated for NS
+ ss[3] = ss[2];
+ ss[2] = ss[1];
+ Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+ if(nss.notOK()) {
+ return Result.err(nss);
+ }
+ ss[1] = nss.value.name;
+ ss[0] = nss.value.ns;
+ }
+ return Result.ok(ss);
+ }
+
+ public static Data create(NsDAO.Data ns, String name) {
+ NsSplit nss = new NsSplit(ns,name);
+ Data rv = new Data();
+ rv.ns = nss.ns;
+ String[] s = nss.name.split("\\|");
+ switch(s.length) {
+ case 3:
+ rv.type=s[0];
+ rv.instance=s[1];
+ rv.action=s[2];
+ break;
+ case 2:
+ rv.type=s[0];
+ rv.instance=s[1];
+ rv.action=STAR;
+ break;
+ default:
+ rv.type=s[0];
+ rv.instance = STAR;
+ rv.action = STAR;
+ }
+ return rv;
+ }
+
+ public static Data create(AuthzTrans trans, Question q, String name) {
+ String[] s = name.split("\\|");
+ Result<NsSplit> rdns = q.deriveNsSplit(trans, s[0]);
+ Data rv = new PermDAO.Data();
+ if(rdns.isOKhasData()) {
+ switch(s.length) {
+ case 3:
+ rv.type=s[1];
+ rv.instance=s[2];
+ rv.action=s[3];
+ break;
+ case 2:
+ rv.type=s[1];
+ rv.instance=s[2];
+ rv.action=STAR;
+ break;
+ default:
+ rv.type=s[1];
+ rv.instance = STAR;
+ rv.action = STAR;
+ }
+ }
+ return rv;
+ }
+
////////////////////////////////////////
// Getters
public Set<String> roles(boolean mutable) {
@@ -223,279 +223,279 @@ public class PermDAO extends CassDAOImpl<AuthzTrans,PermDAO.Data> {
return roles;
}
- @Override
- public int[] invalidate(Cached<?,?> cache) {
- return new int[] {
- seg(cache,ns),
- seg(cache,ns,type),
- seg(cache,ns,type,STAR),
- seg(cache,ns,type,instance,action)
- };
- }
-
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- PermLoader.deflt.marshal(this, new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- PermLoader.deflt.unmarshal(this, toDIS(bb));
- }
-
- @Override
- public String toString() {
- return encode();
- }
- }
-
- private static class PermLoader extends Loader<Data> implements Streamer<Data> {
- public static final int MAGIC=283939453;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=96;
-
- public static final PermLoader deflt = new PermLoader(KEYLIMIT);
-
- public PermLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- // Int more efficient Match "fields" string
- data.ns = row.getString(0);
- data.type = row.getString(1);
- data.instance = row.getString(2);
- data.action = row.getString(3);
- data.roles = row.getSet(4,String.class);
- data.description = row.getString(5);
- return data;
- }
-
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.ns;
- obj[++idx]=data.type;
- obj[++idx]=data.instance;
- obj[++idx]=data.action;
- }
-
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.roles;
- obj[++idx]=data.description;
- }
-
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.ns);
- writeString(os, data.type);
- writeString(os, data.instance);
- writeString(os, data.action);
- writeStringSet(os, data.roles);
- writeString(os, data.description);
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.ns = readString(is, buff);
- data.type = readString(is,buff);
- data.instance = readString(is,buff);
- data.action = readString(is,buff);
- data.roles = readStringSet(is,buff);
- data.description = readString(is,buff);
- }
- }
-
- private void init(AuthzTrans trans) {
- // the 3 is the number of key fields
- String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt);
-
- // Other SELECT style statements... match with a local Method
- psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns = ? AND type = ?", new PermLoader(2) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.type;
- }
- },readConsistency);
-
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns = ?", new PermLoader(1),readConsistency);
-
- psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns=? AND type > ? AND type < ?",
- new PermLoader(3) {
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx] = data.ns;
- obj[++idx]=data.type + DOT;
- obj[++idx]=data.type + DOT_PLUS_ONE;
- }
- },readConsistency);
-
- }
-
-
- /**
- * Add a single Permission to the Role's Permission Collection
- *
- * @param trans
- * @param roleFullName
- * @param perm
- * @param type
- * @param action
- * @return
- */
- public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
- // Note: Prepared Statements for Collection updates aren't supported
- //ResultSet rv =
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'" + roleFullName + "'} " +
- "WHERE " +
- "ns = '" + perm.ns + "' AND " +
- "type = '" + perm.type + "' AND " +
- "instance = '" + perm.instance + "' AND " +
- "action = '" + perm.action + "';"
- );
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " +
- perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
- return Result.ok();
- }
-
- /**
- * Remove a single Permission from the Role's Permission Collection
- * @param trans
- * @param roleFullName
- * @param perm
- * @param type
- * @param action
- * @return
- */
- public Result<Void> delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
- // Note: Prepared Statements for Collection updates aren't supported
- //ResultSet rv =
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " +
- "WHERE " +
- "ns = '" + perm.ns + "' AND " +
- "type = '" + perm.type + "' AND " +
- "instance = '" + perm.instance + "' AND " +
- "action = '" + perm.action + "';"
- );
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- //TODO how can we tell when it doesn't?
- wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " +
- perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
- return Result.ok();
- }
-
-
-
- /**
- * Additional method:
- * Select all Permissions by Name
- *
- * @param name
- * @return
- * @throws DAOException
- */
- public Result<List<Data>> readByType(AuthzTrans trans, String ns, String type) {
- return psByType.read(trans, R_TEXT, new Object[]{ns, type});
- }
-
- public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String type) {
- return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE});
- }
-
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
- return psNS.read(trans, R_TEXT, new Object[]{ns});
- }
-
- /**
- * Add description to this permission
- *
- * @param trans
- * @param ns
- * @param type
- * @param instance
- * @param action
- * @param description
- * @return
- */
- public Result<Void> addDescription(AuthzTrans trans, String ns, String type,
- String instance, String action, String description) {
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
- + description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'"
- + "AND instance = '" + instance + "' AND action = '" + action + "';");
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- Data data = new Data();
- data.ns=ns;
- data.type=type;
- data.instance=instance;
- data.action=action;
- wasModified(trans, CRUD.update, data, "Added description " + description + " to permission "
- + data.encode(), null );
- return Result.ok();
- }
-
- /**
- * Log Modification statements to History
- */
- @Override
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
-
- // Need to update history
- HistoryDAO.Data hd = HistoryDAO.newInitedData();
- hd.user = trans.user();
- hd.action = modified.name();
- hd.target = TABLE;
- hd.subject = subject ? override[1] : data.fullType();
- if (memo) {
+ @Override
+ public int[] invalidate(Cached<?,?> cache) {
+ return new int[] {
+ seg(cache,ns),
+ seg(cache,ns,type),
+ seg(cache,ns,type,STAR),
+ seg(cache,ns,type,instance,action)
+ };
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ PermLoader.deflt.marshal(this, new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ PermLoader.deflt.unmarshal(this, toDIS(bb));
+ }
+
+ @Override
+ public String toString() {
+ return encode();
+ }
+ }
+
+ private static class PermLoader extends Loader<Data> implements Streamer<Data> {
+ public static final int MAGIC=283939453;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=96;
+
+ public static final PermLoader deflt = new PermLoader(KEYLIMIT);
+
+ public PermLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+            // Positional (int) access is faster; the column order must match the "fields" string
+ data.ns = row.getString(0);
+ data.type = row.getString(1);
+ data.instance = row.getString(2);
+ data.action = row.getString(3);
+ data.roles = row.getSet(4,String.class);
+ data.description = row.getString(5);
+ return data;
+ }
+
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.ns;
+ obj[++idx]=data.type;
+ obj[++idx]=data.instance;
+ obj[++idx]=data.action;
+ }
+
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.roles;
+ obj[++idx]=data.description;
+ }
+
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.ns);
+ writeString(os, data.type);
+ writeString(os, data.instance);
+ writeString(os, data.action);
+ writeStringSet(os, data.roles);
+ writeString(os, data.description);
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.ns = readString(is, buff);
+ data.type = readString(is,buff);
+ data.instance = readString(is,buff);
+ data.action = readString(is,buff);
+ data.roles = readStringSet(is,buff);
+ data.description = readString(is,buff);
+ }
+ }
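// Aside (illustrative sketch only, not part of this whitespace-only change): bytify() and
// reconstitute() above delegate to PermLoader.deflt, so a cached entry is simply the
// MAGIC/VERSION header followed by the six fields in declaration order. A hypothetical
// round trip (values invented; assumes the usual imports for this package):
//
    static void permRoundTrip() throws IOException {
        PermDAO.Data in = new PermDAO.Data();
        in.ns = "org.onap.sample";               // hypothetical permission key
        in.type = "access";
        in.instance = "*";
        in.action = "read";
        in.roles = new HashSet<>();              // empty, so writeStringSet has a set to write
        in.description = "";

        ByteBuffer bb = in.bytify();             // header, then ns|type|instance|action|roles|description
        PermDAO.Data out = new PermDAO.Data();
        out.reconstitute(bb);                    // readHeader validates MAGIC/VERSION before the fields
        assert in.encode().equals(out.encode()); // key fields survive the trip
    }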
+
+ private void init(AuthzTrans trans) {
+ // the 3 is the number of key fields
+ String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt);
+
+ // Other SELECT style statements... match with a local Method
+ psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns = ? AND type = ?", new PermLoader(2) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.type;
+ }
+ },readConsistency);
+
+ psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns = ?", new PermLoader(1),readConsistency);
+
+ psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns=? AND type > ? AND type < ?",
+ new PermLoader(3) {
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx] = data.ns;
+ obj[++idx]=data.type + DOT;
+ obj[++idx]=data.type + DOT_PLUS_ONE;
+ }
+ },readConsistency);
+
+ }
+
+
+    /**
+     * Add a single Role to the Permission's Role Collection
+     *
+     * @param trans
+     * @param perm
+     * @param roleFullName
+     * @return
+     */
+ public Result<Void> addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
+ // Note: Prepared Statements for Collection updates aren't supported
+ //ResultSet rv =
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'" + roleFullName + "'} " +
+ "WHERE " +
+ "ns = '" + perm.ns + "' AND " +
+ "type = '" + perm.type + "' AND " +
+ "instance = '" + perm.instance + "' AND " +
+ "action = '" + perm.action + "';"
+ );
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " +
+ perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
+ return Result.ok();
+ }
+
+    /**
+     * Remove a single Role from the Permission's Role Collection
+     * @param trans
+     * @param perm
+     * @param roleFullName
+     * @return
+     */
+ public Result<Void> delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) {
+ // Note: Prepared Statements for Collection updates aren't supported
+ //ResultSet rv =
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " +
+ "WHERE " +
+ "ns = '" + perm.ns + "' AND " +
+ "type = '" + perm.type + "' AND " +
+ "instance = '" + perm.instance + "' AND " +
+ "action = '" + perm.action + "';"
+ );
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ //TODO how can we tell when it doesn't?
+ wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " +
+ perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action);
+ return Result.ok();
+ }
+
+
+
+    /**
+     * Additional method:
+     * Select all Permissions by Namespace and Type
+     *
+     * @param trans
+     * @param ns
+     * @param type
+     * @return
+     */
+ public Result<List<Data>> readByType(AuthzTrans trans, String ns, String type) {
+ return psByType.read(trans, R_TEXT, new Object[]{ns, type});
+ }
+
+ public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String type) {
+ return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE});
+ }
+
+ public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+ return psNS.read(trans, R_TEXT, new Object[]{ns});
+ }
+
+ /**
+ * Add description to this permission
+ *
+ * @param trans
+ * @param ns
+ * @param type
+ * @param instance
+ * @param action
+ * @param description
+ * @return
+ */
+ public Result<Void> addDescription(AuthzTrans trans, String ns, String type,
+ String instance, String action, String description) {
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
+ + description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'"
+                + " AND instance = '" + instance + "' AND action = '" + action + "';");
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ Data data = new Data();
+ data.ns=ns;
+ data.type=type;
+ data.instance=instance;
+ data.action=action;
+ wasModified(trans, CRUD.update, data, "Added description " + description + " to permission "
+ + data.encode(), null );
+ return Result.ok();
+ }
+
+ /**
+ * Log Modification statements to History
+ */
+ @Override
+ protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
+
+ // Need to update history
+ HistoryDAO.Data hd = HistoryDAO.newInitedData();
+ hd.user = trans.user();
+ hd.action = modified.name();
+ hd.target = TABLE;
+ hd.subject = subject ? override[1] : data.fullType();
+ if (memo) {
hd.memo = String.format("%s", override[0]);
} else {
hd.memo = String.format("%sd %s|%s|%s", modified.name(),data.fullType(),data.instance,data.action);
}
-
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize PermDAO.Data");
- }
- }
-
+
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.error().log(e,"Could not serialize PermDAO.Data");
+ }
+ }
+
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
- trans.error().log("Cannot touch CacheInfo");
+ trans.error().log("Cannot touch CacheInfo");
}
- }
+ }
}
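
A usage sketch for the PermDAO above (illustrative only; identities and namespaces are invented, and an AuthzTrans plus a constructed PermDAO are assumed to be in hand):

    // Grant a role on a permission, then list permissions of that type.
    PermDAO.Data perm = new PermDAO.Data();
    perm.ns = "org.onap.sample";
    perm.type = "access";
    perm.instance = ":service";
    perm.action = "read";

    Result<Void> granted = permDAO.addRole(trans, perm, "org.onap.sample.admin");
    if (granted.notOK()) {
        trans.error().log(granted.details);          // backend resets were already reported by addRole
    }

    Result<List<PermDAO.Data>> byType = permDAO.readByType(trans, perm.ns, perm.type);
    if (byType.isOKhasData()) {
        for (PermDAO.Data d : byType.value) {
            trans.debug().log(d.encode());           // ns|type|instance|action
        }
    }
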
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java
index 974f73fe..4489d268 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/RoleDAO.java
@@ -48,28 +48,28 @@ import com.datastax.driver.core.exceptions.DriverException;
public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {
- public static final String TABLE = "role";
+ public static final String TABLE = "role";
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
- private final HistoryDAO historyDAO;
- private final CacheInfoDAO infoDAO;
+ private final HistoryDAO historyDAO;
+ private final CacheInfoDAO infoDAO;
- private PSInfo psChildren, psNS, psName;
+ private PSInfo psChildren, psNS, psName;
- public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
- super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+ super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
// Set up sub-DAOs
historyDAO = new HistoryDAO(trans, this);
- infoDAO = new CacheInfoDAO(trans,this);
- init(trans);
- }
+ infoDAO = new CacheInfoDAO(trans,this);
+ init(trans);
+ }
- public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
- super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- historyDAO = hDAO;
- infoDAO = ciDAO;
- init(trans);
- }
+ public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+ super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ historyDAO = hDAO;
+ infoDAO = ciDAO;
+ init(trans);
+ }
//////////////////////////////////////////
@@ -80,301 +80,301 @@ public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {
* Data class that matches the Cassandra Table "role"
* @author Jonathan
*/
- public static class Data extends CacheableData implements Bytification {
- public String ns;
- public String name;
- public Set<String> perms;
- public String description;
+ public static class Data extends CacheableData implements Bytification {
+ public String ns;
+ public String name;
+ public Set<String> perms;
+ public String description;
////////////////////////////////////////
// Getters
- public Set<String> perms(boolean mutable) {
- if (perms == null) {
- perms = new HashSet<>();
- } else if (mutable && !(perms instanceof HashSet)) {
- perms = new HashSet<>(perms);
- }
- return perms;
- }
-
- public static Data create(NsDAO.Data ns, String name) {
- NsSplit nss = new NsSplit(ns,name);
- RoleDAO.Data rv = new Data();
- rv.ns = nss.ns;
- rv.name=nss.name;
- return rv;
- }
-
- public String fullName() {
- return ns + '.' + name;
- }
-
- public String encode() {
- return ns + '|' + name;
- }
-
- /**
- * Decode Perm String, including breaking into appropriate Namespace
- *
- * @param trans
- * @param q
- * @param r
- * @return
- */
- public static Result<Data> decode(AuthzTrans trans, Question q, String r) {
- String[] ss = Split.splitTrim('|', r,2);
- Data data = new Data();
- if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
- if(nss.notOK()) {
- return Result.err(nss);
- }
- data.ns=nss.value.ns;
- data.name=nss.value.name;
- } else { // new 4 part encoding
- data.ns=ss[0];
- data.name=ss[1];
- }
- return Result.ok(data);
- }
-
- /**
- * Decode from UserRole Data
- * @param urdd
- * @return
- */
- public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {
- RoleDAO.Data rd = new RoleDAO.Data();
- rd.ns = urdd.ns;
- rd.name = urdd.rname;
- return rd;
- }
-
-
- /**
- * Decode Perm String, including breaking into appropriate Namespace
- *
- * @param trans
- * @param q
- * @param p
- * @return
- */
- public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
- String[] ss = Split.splitTrim('|', p,2);
- if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
- Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
- if(nss.notOK()) {
- return Result.err(nss);
- }
- ss[0] = nss.value.ns;
- ss[1] = nss.value.name;
- }
- return Result.ok(ss);
- }
-
- @Override
- public int[] invalidate(Cached<?,?> cache) {
- return new int[] {
- seg(cache,ns,name),
- seg(cache,ns),
- seg(cache,name),
- };
- }
-
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- RoleLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- RoleLoader.deflt.unmarshal(this, toDIS(bb));
- }
-
- @Override
- public String toString() {
- return ns + '.' + name;
- }
+ public Set<String> perms(boolean mutable) {
+ if (perms == null) {
+ perms = new HashSet<>();
+ } else if (mutable && !(perms instanceof HashSet)) {
+ perms = new HashSet<>(perms);
+ }
+ return perms;
+ }
+
+ public static Data create(NsDAO.Data ns, String name) {
+ NsSplit nss = new NsSplit(ns,name);
+ RoleDAO.Data rv = new Data();
+ rv.ns = nss.ns;
+ rv.name=nss.name;
+ return rv;
+ }
+
+ public String fullName() {
+ return ns + '.' + name;
+ }
+
+ public String encode() {
+ return ns + '|' + name;
+ }
+
+ /**
+     * Decode Role String, including breaking into appropriate Namespace
+ *
+ * @param trans
+ * @param q
+ * @param r
+ * @return
+ */
+ public static Result<Data> decode(AuthzTrans trans, Question q, String r) {
+ String[] ss = Split.splitTrim('|', r,2);
+ Data data = new Data();
+ if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
+ Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+ if(nss.notOK()) {
+ return Result.err(nss);
+ }
+ data.ns=nss.value.ns;
+ data.name=nss.value.name;
+            } else { // new 2 part encoding
+ data.ns=ss[0];
+ data.name=ss[1];
+ }
+ return Result.ok(data);
+ }
+
+ /**
+ * Decode from UserRole Data
+ * @param urdd
+ * @return
+ */
+ public static RoleDAO.Data decode(UserRoleDAO.Data urdd) {
+ RoleDAO.Data rd = new RoleDAO.Data();
+ rd.ns = urdd.ns;
+ rd.name = urdd.rname;
+ return rd;
+ }
+
+
+ /**
+     * Decode Role String, including breaking into appropriate Namespace
+ *
+ * @param trans
+ * @param q
+ * @param p
+ * @return
+ */
+ public static Result<String[]> decodeToArray(AuthzTrans trans, Question q, String p) {
+ String[] ss = Split.splitTrim('|', p,2);
+ if(ss[1]==null) { // older 1 part encoding must be evaluated for NS
+ Result<NsSplit> nss = q.deriveNsSplit(trans, ss[0]);
+ if(nss.notOK()) {
+ return Result.err(nss);
+ }
+ ss[0] = nss.value.ns;
+ ss[1] = nss.value.name;
+ }
+ return Result.ok(ss);
+ }
+
+ @Override
+ public int[] invalidate(Cached<?,?> cache) {
+ return new int[] {
+ seg(cache,ns,name),
+ seg(cache,ns),
+ seg(cache,name),
+ };
+ }
+
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ RoleLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ RoleLoader.deflt.unmarshal(this, toDIS(bb));
+ }
+
+ @Override
+ public String toString() {
+ return ns + '.' + name;
+ }
}
private static class RoleLoader extends Loader<Data> implements Streamer<Data> {
- public static final int MAGIC=923577343;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=96;
-
- public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);
-
- public RoleLoader(int keylimit) {
- super(keylimit);
- }
-
- @Override
- public Data load(Data data, Row row) {
- // Int more efficient
- data.ns = row.getString(0);
- data.name = row.getString(1);
- data.perms = row.getSet(2,String.class);
- data.description = row.getString(3);
- return data;
- }
-
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.ns;
- obj[++idx]=data.name;
- }
-
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.perms;
- obj[++idx]=data.description;
- }
-
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
- writeString(os, data.ns);
- writeString(os, data.name);
- writeStringSet(os,data.perms);
- writeString(os, data.description);
- }
-
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
- byte[] buff = new byte[BUFF_SIZE];
- data.ns = readString(is, buff);
- data.name = readString(is,buff);
- data.perms = readStringSet(is,buff);
- data.description = readString(is,buff);
- }
+ public static final int MAGIC=923577343;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=96;
+
+ public static final RoleLoader deflt = new RoleLoader(KEYLIMIT);
+
+ public RoleLoader(int keylimit) {
+ super(keylimit);
+ }
+
+ @Override
+ public Data load(Data data, Row row) {
+            // Positional (int) access is faster than lookup by column name
+ data.ns = row.getString(0);
+ data.name = row.getString(1);
+ data.perms = row.getSet(2,String.class);
+ data.description = row.getString(3);
+ return data;
+ }
+
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.ns;
+ obj[++idx]=data.name;
+ }
+
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.perms;
+ obj[++idx]=data.description;
+ }
+
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
+ writeString(os, data.ns);
+ writeString(os, data.name);
+ writeStringSet(os,data.perms);
+ writeString(os, data.description);
+ }
+
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+ byte[] buff = new byte[BUFF_SIZE];
+ data.ns = readString(is, buff);
+ data.name = readString(is,buff);
+ data.perms = readStringSet(is,buff);
+ data.description = readString(is,buff);
+ }
};
- private void init(AuthzTrans trans) {
- String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);
-
- psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns = ?", new RoleLoader(1),readConsistency);
-
- psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE name = ?", new RoleLoader(1),readConsistency);
-
- psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
- " WHERE ns=? AND name > ? AND name < ?",
- new RoleLoader(3) {
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx] = data.ns;
- obj[++idx]=data.name + DOT;
- obj[++idx]=data.name + DOT_PLUS_ONE;
- }
- },readConsistency);
-
- }
-
- public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
- return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});
- }
-
- public Result<List<Data>> readName(AuthzTrans trans, String name) {
- return psName.read(trans, R_TEXT + name, new Object[]{name});
- }
-
- public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {
- if(role.length()==0 || "*".equals(role)) {
- return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR});
- } else {
- return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});
- }
- }
-
- /**
- * Add a single Permission to the Role's Permission Collection
- *
- * @param trans
- * @param role
- * @param perm
- * @param type
- * @param action
- * @return
- */
- public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
- // Note: Prepared Statements for Collection updates aren't supported
- String pencode = perm.encode();
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" +
- pencode + "'} WHERE " +
- "ns = '" + role.ns + "' AND name = '" + role.name + "';");
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());
- return Result.ok();
- }
-
- /**
- * Remove a single Permission from the Role's Permission Collection
- * @param trans
- * @param role
- * @param perm
- * @param type
- * @param action
- * @return
- */
- public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
- // Note: Prepared Statements for Collection updates aren't supported
-
- String pencode = perm.encode();
-
- //ResultSet rv =
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" +
- pencode + "'} WHERE " +
- "ns = '" + role.ns + "' AND name = '" + role.name + "';");
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- //TODO how can we tell when it doesn't?
- wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );
- return Result.ok();
- }
-
- /**
- * Add description to role
- *
- * @param trans
- * @param ns
- * @param name
- * @param description
- * @return
- */
- public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
- try {
- getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
- + description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");
- } catch (DriverException | APIException | IOException e) {
- reportPerhapsReset(trans,e);
- return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
- }
-
- Data data = new Data();
- data.ns=ns;
- data.name=name;
- wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );
- return Result.ok();
- }
-
-
+ private void init(AuthzTrans trans) {
+ String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt);
+
+ psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns = ?", new RoleLoader(1),readConsistency);
+
+ psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE name = ?", new RoleLoader(1),readConsistency);
+
+ psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE +
+ " WHERE ns=? AND name > ? AND name < ?",
+ new RoleLoader(3) {
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx] = data.ns;
+ obj[++idx]=data.name + DOT;
+ obj[++idx]=data.name + DOT_PLUS_ONE;
+ }
+ },readConsistency);
+
+ }
+
+ public Result<List<Data>> readNS(AuthzTrans trans, String ns) {
+ return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns});
+ }
+
+ public Result<List<Data>> readName(AuthzTrans trans, String name) {
+ return psName.read(trans, R_TEXT + name, new Object[]{name});
+ }
+
+ public Result<List<Data>> readChildren(AuthzTrans trans, String ns, String role) {
+ if(role.length()==0 || "*".equals(role)) {
+ return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR});
+ } else {
+ return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE});
+ }
+ }
+
+    /**
+     * Add a single Permission to the Role's Permission Collection
+     *
+     * @param trans
+     * @param role
+     * @param perm
+     * @return
+     */
+ public Result<Void> addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
+ // Note: Prepared Statements for Collection updates aren't supported
+ String pencode = perm.encode();
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" +
+ pencode + "'} WHERE " +
+ "ns = '" + role.ns + "' AND name = '" + role.name + "';");
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName());
+ return Result.ok();
+ }
+
+    /**
+     * Remove a single Permission from the Role's Permission Collection
+     * @param trans
+     * @param role
+     * @param perm
+     * @return
+     */
+ public Result<Void> delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) {
+ // Note: Prepared Statements for Collection updates aren't supported
+
+ String pencode = perm.encode();
+
+ //ResultSet rv =
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" +
+ pencode + "'} WHERE " +
+ "ns = '" + role.ns + "' AND name = '" + role.name + "';");
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ //TODO how can we tell when it doesn't?
+ wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() );
+ return Result.ok();
+ }
+
+ /**
+ * Add description to role
+ *
+ * @param trans
+ * @param ns
+ * @param name
+ * @param description
+ * @return
+ */
+ public Result<Void> addDescription(AuthzTrans trans, String ns, String name, String description) {
+ try {
+ getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '"
+ + description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';");
+ } catch (DriverException | APIException | IOException e) {
+ reportPerhapsReset(trans,e);
+ return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG);
+ }
+
+ Data data = new Data();
+ data.ns=ns;
+ data.name=name;
+ wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null );
+ return Result.ok();
+ }
+
+
/**
* Log Modification statements to History
* @param modified which CRUD action was done
@@ -383,28 +383,28 @@ public class RoleDAO extends CassDAOImpl<AuthzTrans,RoleDAO.Data> {
*/
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
- HistoryDAO.Data hd = HistoryDAO.newInitedData();
+ HistoryDAO.Data hd = HistoryDAO.newInitedData();
hd.user = trans.user();
hd.action = modified.name();
hd.target = TABLE;
hd.subject = subject ? override[1] : data.fullName();
hd.memo = memo ? override[0] : (data.fullName() + " was " + modified.name() + 'd' );
- if(modified==CRUD.delete) {
- try {
- hd.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.error().log(e,"Could not serialize RoleDAO.Data");
- }
- }
+ if(modified==CRUD.delete) {
+ try {
+ hd.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.error().log(e,"Could not serialize RoleDAO.Data");
+ }
+ }
if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
+ trans.error().log("Cannot log to History");
}
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
- trans.error().log("Cannot touch CacheInfo for Role");
+ trans.error().log("Cannot touch CacheInfo for Role");
}
}
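
One detail of the RoleDAO (and PermDAO) above, shown as a hedged sketch rather than project code: psChildren turns a "children of" lookup into a range scan by bracketing the name between name + DOT and name + DOT_PLUS_ONE, so listing sub-roles needs no extra index:

    // List sub-roles of the hypothetical role org.onap.sample.admin (e.g. admin.db, admin.ui).
    Result<List<RoleDAO.Data>> kids = roleDAO.readChildren(trans, "org.onap.sample", "admin");
    if (kids.isOKhasData()) {
        for (RoleDAO.Data rd : kids.value) {
            trans.debug().log(rd.fullName());        // ns + '.' + name
        }
    }
    // Passing "" or "*" as the role scans the whole namespace (FIRST_CHAR..LAST_CHAR above).
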
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java
index be52c406..8a617b94 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/Status.java
@@ -36,53 +36,53 @@ import org.onap.aaf.auth.layer.Result;
* @param <RV>
*/
public class Status<RV> extends Result<RV> {
-
- // Jonathan 10/1/2013: Initially, I used enum, but it's not extensible.
+
+ // Jonathan 10/1/2013: Initially, I used enum, but it's not extensible.
public final static int ERR_NsNotFound = Result.ERR_General+1,
- ERR_RoleNotFound = Result.ERR_General+2,
- ERR_PermissionNotFound = Result.ERR_General+3,
- ERR_UserNotFound = Result.ERR_General+4,
- ERR_UserRoleNotFound = Result.ERR_General+5,
- ERR_DelegateNotFound = Result.ERR_General+6,
- ERR_InvalidDelegate = Result.ERR_General+7,
- ERR_DependencyExists = Result.ERR_General+8,
- ERR_NoApprovals = Result.ERR_General+9,
- ACC_Now = Result.ERR_General+10,
- ACC_Future = Result.ERR_General+11,
- ERR_ChoiceNeeded = Result.ERR_General+12,
- ERR_FutureNotRequested = Result.ERR_General+13;
+ ERR_RoleNotFound = Result.ERR_General+2,
+ ERR_PermissionNotFound = Result.ERR_General+3,
+ ERR_UserNotFound = Result.ERR_General+4,
+ ERR_UserRoleNotFound = Result.ERR_General+5,
+ ERR_DelegateNotFound = Result.ERR_General+6,
+ ERR_InvalidDelegate = Result.ERR_General+7,
+ ERR_DependencyExists = Result.ERR_General+8,
+ ERR_NoApprovals = Result.ERR_General+9,
+ ACC_Now = Result.ERR_General+10,
+ ACC_Future = Result.ERR_General+11,
+ ERR_ChoiceNeeded = Result.ERR_General+12,
+ ERR_FutureNotRequested = Result.ERR_General+13;
- /**
+ /**
* Constructor for Result set.
* @param data
* @param status
*/
private Status(RV value, int status, String details, String[] variables ) {
- super(value,status,details,variables);
+ super(value,status,details,variables);
}
- public static String name(int status) {
- switch(status) {
- case OK: return "OK";
- case ERR_NsNotFound: return "ERR_NsNotFound";
- case ERR_RoleNotFound: return "ERR_RoleNotFound";
- case ERR_PermissionNotFound: return "ERR_PermissionNotFound";
- case ERR_UserNotFound: return "ERR_UserNotFound";
- case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound";
- case ERR_DelegateNotFound: return "ERR_DelegateNotFound";
- case ERR_InvalidDelegate: return "ERR_InvalidDelegate";
- case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists";
- case ERR_DependencyExists: return "ERR_DependencyExists";
- case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted";
- case ERR_Denied: return "ERR_Denied";
- case ERR_Policy: return "ERR_Policy";
- case ERR_BadData: return "ERR_BadData";
- case ERR_NotImplemented: return "ERR_NotImplemented";
- case ERR_NotFound: return "ERR_NotFound";
- case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded";
- }
- //case ERR_General: or unknown...
- return "ERR_General";
- }
+ public static String name(int status) {
+ switch(status) {
+ case OK: return "OK";
+ case ERR_NsNotFound: return "ERR_NsNotFound";
+ case ERR_RoleNotFound: return "ERR_RoleNotFound";
+ case ERR_PermissionNotFound: return "ERR_PermissionNotFound";
+ case ERR_UserNotFound: return "ERR_UserNotFound";
+ case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound";
+ case ERR_DelegateNotFound: return "ERR_DelegateNotFound";
+ case ERR_InvalidDelegate: return "ERR_InvalidDelegate";
+ case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists";
+ case ERR_DependencyExists: return "ERR_DependencyExists";
+ case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted";
+ case ERR_Denied: return "ERR_Denied";
+ case ERR_Policy: return "ERR_Policy";
+ case ERR_BadData: return "ERR_BadData";
+ case ERR_NotImplemented: return "ERR_NotImplemented";
+ case ERR_NotFound: return "ERR_NotFound";
+ case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded";
+ }
+ //case ERR_General: or unknown...
+ return "ERR_General";
+ }
}
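
A small consumption sketch for the codes above (hypothetical; any Result-returning call would do):

    // Render a failed Result's status as a stable label for logging.
    Result<List<RoleDAO.Data>> r = roleDAO.readName(trans, "admin");
    if (r.notOK()) {
        trans.error().log(Status.name(r.status) + ": " + r.details);
        // Unrecognized codes fall back to "ERR_General" in Status.name().
    }
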
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java
index 301e47fc..7b9f7b91 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/cass/UserRoleDAO.java
@@ -46,274 +46,274 @@ import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Row;
public class UserRoleDAO extends CassDAOImpl<AuthzTrans,UserRoleDAO.Data> {
- public static final String TABLE = "user_role";
-
+ public static final String TABLE = "user_role";
+
public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F
- private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";
- public Slot transURSlot;
-
- private final HistoryDAO historyDAO;
- private final CacheInfoDAO infoDAO;
-
- private PSInfo psByUser, psByRole, psUserInRole;
+ private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_";
+ public Slot transURSlot;
+
+ private final HistoryDAO historyDAO;
+ private final CacheInfoDAO infoDAO;
+
+ private PSInfo psByUser, psByRole, psUserInRole;
- public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
- super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- transURSlot = trans.slot(TRANS_UR_SLOT);
- init(trans);
+ public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException {
+ super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ transURSlot = trans.slot(TRANS_UR_SLOT);
+ init(trans);
- // Set up sub-DAOs
- historyDAO = new HistoryDAO(trans, this);
- infoDAO = new CacheInfoDAO(trans,this);
- }
+ // Set up sub-DAOs
+ historyDAO = new HistoryDAO(trans, this);
+ infoDAO = new CacheInfoDAO(trans,this);
+ }
- public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
- super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
- transURSlot = trans.slot(TRANS_UR_SLOT);
- historyDAO = hDAO;
- infoDAO = ciDAO;
- init(trans);
- }
+ public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) {
+ super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE));
+ transURSlot = trans.slot(TRANS_UR_SLOT);
+ historyDAO = hDAO;
+ infoDAO = ciDAO;
+ init(trans);
+ }
- private static final int KEYLIMIT = 2;
- public static class Data extends CacheableData implements Bytification {
- public String user;
- public String role;
- public String ns;
- public String rname;
- public Date expires;
-
- @Override
- public int[] invalidate(Cached<?,?> cache) {
- // Note: I'm not worried about Name collisions, because the formats are different:
- // Jonathan... etc versus
- // com. ...
- // The "dot" makes the difference.
- return new int[] {
- seg(cache,user,role),
- seg(cache,user),
- seg(cache,role)
- };
- }
+ private static final int KEYLIMIT = 2;
+ public static class Data extends CacheableData implements Bytification {
+ public String user;
+ public String role;
+ public String ns;
+ public String rname;
+ public Date expires;
+
+ @Override
+ public int[] invalidate(Cached<?,?> cache) {
+ // Note: I'm not worried about Name collisions, because the formats are different:
+ // Jonathan... etc versus
+ // com. ...
+ // The "dot" makes the difference.
+ return new int[] {
+ seg(cache,user,role),
+ seg(cache,user),
+ seg(cache,role)
+ };
+ }
- @Override
- public ByteBuffer bytify() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- URLoader.deflt.marshal(this,new DataOutputStream(baos));
- return ByteBuffer.wrap(baos.toByteArray());
- }
-
- @Override
- public void reconstitute(ByteBuffer bb) throws IOException {
- URLoader.deflt.unmarshal(this, toDIS(bb));
- }
+ @Override
+ public ByteBuffer bytify() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ URLoader.deflt.marshal(this,new DataOutputStream(baos));
+ return ByteBuffer.wrap(baos.toByteArray());
+ }
+
+ @Override
+ public void reconstitute(ByteBuffer bb) throws IOException {
+ URLoader.deflt.unmarshal(this, toDIS(bb));
+ }
- public void role(String ns, String rname) {
- this.ns = ns;
- this.rname = rname;
- this.role = ns + '.' + rname;
- }
-
- public void role(RoleDAO.Data rdd) {
- ns = rdd.ns;
- rname = rdd.name;
- role = rdd.fullName();
- }
+ public void role(String ns, String rname) {
+ this.ns = ns;
+ this.rname = rname;
+ this.role = ns + '.' + rname;
+ }
+
+ public void role(RoleDAO.Data rdd) {
+ ns = rdd.ns;
+ rname = rdd.name;
+ role = rdd.fullName();
+ }
-
- public boolean role(AuthzTrans trans, Question ques, String role) {
- this.role = role;
- Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);
- if(rnss.isOKhasData()) {
- ns = rnss.value.ns;
- rname = rnss.value.name;
- return true;
- } else {
- return false;
- }
- }
+
+ public boolean role(AuthzTrans trans, Question ques, String role) {
+ this.role = role;
+ Result<NsSplit> rnss = ques.deriveNsSplit(trans, role);
+ if(rnss.isOKhasData()) {
+ ns = rnss.value.ns;
+ rname = rnss.value.name;
+ return true;
+ } else {
+ return false;
+ }
+ }
- @Override
- public String toString() {
- return user + '|' + ns + '|' + rname + '|' + Chrono.dateStamp(expires);
- }
+ @Override
+ public String toString() {
+ return user + '|' + ns + '|' + rname + '|' + Chrono.dateStamp(expires);
+ }
- }
-
- private static class URLoader extends Loader<Data> implements Streamer<Data> {
- public static final int MAGIC=738469903;
- public static final int VERSION=1;
- public static final int BUFF_SIZE=48;
-
- public static final URLoader deflt = new URLoader(KEYLIMIT);
+ }
+
+ private static class URLoader extends Loader<Data> implements Streamer<Data> {
+ public static final int MAGIC=738469903;
+ public static final int VERSION=1;
+ public static final int BUFF_SIZE=48;
+
+ public static final URLoader deflt = new URLoader(KEYLIMIT);
- public URLoader(int keylimit) {
- super(keylimit);
- }
+ public URLoader(int keylimit) {
+ super(keylimit);
+ }
- @Override
- public Data load(Data data, Row row) {
- data.user = row.getString(0);
- data.role = row.getString(1);
- data.ns = row.getString(2);
- data.rname = row.getString(3);
- data.expires = row.getTimestamp(4);
- return data;
- }
+ @Override
+ public Data load(Data data, Row row) {
+ data.user = row.getString(0);
+ data.role = row.getString(1);
+ data.ns = row.getString(2);
+ data.rname = row.getString(3);
+ data.expires = row.getTimestamp(4);
+ return data;
+ }
- @Override
- protected void key(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.user;
- obj[++idx]=data.role;
- }
+ @Override
+ protected void key(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.user;
+ obj[++idx]=data.role;
+ }
- @Override
- protected void body(Data data, int _idx, Object[] obj) {
- int idx = _idx;
- obj[idx]=data.ns;
- obj[++idx]=data.rname;
- obj[++idx]=data.expires;
- }
-
- @Override
- public void marshal(Data data, DataOutputStream os) throws IOException {
- writeHeader(os,MAGIC,VERSION);
+ @Override
+ protected void body(Data data, int _idx, Object[] obj) {
+ int idx = _idx;
+ obj[idx]=data.ns;
+ obj[++idx]=data.rname;
+ obj[++idx]=data.expires;
+ }
+
+ @Override
+ public void marshal(Data data, DataOutputStream os) throws IOException {
+ writeHeader(os,MAGIC,VERSION);
- writeString(os, data.user);
- writeString(os, data.role);
- writeString(os, data.ns);
- writeString(os, data.rname);
- os.writeLong(data.expires==null?-1:data.expires.getTime());
- }
+ writeString(os, data.user);
+ writeString(os, data.role);
+ writeString(os, data.ns);
+ writeString(os, data.rname);
+ os.writeLong(data.expires==null?-1:data.expires.getTime());
+ }
- @Override
- public void unmarshal(Data data, DataInputStream is) throws IOException {
- /*int version = */readHeader(is,MAGIC,VERSION);
- // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
-
- byte[] buff = new byte[BUFF_SIZE];
- data.user = readString(is,buff);
- data.role = readString(is,buff);
- data.ns = readString(is,buff);
- data.rname = readString(is,buff);
- long l = is.readLong();
- data.expires = l<0?null:new Date(l);
- }
+ @Override
+ public void unmarshal(Data data, DataInputStream is) throws IOException {
+ /*int version = */readHeader(is,MAGIC,VERSION);
+ // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields
+
+ byte[] buff = new byte[BUFF_SIZE];
+ data.user = readString(is,buff);
+ data.role = readString(is,buff);
+ data.ns = readString(is,buff);
+ data.rname = readString(is,buff);
+ long l = is.readLong();
+ data.expires = l<0?null:new Date(l);
+ }
- };
-
- private void init(AuthzTrans trans) {
- String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);
-
- psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?",
- new URLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.user;
- }
- },readConsistency);
-
- // Note: We understand this call may have poor performance, so only should be used in Management (Delete) func
- psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING",
- new URLoader(1) {
- @Override
- protected void key(Data data, int idx, Object[] obj) {
- obj[idx]=data.role;
- }
- },readConsistency);
-
- psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",
- URLoader.deflt,readConsistency);
- }
+ };
+
+ private void init(AuthzTrans trans) {
+ String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt);
+
+ psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?",
+ new URLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.user;
+ }
+ },readConsistency);
+
+        // Note: We understand this call may have poor performance, so it should only be used in the Management (Delete) function
+ psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING",
+ new URLoader(1) {
+ @Override
+ protected void key(Data data, int idx, Object[] obj) {
+ obj[idx]=data.role;
+ }
+ },readConsistency);
+
+ psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?",
+ URLoader.deflt,readConsistency);
+ }
- public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
- return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});
- }
+ public Result<List<Data>> readByUser(AuthzTrans trans, String user) {
+ return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user});
+ }
- /**
- * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly
- * @param trans
- * @param role
- * @return
- * @throws DAOException
- */
- public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
- return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
- }
-
- /**
- * Direct Lookup of User Role
- * Don't forget to check for Expiration
- */
- public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {
- return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});
- }
+ /**
+ * Note: Use Sparingly. Cassandra's forced key structure means this will perform fairly poorly
+ * @param trans
+ * @param role
+ * @return
+ * @throws DAOException
+ */
+ public Result<List<Data>> readByRole(AuthzTrans trans, String role) {
+ return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role});
+ }
+
+ /**
+ * Direct Lookup of User Role
+ * Don't forget to check for Expiration
+ */
+ public Result<List<Data>> readByUserRole(AuthzTrans trans, String user, String role) {
+ return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role});
+ }
- /**
+ /**
* Log Modification statements to History
* @param modified which CRUD action was done
* @param data entity data that needs a log entry
* @param overrideMessage if this is specified, we use it rather than crafting a history message based on data
*/
- @Override
- protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
- boolean memo = override.length>0 && override[0]!=null;
- boolean subject = override.length>1 && override[1]!=null;
+ @Override
+ protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
+ boolean memo = override.length>0 && override[0]!=null;
+ boolean subject = override.length>1 && override[1]!=null;
- HistoryDAO.Data hd = HistoryDAO.newInitedData();
- HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
-
+ HistoryDAO.Data hd = HistoryDAO.newInitedData();
+ HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
+
hd.user = hdRole.user = trans.user();
- hd.action = modified.name();
- // Modifying User/Role is an Update to Role, not a Create. Jonathan, 07-14-2015
- hdRole.action = CRUD.update.name();
- hd.target = TABLE;
- hdRole.target = RoleDAO.TABLE;
- hd.subject = subject?override[1] : (data.user + '|'+data.role);
- hdRole.subject = data.role;
- switch(modified) {
- case create:
- hd.memo = hdRole.memo = memo
- ? String.format("%s by %s", override[0], hd.user)
- : String.format("%s added to %s",data.user,data.role);
- break;
- case update:
- hd.memo = hdRole.memo = memo
- ? String.format("%s by %s", override[0], hd.user)
- : String.format("%s - %s was updated",data.user,data.role);
- break;
- case delete:
- hd.memo = hdRole.memo = memo
- ? String.format("%s by %s", override[0], hd.user)
- : String.format("%s removed from %s",data.user,data.role);
- try {
- hd.reconstruct = hdRole.reconstruct = data.bytify();
- } catch (IOException e) {
- trans.warn().log(e,"Deleted UserRole could not be serialized");
- }
- break;
- default:
- hd.memo = hdRole.memo = memo
- ? String.format("%s by %s", override[0], hd.user)
- : "n/a";
- }
+ hd.action = modified.name();
+ // Modifying User/Role is an Update to Role, not a Create. Jonathan, 07-14-2015
+ hdRole.action = CRUD.update.name();
+ hd.target = TABLE;
+ hdRole.target = RoleDAO.TABLE;
+ hd.subject = subject?override[1] : (data.user + '|'+data.role);
+ hdRole.subject = data.role;
+ switch(modified) {
+ case create:
+ hd.memo = hdRole.memo = memo
+ ? String.format("%s by %s", override[0], hd.user)
+ : String.format("%s added to %s",data.user,data.role);
+ break;
+ case update:
+ hd.memo = hdRole.memo = memo
+ ? String.format("%s by %s", override[0], hd.user)
+ : String.format("%s - %s was updated",data.user,data.role);
+ break;
+ case delete:
+ hd.memo = hdRole.memo = memo
+ ? String.format("%s by %s", override[0], hd.user)
+ : String.format("%s removed from %s",data.user,data.role);
+ try {
+ hd.reconstruct = hdRole.reconstruct = data.bytify();
+ } catch (IOException e) {
+ trans.warn().log(e,"Deleted UserRole could not be serialized");
+ }
+ break;
+ default:
+ hd.memo = hdRole.memo = memo
+ ? String.format("%s by %s", override[0], hd.user)
+ : "n/a";
+ }
- if(historyDAO.create(trans, hd).status!=Status.OK) {
- trans.error().log("Cannot log to History");
- }
-
- if(historyDAO.create(trans, hdRole).status!=Status.OK) {
- trans.error().log("Cannot log to History");
- }
- // uses User as Segment
+ if(historyDAO.create(trans, hd).status!=Status.OK) {
+ trans.error().log("Cannot log to History");
+ }
+
+ if(historyDAO.create(trans, hdRole).status!=Status.OK) {
+ trans.error().log("Cannot log to History");
+ }
+ // uses User as Segment
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
- trans.error().log("Cannot touch CacheInfo");
+ trans.error().log("Cannot touch CacheInfo");
}
- }
+ }
}
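
To make the expiry note on readByUserRole concrete, a sketch (identity and role names are invented; expiry enforcement stays with the caller, as the Javadoc above says):

    // Look up a user/role pairing and treat expired rows as absent.
    Result<List<UserRoleDAO.Data>> rur =
            userRoleDAO.readByUserRole(trans, "jdoe@sample.onap.org", "org.onap.sample.admin");
    boolean active = false;
    if (rur.isOKhasData()) {
        Date now = new Date();
        for (UserRoleDAO.Data urd : rur.value) {
            if (urd.expires != null && urd.expires.after(now)) {
                active = true;                       // at least one unexpired membership
            }
        }
    }
    trans.debug().log("membership active: " + active);
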
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java
index 1979db28..f0a59582 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/CassExecutor.java
@@ -29,45 +29,45 @@ import org.onap.aaf.auth.org.Executor;
public class CassExecutor implements Executor {
- private Question q;
- private Function f;
- private AuthzTrans trans;
+ private Question q;
+ private Function f;
+ private AuthzTrans trans;
- public CassExecutor(AuthzTrans trans, Function f) {
- this.trans = trans;
- this.f = f;
- this.q = this.f.q;
- }
+ public CassExecutor(AuthzTrans trans, Function f) {
+ this.trans = trans;
+ this.f = f;
+ this.q = this.f.q;
+ }
- @Override
- public boolean hasPermission(String user, String ns, String type, String instance, String action) {
- return isGranted(user, ns, type, instance, action);
- }
+ @Override
+ public boolean hasPermission(String user, String ns, String type, String instance, String action) {
+ return isGranted(user, ns, type, instance, action);
+ }
- @Override
- public boolean inRole(String name) {
- Result<NsSplit> nss = q.deriveNsSplit(trans, name);
- if(nss.notOK())return false;
- return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData();
- }
+ @Override
+ public boolean inRole(String name) {
+ Result<NsSplit> nss = q.deriveNsSplit(trans, name);
+ if(nss.notOK())return false;
+ return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData();
+ }
- public boolean isGranted(String user, String ns, String type, String instance, String action) {
- return q.isGranted(trans, user, ns, type, instance,action);
- }
+ public boolean isGranted(String user, String ns, String type, String instance, String action) {
+ return q.isGranted(trans, user, ns, type, instance,action);
+ }
- @Override
- public String namespace() throws Exception {
- Result<Data> res = q.validNSOfDomain(trans,trans.user());
- if(res.isOK()) {
- String user[] = trans.user().split("\\.");
- return user[user.length-1] + '.' + user[user.length-2];
- }
- throw new Exception(res.status + ' ' + res.details);
- }
+ @Override
+ public String namespace() throws Exception {
+ Result<Data> res = q.validNSOfDomain(trans,trans.user());
+ if(res.isOK()) {
+ String user[] = trans.user().split("\\.");
+ return user[user.length-1] + '.' + user[user.length-2];
+ }
+ throw new Exception(res.status + ' ' + res.details);
+ }
- @Override
- public String id() {
- return trans.user();
- }
+ @Override
+ public String id() {
+ return trans.user();
+ }
}
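
One behaviour of CassExecutor.namespace() that is easy to miss (example values are hypothetical): it reverses the last two dot-separated labels of the transaction user into a namespace prefix, after validNSOfDomain has vouched for the domain. The transformation in isolation:

    // Sketch of the label reversal performed by namespace(); the real method also
    // consults q.validNSOfDomain(trans, trans.user()) before returning.
    String user = "jdoe@sample.onap.org";            // hypothetical trans.user() value
    String[] parts = user.split("\\.");              // ["jdoe@sample", "onap", "org"]
    String nsPrefix = parts[parts.length - 1] + '.' + parts[parts.length - 2];
    // nsPrefix is now "org.onap"
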
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java
index 8529ce87..f3aae2ec 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Function.java
@@ -60,1733 +60,1733 @@ import org.onap.aaf.auth.org.OrganizationException;
public class Function {
- private static final String CANNOT_BE_THE_OWNER_OF_A_NAMESPACE = "%s(%s) cannot be the owner of the namespace '%s'. Owners %s.";
-
- public enum FUTURE_OP {
- C("Create"),U("Update"),D("Delete"),G("Grant"),UG("UnGrant"),A("Approval");
-
- private String desc;
-
- private FUTURE_OP(String desc) {
- this.desc = desc;
- }
-
- public String desc() {
- return desc;
- }
-
- /**
- * Same as valueOf(), but passes back null instead of throwing Exception
- * @param value
- * @return
- */
- public static FUTURE_OP toFO(String value) {
- if(value!=null) {
- for(FUTURE_OP fo : values()) {
- if(fo.name().equals(value)){
- return fo;
- }
- }
- }
- return null;
- }
- }
-
- public enum OP_STATUS {
- E("Executed"),D("Denied"),P("Pending"),L("Lapsed");
-
- private String desc;
- public final static Result<OP_STATUS> RE = Result.ok(OP_STATUS.E);
- public final static Result<OP_STATUS> RD = Result.ok(OP_STATUS.D);
- public final static Result<OP_STATUS> RP = Result.ok(OP_STATUS.P);
- public final static Result<OP_STATUS> RL = Result.ok(OP_STATUS.L);
-
- private OP_STATUS(String desc) {
- this.desc = desc;
- }
-
- public String desc() {
- return desc;
- }
-
- }
-
- public static final String FOP_CRED = "cred";
- public static final String FOP_DELEGATE = "delegate";
- public static final String FOP_NS = "ns";
- public static final String FOP_PERM = "perm";
- public static final String FOP_ROLE = "role";
- public static final String FOP_USER_ROLE = "user_role";
- private static final List<Identity> NO_ADDL_APPROVE = new ArrayList<>();
- private static final String ROOT_NS = Define.ROOT_NS();
- // First Action should ALWAYS be "write", see "CreateRole"
- public final Question q;
-
- public Function(AuthzTrans trans, Question question) {
- q = question;
- }
-
- private class ErrBuilder {
- private StringBuilder sb;
- private List<String> ao;
-
- public void log(Result<?> result) {
- if (result.notOK()) {
- if (sb == null) {
- sb = new StringBuilder();
- ao = new ArrayList<>();
- }
- sb.append(result.details);
- sb.append('\n');
- for (String s : result.variables) {
- ao.add(s);
- }
- }
- }
-
- public String[] vars() {
- String[] rv = new String[ao.size()];
- ao.toArray(rv);
- return rv;
- }
-
- public boolean hasErr() {
- return sb != null;
- }
-
- @Override
- public String toString() {
- return sb == null ? "" : String.format(sb.toString(), ao);
- }
- }
-
- /**
- * createNS
- *
- * Create Namespace
- *
- * @param trans
- * @param org
- * @param ns
- * @param user
- * @return
- * @throws DAOException
- *
- * To create an NS, you need to: 1) validate permission to
- * modify parent NS 2) Does NS exist already? 3) Create NS with
- * a) "user" as owner. NOTE: Per 10-15 request for AAF 1.0 4)
- * Loop through Roles with Parent NS, and map any that start
- * with this NS into this one 5) Loop through Perms with Parent
- * NS, and map any that start with this NS into this one
- */
- public Result<Void> createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) {
- Result<?> rq;
-// if (namespace.name.endsWith(Question.DOT_ADMIN)
-// || namespace.name.endsWith(Question.DOT_OWNER)) {
-// return Result.err(Status.ERR_BadData,
-// "'admin' and 'owner' are reserved names in AAF");
-// }
-
- try {
- for (String u : namespace.owner) {
- Organization org = trans.org();
- Identity orgUser = org.getIdentity(trans, u);
- String reason;
- if (orgUser == null) {
- return Result.err(Status.ERR_Policy,"%s is not a valid user at %s",u,org.getName());
- } else if((reason=orgUser.mayOwn())!=null) {
- if (org.isTestEnv()) {
- String reason2;
- if((reason2=org.validate(trans, Policy.AS_RESPONSIBLE,new CassExecutor(trans, this), u))!=null) { // can masquerade as responsible
- trans.debug().log(reason2);
- return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name,reason);
- }
- // a null means ok
- } else {
- if(orgUser.isFound()) {
- return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name, reason);
- } else {
- return Result.err(Status.ERR_Policy,u + " is an invalid Identity");
- }
- }
- }
- }
- } catch (Exception e) {
- trans.error().log(e,
- "Could not contact Organization for User Validation");
- }
-
- String user = trans.user();
- // 1) May Change Parent?
- int idx = namespace.name.lastIndexOf('.');
- String parent;
- if (idx < 0) {
- if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "create")) {
- return Result.err(Result.ERR_Security,
- "%s may not create Root Namespaces", user);
- }
- parent = null;
- fromApproval = true;
- } else {
- parent = namespace.name.substring(0, idx); // get Parent String
- }
-
- Result<NsDAO.Data> rparent = q.deriveNs(trans, parent);
- if (rparent.notOK()) {
- return Result.err(rparent);
- }
- if (!fromApproval) {
- rparent = q.mayUser(trans, user, rparent.value, Access.write);
- if (rparent.notOK()) {
- return Result.err(rparent);
- }
- }
- parent = namespace.parent = rparent.value.name; // Correct Namespace from real data
-
- // 2) Does requested NS exist
- if (q.nsDAO.read(trans, namespace.name).isOKhasData()) {
- return Result.err(Status.ERR_ConflictAlreadyExists,
- "Target Namespace already exists");
- }
-
- // Someone must be responsible.
- if (namespace.owner == null || namespace.owner.isEmpty()) {
- return Result
- .err(Status.ERR_Policy,
- "Namespaces must be assigned at least one responsible party");
- }
-
- // 3) Create NS
- Date now = new Date();
-
- Result<Void> r;
- // 3a) Admin
-
- try {
- // Originally, added the enterer as Admin, but that's not necessary,
- // or helpful for Operations folks..
- // Admins can be empty, because they can be changed by lower level
- // NSs
- // if(ns.admin(false).isEmpty()) {
- // ns.admin(true).add(user);
- // }
- if (namespace.admin != null) {
- for (String u : namespace.admin) {
- if ((r = checkValidID(trans, now, u)).notOK()) {
- return r;
- }
- }
- }
-
- // 3b) Responsible
- Organization org = trans.org();
- for (String u : namespace.owner) {
- Identity orgUser = org.getIdentity(trans, u);
- if (orgUser == null) {
- return Result
- .err(Status.ERR_BadData,
- "NS must be created with an %s approved Responsible Party",
- org.getName());
- }
- }
- } catch (Exception e) {
- return Result.err(Status.ERR_UserNotFound, e.getMessage());
- }
-
- // VALIDATIONS done... Add NS
- if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) {
- return Result.err(rq);
- }
-
- // Since Namespace is now created, we need to grab all subsequent errors
- ErrBuilder eb = new ErrBuilder();
-
- // Add UserRole(s)
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();
- urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime();
- urdd.role(namespace.name, Question.ADMIN);
- for (String admin : namespace.admin) {
- urdd.user = admin;
- eb.log(q.userRoleDAO.create(trans, urdd));
- }
- urdd.role(namespace.name,Question.OWNER);
- for (String owner : namespace.owner) {
- urdd.user = owner;
- eb.log(q.userRoleDAO.create(trans, urdd));
- }
-
- addNSAdminRolesPerms(trans, eb, namespace.name);
-
- addNSOwnerRolesPerms(trans, eb, namespace.name);
-
- if (parent != null) {
- // Build up with any errors
-
- String targetNs = rparent.value.name; // Get the Parent Namespace, not target
- String targetName = namespace.name.substring(targetNs.length() + 1); // Remove the Parent Namespace from the Target + a dot, and you'll get the name
- int targetNameDot = targetName.length() + 1;
-
- // 4) Change any roles with children matching this NS, and
- Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readChildren(trans, targetNs, targetName);
- if (rrdc.isOKhasData()) {
- for (RoleDAO.Data rdd : rrdc.value) {
- // Remove old Role from Perms, save them off
- List<PermDAO.Data> lpdd = new ArrayList<>();
- for(String p : rdd.perms(false)) {
- Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
- if(rpdd.isOKhasData()) {
- PermDAO.Data pdd = rpdd.value;
- lpdd.add(pdd);
- q.permDAO.delRole(trans, pdd, rdd);
- } else{
- trans.error().log(rpdd.errorString());
- }
- }
-
- // Save off Old keys
- String delP1 = rdd.ns;
- String delP2 = rdd.name;
-
- // Write in new key
- rdd.ns = namespace.name;
- rdd.name = (delP2.length() > targetNameDot) ? delP2
- .substring(targetNameDot) : "";
-
- // Need to use non-cached, because switching namespaces, not
- // "create" per se
- if ((rq = q.roleDAO.create(trans, rdd)).isOK()) {
- // Put Role back into Perm, with correct info
- for(PermDAO.Data pdd : lpdd) {
- q.permDAO.addRole(trans, pdd, rdd);
- }
- // Change data for User Roles
- Result<List<UserRoleDAO.Data>> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName());
- if(rurd.isOKhasData()) {
- for(UserRoleDAO.Data urd : rurd.value) {
- urd.ns = rdd.ns;
- urd.rname = rdd.name;
- q.userRoleDAO.update(trans, urd);
- }
- }
- // Now delete old one
- rdd.ns = delP1;
- rdd.name = delP2;
- if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) {
- eb.log(rq);
- }
- } else {
- eb.log(rq);
- }
- }
- }
-
- // 4) Change any Permissions with children matching this NS, and
- Result<List<PermDAO.Data>> rpdc = q.permDAO.readChildren(trans,targetNs, targetName);
- if (rpdc.isOKhasData()) {
- for (PermDAO.Data pdd : rpdc.value) {
- // Remove old Perm from Roles, save them off
- List<RoleDAO.Data> lrdd = new ArrayList<>();
-
- for(String rl : pdd.roles(false)) {
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
- if(rrdd.isOKhasData()) {
- RoleDAO.Data rdd = rrdd.value;
- lrdd.add(rdd);
- q.roleDAO.delPerm(trans, rdd, pdd);
- } else{
- trans.error().log(rrdd.errorString());
- }
- }
-
- // Save off Old keys
- String delP1 = pdd.ns;
- String delP2 = pdd.type;
- pdd.ns = namespace.name;
- pdd.type = (delP2.length() > targetNameDot) ? delP2
- .substring(targetNameDot) : "";
- if ((rq = q.permDAO.create(trans, pdd)).isOK()) {
- // Put Role back into Perm, with correct info
- for(RoleDAO.Data rdd : lrdd) {
- q.roleDAO.addPerm(trans, rdd, pdd);
- }
-
- pdd.ns = delP1;
- pdd.type = delP2;
- if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) {
- eb.log(rq);
- // } else {
- // Need to invalidate directly, because we're
- // switching places in NS, not normal cache behavior
- // q.permDAO.invalidate(trans,pdd);
- }
- } else {
- eb.log(rq);
- }
- }
- }
- if (eb.hasErr()) {
- return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars());
- }
- }
- return Result.ok();
- }
-
- private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
- // Admin Role/Perm
- RoleDAO.Data rd = new RoleDAO.Data();
- rd.ns = ns;
- rd.name = "admin";
- rd.description = "AAF Namespace Administrators";
-
- PermDAO.Data pd = new PermDAO.Data();
- pd.ns = ns;
- pd.type = "access";
- pd.instance = Question.ASTERIX;
- pd.action = Question.ASTERIX;
- pd.description = "AAF Namespace Write Access";
-
- rd.perms = new HashSet<>();
- rd.perms.add(pd.encode());
- eb.log(q.roleDAO.create(trans, rd));
-
- pd.roles = new HashSet<>();
- pd.roles.add(rd.encode());
- eb.log(q.permDAO.create(trans, pd));
- }
-
- private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
- RoleDAO.Data rd = new RoleDAO.Data();
- rd.ns = ns;
- rd.name = "owner";
- rd.description = "AAF Namespace Owners";
-
- PermDAO.Data pd = new PermDAO.Data();
- pd.ns = ns;
- pd.type = "access";
- pd.instance = Question.ASTERIX;
- pd.action = Question.READ;
- pd.description = "AAF Namespace Read Access";
-
- rd.perms = new HashSet<>();
- rd.perms.add(pd.encode());
- eb.log(q.roleDAO.create(trans, rd));
-
- pd.roles = new HashSet<>();
- pd.roles.add(rd.encode());
- eb.log(q.permDAO.create(trans, pd));
- }
-
- /**
- * deleteNS
- *
- * Delete Namespace
- *
- * @param trans
- * @param org
- * @param ns
- * @param force
- * @param user
- * @return
- * @throws DAOException
- *
- *
- * To delete an NS, you need to:
- * 1) Validate permission to modify this NS
- * 2) Find all Roles with this NS, and 2a) if Force, delete them, else modify to Parent NS
- * 3) Find all Perms with this NS, and 3a) if Force, delete them, else modify to Parent NS
- * 4) Find all IDs associated to this NS, and deny if any exist
- * 5) Remove the NS
- */
- public Result<Void> deleteNS(AuthzTrans trans, String ns) {
- boolean force = trans.requested(REQD_TYPE.force);
- boolean move = trans.requested(REQD_TYPE.move);
- // 1) Validate
- Result<List<NsDAO.Data>> nsl;
- if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) {
- return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns);
- }
- NsDAO.Data nsd = nsl.value.get(0);
- NsType nt;
- if (move && !q.canMove(nt = NsType.fromType(nsd.type))) {
- return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name());
- }
-
- Result<NsDAO.Data> dnr = q.mayUser(trans, trans.user(), nsd, Access.write);
- if (dnr.status != Status.OK) {
- return Result.err(dnr);
- }
-
- // 2) Find Parent
- String user = trans.user();
- int idx = ns.lastIndexOf('.');
- NsDAO.Data parent;
- if (idx < 0) {
- if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "delete")) {
- return Result.err(Result.ERR_Security,
- "%s may not delete Root Namespaces", user);
- }
- parent = null;
- } else {
- Result<NsDAO.Data> rlparent = q.deriveNs(trans, ns.substring(0, idx));
- if (rlparent.notOKorIsEmpty()) {
- return Result.err(rlparent);
- }
- parent = rlparent.value;
- }
-
- // Build up with any errors
- // If sb != null below is an indication of error
- StringBuilder sb = null;
- ErrBuilder er = new ErrBuilder();
-
- // 2a) Deny if any IDs on Namespace
- Result<List<CredDAO.Data>> creds = q.credDAO.readNS(trans, ns);
- if (creds.isOKhasData()) {
- if (force || move) {
- for (CredDAO.Data cd : creds.value) {
- er.log(q.credDAO.delete(trans, cd, false));
- // Since we're deleting all the creds, we should delete all
- // the user Roles for that Cred
- Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
- .readByUser(trans, cd.id);
- if (rlurd.isOK()) {
- for (UserRoleDAO.Data data : rlurd.value) {
- q.userRoleDAO.delete(trans, data, false);
- }
- }
-
- }
- } else {
- // first possible StringBuilder Create.
- sb = new StringBuilder();
- sb.append('[');
- sb.append(ns);
- sb.append("] contains users");
- }
- }
-
- // 2b) Find (or delete if forced flag is set) dependencies
- // First, find if NS Perms are the only ones
- Result<List<PermDAO.Data>> rpdc = q.permDAO.readNS(trans, ns);
- if (rpdc.isOKhasData()) {
- // Since there are now NS perms, we have to count NON-NS perms.
- // FYI, if we delete them now, and the NS is not deleted, it is in
- // an inconsistent state.
- boolean nonaccess = false;
- for (PermDAO.Data pdd : rpdc.value) {
- if (!"access".equals(pdd.type)) {
- nonaccess = true;
- break;
- }
- }
- if (nonaccess && !force && !move) {
- if (sb == null) {
- sb = new StringBuilder();
- sb.append('[');
- sb.append(ns);
- sb.append("] contains ");
- } else {
- sb.append(", ");
- }
- sb.append("permissions");
- }
- }
-
- Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readNS(trans, ns);
- if (rrdc.isOKhasData()) {
- // Since there are now NS roles, we have to count NON-NS roles.
- // FYI, if we delete them now, and the NS is not deleted, it is in
- // an inconsistent state.
- int count = rrdc.value.size();
- for (RoleDAO.Data rdd : rrdc.value) {
- if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) {
- --count;
- }
- }
- if (count > 0 && !force && !move) {
- if (sb == null) {
- sb = new StringBuilder();
- sb.append('[');
- sb.append(ns);
- sb.append("] contains ");
- } else {
- sb.append(", ");
- }
- sb.append("roles");
- }
- }
-
- // 2c) Deny if dependencies exist that would be moved to root level
- // parent is root level parent here. Need to find closest parent ns that
- // exists
- if (sb != null) {
- if (!force && !move) {
- sb.append(".\n Delete dependencies and try again. Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent.");
- return Result.err(Status.ERR_DependencyExists, sb.toString());
- }
-
- if (move && (parent == null || parent.type == NsType.COMPANY.type)) {
- return Result
- .err(Status.ERR_DependencyExists,
- "Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again",
- parent.name);
- }
- } else if (move && parent != null) {
- sb = new StringBuilder();
- // 3) Change any roles with children matching this NS, and
- moveRoles(trans, parent, sb, rrdc);
- // 4) Change any Perms with children matching this NS, and
- movePerms(trans, parent, sb, rpdc);
- }
-
- if (sb != null && sb.length() > 0) {
- return Result.err(Status.ERR_DependencyExists, sb.toString());
- }
-
- if (er.hasErr()) {
- if (trans.debug().isLoggable()) {
- trans.debug().log(er.toString());
- }
- return Result.err(Status.ERR_DependencyExists,
- "Namespace members cannot be deleted for %s", ns);
- }
-
- // 5) OK... good to go for NS Deletion...
- if (!rpdc.isEmpty()) {
- for (PermDAO.Data perm : rpdc.value) {
- deletePerm(trans, perm, true, true);
- }
- }
- if (!rrdc.isEmpty()) {
- for (RoleDAO.Data role : rrdc.value) {
- deleteRole(trans, role, true, true);
- }
- }
-
- return q.nsDAO.delete(trans, nsd, false);
- }
-
- public Result<List<String>> getOwners(AuthzTrans trans, String ns,
- boolean includeExpired) {
- return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired);
- }
-
- private Result<Void> mayAddOwner(AuthzTrans trans, String ns, String id) {
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- Identity user;
- Organization org = trans.org();
- try {
- if ((user = org.getIdentity(trans, id)) == null) {
- return Result.err(Status.ERR_Policy,
- "%s reports that this is not a valid credential",
- org.getName());
- }
- String reason;
- if ((reason=user.mayOwn())==null) {
- return Result.ok();
- } else {
- if (org.isTestEnv()) {
- String reason2;
- if((reason2 = org.validate(trans, Policy.AS_RESPONSIBLE, new CassExecutor(trans, this), id))==null) {
- return Result.ok();
- } else {
- trans.debug().log(reason2);
- }
- }
- return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,user.fullName(),user.id(),ns, reason);
- }
- } catch (Exception e) {
- return Result.err(e);
- }
- }
-
- private Result<Void> mayAddAdmin(AuthzTrans trans, String ns, String id) {
- // Does NS Exist?
- Result<Void> r = checkValidID(trans, new Date(), id);
- if (r.notOK()) {
- return r;
- }
- // Is id able to be an Admin
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
- if (rq.notOK()) {
- Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
- if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
- return Result.err(rq);
- }
- }
- return r;
- }
-
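- // Checks that 'user' is either a valid Identity in the Organization's realm
- // or has an unexpired AAF Credential.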
- private Result<Void> checkValidID(AuthzTrans trans, Date now, String user) {
- Organization org = trans.org();
- if (org.supportsRealm(user)) {
- try {
- if (org.getIdentity(trans, user) == null) {
- return Result.err(Status.ERR_Denied,
- "%s reports that %s is a faulty ID", org.getName(),
- user);
- }
- return Result.ok();
- } catch (Exception e) {
- return Result.err(Result.ERR_Security,
- "%s is not a valid %s Credential", user, org.getName());
- }
- //TODO find out how to make sure good ALTERNATE OAUTH DOMAIN USER
-// } else if(user.endsWith(ALTERNATE OAUTH DOMAIN)) {
-// return Result.ok();
- } else {
- Result<List<CredDAO.Data>> cdr = q.credDAO.readID(trans, user);
- if (cdr.notOKorIsEmpty()) {
- return Result.err(Status.ERR_Security,
- "%s is not a valid AAF Credential", user);
- }
-
- for (CredDAO.Data cd : cdr.value) {
- if (cd.expires.after(now)) {
- return Result.ok();
- }
- }
- }
- return Result.err(Result.ERR_Security, "%s has expired", user);
- }
-
- public Result<Void> delOwner(AuthzTrans trans, String ns, String id) {
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- return delUserRole(trans, id, ns,Question.OWNER);
- }
-
- public Result<List<String>> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) {
- return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired);
- }
-
- public Result<Void> delAdmin(AuthzTrans trans, String ns, String id) {
- Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
- if (rq.notOK()) {
- return Result.err(rq);
- }
-
- rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
- if (rq.notOK()) {
- // Even though not a "writer", Owners still determine who gets to be an Admin
- Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
- if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
- return Result.err(rq);
- }
- }
-
- return delUserRole(trans, id, ns, Question.ADMIN);
- }
-
- /**
- * Helper function that moves permissions from a namespace being deleted to
- * its parent namespace
- *
- * @param trans
- * @param parent
- * @param sb
- * @param rpdc
- * - list of permissions in namespace being deleted
- */
- private void movePerms(AuthzTrans trans, NsDAO.Data parent,
- StringBuilder sb, Result<List<PermDAO.Data>> rpdc) {
-
- Result<Void> rv;
- Result<PermDAO.Data> pd;
-
- if (rpdc.isOKhasData()) {
- for (PermDAO.Data pdd : rpdc.value) {
- String delP2 = pdd.type;
- if ("access".equals(delP2)) {
- continue;
- }
- // Remove old Perm from Roles, save them off
- List<RoleDAO.Data> lrdd = new ArrayList<>();
-
- for(String rl : pdd.roles(false)) {
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
- if(rrdd.isOKhasData()) {
- RoleDAO.Data rdd = rrdd.value;
- lrdd.add(rdd);
- q.roleDAO.delPerm(trans, rdd, pdd);
- } else{
- trans.error().log(rrdd.errorString());
- }
- }
-
- // Save off Old keys
- String delP1 = pdd.ns;
- NsSplit nss = new NsSplit(parent, pdd.fullType());
- pdd.ns = nss.ns;
- pdd.type = nss.name;
- // Use direct Create/Delete, because switching namespaces
- if ((pd = q.permDAO.create(trans, pdd)).isOK()) {
- // Put Role back into Perm, with correct info
- for(RoleDAO.Data rdd : lrdd) {
- q.roleDAO.addPerm(trans, rdd, pdd);
- }
-
- pdd.ns = delP1;
- pdd.type = delP2;
- if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) {
- sb.append(rv.details);
- sb.append('\n');
- // } else {
- // Need to invalidate directly, because we're switching
- // places in NS, not normal cache behavior
- // q.permDAO.invalidate(trans,pdd);
- }
- } else {
- sb.append(pd.details);
- sb.append('\n');
- }
- }
- }
- }
-
- /**
- * Helper function that moves roles from a namespace being deleted to its
- * parent namespace
- *
- * @param trans
- * @param parent
- * @param sb
- * @param rrdc
- * - list of roles in namespace being deleted
- */
- private void moveRoles(AuthzTrans trans, NsDAO.Data parent,
- StringBuilder sb, Result<List<RoleDAO.Data>> rrdc) {
-
- Result<Void> rv;
- Result<RoleDAO.Data> rd;
-
- if (rrdc.isOKhasData()) {
- for (RoleDAO.Data rdd : rrdc.value) {
- String delP2 = rdd.name;
- if ("admin".equals(delP2) || "owner".equals(delP2)) {
- continue;
- }
- // Remove old Role from Perms, save them off
- List<PermDAO.Data> lpdd = new ArrayList<>();
- for(String p : rdd.perms(false)) {
- Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
- if(rpdd.isOKhasData()) {
- PermDAO.Data pdd = rpdd.value;
- lpdd.add(pdd);
- q.permDAO.delRole(trans, pdd, rdd);
- } else{
- trans.error().log(rpdd.errorString());
- }
- }
-
- // Save off Old keys
- String delP1 = rdd.ns;
-
- NsSplit nss = new NsSplit(parent, rdd.fullName());
- rdd.ns = nss.ns;
- rdd.name = nss.name;
- // Use direct Create/Delete, because switching namespaces
- if ((rd = q.roleDAO.create(trans, rdd)).isOK()) {
- // Put Role back into Perm, with correct info
- for(PermDAO.Data pdd : lpdd) {
- q.permDAO.addRole(trans, pdd, rdd);
- }
-
- rdd.ns = delP1;
- rdd.name = delP2;
- if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) {
- sb.append(rv.details);
- sb.append('\n');
- // } else {
- // Need to invalidate directly, because we're switching
- // places in NS, not normal cache behavior
- // q.roleDAO.invalidate(trans,rdd);
- }
- } else {
- sb.append(rd.details);
- sb.append('\n');
- }
- }
- }
- }
-
- /**
- * Create Permission (and any missing Permission between this and Parent) if
- * we have permission
- *
- * Pass in the desired Management Permission for this Permission
- *
- * If Force is set, then the Roles listed will be created (if the User is
- * allowed to create them) and pre-granted.
- */
- public Result<Void> createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) {
- String user = trans.user();
- // Next, see if User is allowed to Manage Parent Permission
-
- Result<NsDAO.Data> rnsd;
- if (!fromApproval) {
- rnsd = q.mayUser(trans, user, perm, Access.write);
- if (rnsd.notOK()) {
- return Result.err(rnsd);
- }
- } else {
- rnsd = q.deriveNs(trans, perm.ns);
- }
-
- // Does Child exist?
- if (!trans.requested(REQD_TYPE.force)) {
- if (q.permDAO.read(trans, perm).isOKhasData()) {
- return Result.err(Status.ERR_ConflictAlreadyExists,
- "Permission [%s.%s|%s|%s] already exists.", perm.ns,
- perm.type, perm.instance, perm.action);
- }
- }
-
- // Attempt to add perms to roles, creating as possible
- Set<String> roles;
- String pstring = perm.encode();
-
- // For each Role
- for (String role : roles = perm.roles(true)) {
- Result<RoleDAO.Data> rdd = RoleDAO.Data.decode(trans,q,role);
- if(rdd.isOKhasData()) {
- RoleDAO.Data rd = rdd.value;
- if (!fromApproval) {
- // May User write to the Role in question.
- Result<NsDAO.Data> rns = q.mayUser(trans, user, rd,
- Access.write);
- if (rns.notOK()) {
- // Remove the role from the set to add, because this user
- // may not relate Permissions to it
- roles.remove(role); // Don't allow adding
- trans.warn()
- .log("User [%s] does not have permission to relate Permissions to Role [%s]",
- user, role);
- }
- }
-
- Result<List<RoleDAO.Data>> rlrd;
- if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) {
- rd.perms(true).add(pstring);
- if (q.roleDAO.create(trans, rd).notOK()) {
- roles.remove(role); // Role doesn't exist, and can't be created
- }
- } else {
- rd = rlrd.value.get(0);
- if (!rd.perms.contains(pstring)) {
- q.roleDAO.addPerm(trans, rd, perm);
- }
- }
- }
- }
-
- Result<PermDAO.Data> pdr = q.permDAO.create(trans, perm);
- if (pdr.isOK()) {
- return Result.ok();
- } else {
- return Result.err(pdr);
- }
- }
-
- public Result<Void> deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) {
- String user = trans.user();
-
- // Next, see if User is allowed to Manage Permission
- Result<NsDAO.Data> rnsd;
- if (!fromApproval) {
- rnsd = q.mayUser(trans, user, perm, Access.write);
- if (rnsd.notOK()) {
- return Result.err(rnsd);
- }
- }
- // Does Perm exist?
- Result<List<PermDAO.Data>> pdr = q.permDAO.read(trans, perm);
- if (pdr.notOKorIsEmpty()) {
- return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.",
- perm.ns,perm.type, perm.instance, perm.action);
- }
- // Get perm, but with rest of data.
- PermDAO.Data fullperm = pdr.value.get(0);
-
- // Attached to any Roles?
- if (fullperm.roles != null) {
- if (force) {
- for (String role : fullperm.roles) {
- Result<Void> rv = null;
- Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, q, role);
- if(rrdd.isOKhasData()) {
- trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete");
- if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) {
- if (rv.notOK()) {
- trans.error().log("Error removing Role during delFromPermRole: ",
- trans.getUserPrincipal(),
- rv.errorString());
- }
- }
- } else {
- return Result.err(rrdd);
- }
- }
- } else if (!fullperm.roles.isEmpty()) {
- return Result
- .err(Status.ERR_DependencyExists,
- "Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.",
- fullperm.ns, fullperm.type, fullperm.instance, fullperm.action);
- }
- }
-
- return q.permDAO.delete(trans, fullperm, false);
- }
-
- public Result<Void> deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) {
- String user = trans.user();
-
- // Next, see if User is allowed to Manage Role
- Result<NsDAO.Data> rnsd;
- if (!fromApproval) {
- rnsd = q.mayUser(trans, user, role, Access.write);
- if (rnsd.notOK()) {
- return Result.err(rnsd);
- }
- }
-
- // Are there any Users Attached to Role?
- Result<List<UserRoleDAO.Data>> urdr = q.userRoleDAO.readByRole(trans,role.fullName());
- if (force) {
- if (urdr.isOKhasData()) {
- for (UserRoleDAO.Data urd : urdr.value) {
- q.userRoleDAO.delete(trans, urd, false);
- }
- }
- } else if (urdr.isOKhasData()) {
- return Result.err(Status.ERR_DependencyExists,
- "Role [%s.%s] cannot be deleted as it is used by 1 or more Users.",
- role.ns, role.name);
- }
-
- // Does Role exist?
- Result<List<RoleDAO.Data>> rdr = q.roleDAO.read(trans, role);
- if (rdr.notOKorIsEmpty()) {
- return Result.err(Status.ERR_RoleNotFound,
- "Role [%s.%s] does not exist", role.ns, role.name);
- }
- RoleDAO.Data fullrole = rdr.value.get(0); // full key search
-
- // Remove Self from Permissions... always, force or not. Force only applies to Dependencies (Users)
- if (fullrole.perms != null) {
- for (String perm : fullrole.perms(false)) {
- Result<PermDAO.Data> rpd = PermDAO.Data.decode(trans,q,perm);
- if (rpd.isOK()) {
- trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete");
-
- Result<?> r = q.permDAO.delRole(trans, rpd.value, fullrole);
- if (r.notOK()) {
- trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details);
- }
- } else {
- trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole);
- }
- }
- }
- return q.roleDAO.delete(trans, fullrole, false);
- }
-
- /**
- * Only owner of Permission may add to Role
- *
- * If force is set, however, the Role will be created before the Grant,
- * provided the User is allowed to create it.
- *
- * @param trans
- * @param role
- * @param pd
- * @return
- */
- public Result<Void> addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
- String user = trans.user();
-
- if (!fromApproval) {
- Result<NsDAO.Data> rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY);
- if(rRoleCo.notOK()) {
- return Result.err(rRoleCo);
- }
- Result<NsDAO.Data> rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY);
- if(rPermCo.notOK()) {
- return Result.err(rPermCo);
- }
-
- // Not from same company
- if(!rRoleCo.value.name.equals(rPermCo.value.name)) {
- Result<Data> r;
- // Only grant if User ALSO has Write ability in Other Company
- if((r = q.mayUser(trans, user, role, Access.write)).notOK()) {
- return Result.err(r);
- }
- }
-
-
- // Must be Perm Admin, or Granted Special Permission
- Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
- if (ucp.notOK()) {
- // Don't allow CLI potential Grantees to change their own AAF
- // Perms,
- if ((ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type))
- || !q.isGranted(trans, trans.user(),ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) {
- // Not otherwise granted
- // TODO Needed?
- return Result.err(ucp);
- }
- // Final Check... Don't allow Grantees to add to Roles they are
- // part of
- Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
- .readByUser(trans, trans.user());
- if (rlurd.isOK()) {
- for (UserRoleDAO.Data ur : rlurd.value) {
- if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) {
- return Result.err(ucp);
- }
- }
- }
- }
- }
-
- Result<List<PermDAO.Data>> rlpd = q.permDAO.read(trans, pd);
- if (rlpd.notOKorIsEmpty()) {
- return Result.err(Status.ERR_PermissionNotFound,
- "Permission must exist to add to Role");
- }
-
- Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(trans, role); // Already checked for "can change Role"
- Result<Void> rv;
-
- if (rlrd.notOKorIsEmpty()) {
- if (trans.requested(REQD_TYPE.force)) {
- Result<NsDAO.Data> ucr = q.mayUser(trans, user, role,
- Access.write);
- if (ucr.notOK()) {
- return Result
- .err(Status.ERR_Denied,
- "Role [%s.%s] does not exist. User [%s] cannot create.",
- role.ns, role.name, user);
- }
-
- role.perms(true).add(pd.encode());
- Result<RoleDAO.Data> rdd = q.roleDAO.create(trans, role);
- if (rdd.isOK()) {
- rv = Result.ok();
- } else {
- rv = Result.err(rdd);
- }
- } else {
- return Result.err(Status.ERR_RoleNotFound,
- "Role [%s.%s] does not exist.", role.ns, role.name);
- }
- } else {
- role = rlrd.value.get(0);
- if (role.perms(false).contains(pd.encode())) {
- return Result.err(Status.ERR_ConflictAlreadyExists,
- "Permission [%s.%s] is already a member of role [%s,%s]",
- pd.ns, pd.type, role.ns, role.name);
- }
- role.perms(true).add(pd.encode()); // this is added for Caching access purposes... doesn't affect addPerm
- rv = q.roleDAO.addPerm(trans, role, pd);
- }
- if (rv.status == Status.OK) {
- return q.permDAO.addRole(trans, pd, role);
- // exploring how to add information message to successful http
- // request
- }
- return rv;
- }
-
- /**
- * Either Owner of Role or Permission may delete from Role
- *
- * @param trans
- * @param role
- * @param pd
- * @return
- */
- public Result<Void> delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
- String user = trans.user();
- if (!fromApproval) {
- Result<NsDAO.Data> ucr = q.mayUser(trans, user, role, Access.write);
- Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
-
- // If Can't change either Role or Perm, then deny
- if (ucr.notOK() && ucp.notOK()) {
- return Result.err(Status.ERR_Denied,
- "User [" + trans.user()
- + "] does not have permission to delete ["
- + pd.encode() + "] from Role ["
- + role.fullName() + ']');
- }
- }
-
- Result<List<RoleDAO.Data>> rlr = q.roleDAO.read(trans, role);
- if (rlr.notOKorIsEmpty()) {
- // If Bad Data, clean out
- Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
- if (rlp.isOKhasData()) {
- for (PermDAO.Data pv : rlp.value) {
- q.permDAO.delRole(trans, pv, role);
- }
- }
- return Result.err(rlr);
- }
- String perm1 = pd.encode();
- boolean notFound;
- if (trans.requested(REQD_TYPE.force)) {
- notFound = false;
- } else { // only check if force not set.
- notFound = true;
- for (RoleDAO.Data r : rlr.value) {
- if (r.perms != null) {
- for (String perm : r.perms) {
- if (perm1.equals(perm)) {
- notFound = false;
- break;
- }
- }
- if(!notFound) {
- break;
- }
- }
- }
- }
- if (notFound) { // Need to check both, in case of corruption
- return Result.err(Status.ERR_PermissionNotFound,
- "Permission [%s.%s|%s|%s] not associated with any Role",
- pd.ns,pd.type,pd.instance,pd.action);
- }
-
- // Read Perm for full data
- Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
- Result<Void> rv = null;
- if (rlp.isOKhasData()) {
- for (PermDAO.Data pv : rlp.value) {
- if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) {
- if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) {
- trans.error().log(
- "Error removing Perm during delFromPermRole:",
- trans.getUserPrincipal(), rv.errorString());
- }
- } else {
- trans.error().log(
- "Error removing Role during delFromPermRole:",
- trans.getUserPrincipal(), rv.errorString());
- }
- }
- } else {
- rv = q.roleDAO.delPerm(trans, role, pd);
- if (rv.notOK()) {
- trans.error().log("Error removing Role during delFromPermRole",
- rv.errorString());
- }
- }
- return rv == null ? Result.ok() : rv;
- }
-
- public Result<Void> delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) {
- Result<NsSplit> nss = q.deriveNsSplit(trans, role);
- if (nss.notOK()) {
- return Result.err(nss);
- }
- RoleDAO.Data rd = new RoleDAO.Data();
- rd.ns = nss.value.ns;
- rd.name = nss.value.name;
- return delPermFromRole(trans, rd, pd, false);
- }
-
- /**
- * Add a User to Role
- *
- * 1) Role must exist
- * 2) User must be a known Credential (i.e. mechID ok if Credential) or known Organizational User
- *
- * @param trans
- * @param org
- * @param urData
- * @return
- * @throws DAOException
- */
- public Result<Void> addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) {
- Result<Void> rv;
- if(Question.ADMIN.equals(urData.rname)) {
- rv = mayAddAdmin(trans, urData.ns, urData.user);
- } else if(Question.OWNER.equals(urData.rname)) {
- rv = mayAddOwner(trans, urData.ns, urData.user);
- } else {
- rv = checkValidID(trans, new Date(), urData.user);
- }
- if(rv.notOK()) {
- return rv;
- }
-
- // Check if record exists
- if (q.userRoleDAO.read(trans, urData).isOKhasData()) {
- return Result.err(Status.ERR_ConflictAlreadyExists,
- "User Role exists");
- }
- if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
- return Result.err(Status.ERR_RoleNotFound,
- "Role [%s.%s] does not exist", urData.ns, urData.rname);
- }
-
- urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime();
-
-
- Result<UserRoleDAO.Data> udr = q.userRoleDAO.create(trans, urData);
- switch (udr.status) {
- case OK:
- return Result.ok();
- default:
- return Result.err(udr);
- }
- }
-
- public Result<Void> addUserRole(AuthzTrans trans, String user, String ns, String rname) {
- try {
- if(trans.org().getIdentity(trans, user)==null) {
- return Result.err(Result.ERR_BadData,user+" is an Invalid Identity for " + trans.org().getName());
- }
- } catch (OrganizationException e) {
- return Result.err(e);
- }
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();
- urdd.ns = ns;
- urdd.role(ns, rname);
- urdd.user = user;
- return addUserRole(trans,urdd);
- }
-
- /**
- * Extend User Role.
- *
- * Extends the Expiration date according to Organization rules.
- *
- * @param trans
- * @param org
- * @param urData
- * @return
- */
- public Result<Void> extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) {
- // Check if record still exists
- if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) {
- return Result.err(Status.ERR_UserRoleNotFound,
- "User Role does not exist");
- }
-
- if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
- return Result.err(Status.ERR_RoleNotFound,
- "Role [%s.%s] does not exist", urData.ns,urData.rname);
- }
- // Special case for "Admin" roles. Issue brought forward with Prod
- // problem 9/26
- Date now = new Date();
- GregorianCalendar gc = new GregorianCalendar();
- gc.setTime(now.after(urData.expires)?now:urData.expires);
- urData.expires = trans.org().expiration(gc, Expiration.UserInRole).getTime(); // get full time starting today
- return q.userRoleDAO.update(trans, urData);
- }
-
- // ////////////////////////////////////////////////////
- // Special User Role Functions
- // These exist, because User Roles have Expiration dates, which must be
- // accounted for
- // Also, as of July, 2015, Namespace Owners and Admins are now regular User
- // Roles
- // ////////////////////////////////////////////////////
- public Result<List<String>> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) {
- Result<List<UserRoleDAO.Data>> rurdd = q.userRoleDAO.readByRole(trans,role);
- if (rurdd.notOK()) {
- return Result.err(rurdd);
- }
- Date now = new Date();
- List<UserRoleDAO.Data> list = rurdd.value;
- List<String> rv = new ArrayList<>(list.size()); // presize
- for (UserRoleDAO.Data urdd : rurdd.value) {
- if (includeExpired || urdd.expires.after(now)) {
- rv.add(urdd.user);
- }
- }
- return Result.ok(rv);
- }
-
- public Result<Void> delUserRole(AuthzTrans trans, String user, String ns, String rname) {
- UserRoleDAO.Data urdd = new UserRoleDAO.Data();
- urdd.user = user;
- urdd.role(ns,rname);
- Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);
- if (r.status == 404 || r.isEmpty()) {
- return Result.err(Status.ERR_UserRoleNotFound,
- "UserRole [%s] [%s.%s]", user, ns, rname);
- }
- if (r.notOK()) {
- return Result.err(r);
- }
-
- return q.userRoleDAO.delete(trans, urdd, false);
- }
-
- public Result<String> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user,
- NsDAO.Data nsd, FUTURE_OP op) {
- StringBuilder sb = new StringBuilder();
- try {
- Organization org = trans.org();
- // For Reapproval, only check Owners.. Do Supervisors, etc, separately
- List<Identity> approvers = op.equals(FUTURE_OP.A)?NO_ADDL_APPROVE:org.getApprovers(trans, user);
- List<Identity> owners = new ArrayList<>();
- if (nsd != null) {
- Result<List<UserRoleDAO.Data>> rrbr = q.userRoleDAO
- .readByRole(trans, nsd.name + Question.DOT_OWNER);
- if (rrbr.isOKhasData()) {
- for(UserRoleDAO.Data urd : rrbr.value) {
- Identity owner = org.getIdentity(trans, urd.user);
- if(owner==null) {
- return Result.err(Result.ERR_NotFound,urd.user + " is not a Valid Owner of " + nsd.name);
- } else {
- owners.add(owner);
- }
- }
- }
- }
-
- if(owners.isEmpty()) {
- return Result.err(Result.ERR_NotFound,"No Owners found for " + nsd.name);
- }
-
- // Create Future Object
-
- Result<FutureDAO.Data> fr = q.futureDAO.create(trans, data, id);
- if (fr.isOK()) {
- sb.append("Created Future: ");
- sb.append(data.id);
- // Use Future ID as ticket for Approvals
- final UUID ticket = fr.value.id;
- sb.append(", Approvals: ");
- Boolean first[] = new Boolean[]{true};
- if(op!=FUTURE_OP.A) {
- for (Identity u : approvers) {
- Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,org.getApproverType());
- if(r.notOK()) {
- return Result.err(r);
- }
- }
- }
- for (Identity u : owners) {
- Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,"owner");
- if(r.notOK()) {
- return Result.err(r);
- }
- }
- }
- } catch (Exception e) {
- return Result.err(e);
- }
-
- return Result.ok(sb.toString());
- }
-
- /*
- * This interface allows performFutureOp to work with either Realtime Data or Batched lookups (see Expiring)
- */
- public interface Lookup<T> {
- T get(AuthzTrans trans, Object ... keys);
- }
-
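- // Realtime lookup of a single UserRole by key; returns null when not found.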
- public Lookup<UserRoleDAO.Data> urDBLookup = new Lookup<UserRoleDAO.Data>() {
- @Override
- public UserRoleDAO.Data get(AuthzTrans trans, Object ... keys) {
- Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, keys);
- if(r.isOKhasData()) {
- return r.value.get(0);
- } else {
- return null;
- }
- }
- };
-
- /**
- * Note: if "allApprovalsForTicket" is null, it will be looked up.
- * If "fdd" is null, it will be looked up as well.
- *
- * Both can be passed in for performance reasons.
- *
- * @param trans
- * @param cd
- * @param allApprovalsForTicket
- * @return
- */
- public Result<OP_STATUS> performFutureOp(final AuthzTrans trans, FUTURE_OP fop, FutureDAO.Data curr, Lookup<List<ApprovalDAO.Data>> la, Lookup<UserRoleDAO.Data> lur) {
- // Pre-Evaluate if ReApproval is already done.
- UserRoleDAO.Data urdd = null;
- if(fop.equals(FUTURE_OP.A) && curr.target.equals(FOP_USER_ROLE) && curr.construct!=null) {
- try {
- // Get Expected UserRole from Future
- urdd = new UserRoleDAO.Data();
- urdd.reconstitute(curr.construct);
- // Get Current UserRole from lookup
- UserRoleDAO.Data lurdd = lur.get(trans, urdd.user,urdd.role);
- if(lurdd==null) {
- q.futureDAO.delete(trans, curr, false);
- return OP_STATUS.RL;
- } else {
- if(curr.expires.compareTo(lurdd.expires)<0) {
- q.futureDAO.delete(trans, curr, false);
- return OP_STATUS.RL;
- }
- }
- } catch (IOException e) {
- return Result.err(Result.ERR_BadData,"Cannot reconstitute %1",curr.memo);
- }
- }
-
- boolean aDenial = false;
- int cntSuper=0, appSuper=0,cntOwner=0, appOwner=0;
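- // Tally approved/pending Approvals by type (owner, supervisor); any "denied" status marks a denial.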
- for(ApprovalDAO.Data add : la.get(trans)) {
- switch(add.status) {
- case "approved":
- if("owner".equals(add.type)) {
- ++cntOwner;
- ++appOwner;
- } else if("supervisor".equals(add.type)) {
- ++cntSuper;
- ++appSuper;
- }
- break;
- case "pending":
- if("owner".equals(add.type)) {
- ++cntOwner;
- } else if("supervisor".equals(add.type)) {
- ++cntSuper;
- }
- break;
- case "denied":
- aDenial=true;
- break;
- }
- }
-
- Result<OP_STATUS> ros=null;
- if(aDenial) {
- // Note: Denial will be Audit-logged.
-// for (ApprovalDAO.Data ad : allApprovalsForTicket.value) {
-// q.approvalDAO.delete(trans, ad, false);
-// }
- ros = OP_STATUS.RD;
- if(q.futureDAO.delete(trans, curr, false).notOK()) {
- trans.info().printf("Future %s could not be deleted", curr.id.toString());
- } else {
- if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
- // A Denial means we must remove UserRole
- if(fop.equals(FUTURE_OP.U) || fop.equals(FUTURE_OP.A)) {
- UserRoleDAO.Data data = new UserRoleDAO.Data();
- try {
- data.reconstitute(curr.construct);
- } catch (IOException e) {
- trans.error().log("Cannot reconstitute",curr.memo);
- }
- ros = set(OP_STATUS.RD,delUserRole(trans, data.user, data.ns, data.rname));
- }
- }
- }
- }
-
- // Decision: If not Denied, and at least owner, if exists, and at least one Super, if exists
- boolean goDecision = (cntOwner>0?appOwner>0:true) && (cntSuper>0?appSuper>0:true);
-
- if(goDecision) {
- // should check if any other pendings before performing
- // actions
- try {
- if (FOP_ROLE.equalsIgnoreCase(curr.target)) {
- RoleDAO.Data data = new RoleDAO.Data();
- data.reconstitute(curr.construct);
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,q.roleDAO.dao().create(trans, data));
- break;
- case D:
- ros = set(OP_STATUS.RE,deleteRole(trans, data, true, true));
- break;
- default:
- }
- } else if (FOP_PERM.equalsIgnoreCase(curr.target)) {
- PermDAO.Data pdd = new PermDAO.Data();
- pdd.reconstitute(curr.construct);
- Set<String> roles;
- Result<RoleDAO.Data> rrdd;
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,createPerm(trans, pdd, true));
- break;
- case D:
- ros = set(OP_STATUS.RE,deletePerm(trans, pdd, true, true));
- break;
- case G:
- roles = pdd.roles(true);
- for (String roleStr : roles) {
- rrdd = RoleDAO.Data.decode(trans, q, roleStr);
- if (rrdd.isOKhasData()) {
- ros = set(OP_STATUS.RE,addPermToRole(trans, rrdd.value, pdd, true));
- } else {
- trans.error().log(rrdd.errorString());
- }
- }
- break;
- case UG:
- roles = pdd.roles(true);
- for (String roleStr : roles) {
- rrdd = RoleDAO.Data.decode(trans, q, roleStr);
- if (rrdd.isOKhasData()) {
- ros = set(OP_STATUS.RE,delPermFromRole(trans, rrdd.value, pdd, true));
- } else {
- trans.error().log(rrdd.errorString());
- }
- }
- break;
- default:
- }
- } else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
- if(urdd==null) {
- urdd = new UserRoleDAO.Data();
- urdd.reconstitute(curr.construct);
- }
- // if I am the last to approve, create user role
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,addUserRole(trans, urdd));
- break;
- case U:
- case A:
- ros = set(OP_STATUS.RE,extendUserRole(trans,urdd,true));
- break;
- default:
- }
- } else if (FOP_NS.equalsIgnoreCase(curr.target)) {
- Namespace namespace = new Namespace();
- namespace.reconstitute(curr.construct);
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,createNS(trans, namespace, true));
- break;
- default:
- }
- } else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) {
- DelegateDAO.Data data = new DelegateDAO.Data();
- data.reconstitute(curr.construct);
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,q.delegateDAO.create(trans, data));
- break;
- case U:
- ros = set(OP_STATUS.RE,q.delegateDAO.update(trans, data));
- break;
- default:
- }
- } else if (FOP_CRED.equalsIgnoreCase(curr.target)) {
- CredDAO.Data data = new CredDAO.Data();
- data.reconstitute(curr.construct);
- switch(fop) {
- case C:
- ros = set(OP_STATUS.RE,q.credDAO.dao().create(trans, data));
- break;
- default:
- }
- }
- } catch (Exception e) {
- trans.error().log("Exception: ", e.getMessage(),
- " \n occurred while performing", curr.memo,
- " from Ticket ", curr.id.toString());
- }
- q.futureDAO.delete(trans, curr, false);
- } // end if (goDecision)
- if(ros==null) {
- //return Result.err(Status.ACC_Future, "Full Approvals not obtained: No action taken");
- ros = OP_STATUS.RP;
- }
-
- return ros;
- }
-
- // Convenience method for setting OP_STATUS Results
- private Result<OP_STATUS> set(Result<OP_STATUS> rs, Result<?> orig) {
- if(orig.isOK()) {
- return rs;
- } else {
- return Result.err(orig);
- }
- }
-
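- // Creates a PENDING Approval record for one approver and appends "user:ticket" to the summary.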
- private Result<ApprovalDAO.Data> addIdentity(AuthzTrans trans, StringBuilder sb,
- Boolean[] first, String user, String memo, FUTURE_OP op, Identity u, UUID ticket, String type) throws OrganizationException {
- ApprovalDAO.Data ad = new ApprovalDAO.Data();
- // Note ad.id is set by ApprovalDAO Create
- ad.ticket = ticket;
- ad.user = user;
- ad.approver = u.fullID();
- ad.status = ApprovalDAO.PENDING;
- ad.memo = memo;
- ad.type = type;
- ad.operation = op.name();
- // Note ad.updated is created in System
- Result<ApprovalDAO.Data> r = q.approvalDAO.create(trans,ad);
- if(r.isOK()) {
- if(first[0]) {
- first[0] = false;
- } else {
- sb.append(", ");
- }
- sb.append(r.value.user);
- sb.append(':');
- sb.append(r.value.ticket);
- return r;
- } else {
- return Result.err(Status.ERR_ActionNotCompleted,
- "Approval for %s, %s could not be created: %s",
- ad.user, ad.approver,
- r.details, sb.toString());
- }
- }
-
- public Executor newExecutor(AuthzTrans trans) {
- return new CassExecutor(trans, this);
- }
+ private static final String CANNOT_BE_THE_OWNER_OF_A_NAMESPACE = "%s(%s) cannot be the owner of the namespace '%s'. Owners %s.";
+
+ public enum FUTURE_OP {
+ C("Create"),U("Update"),D("Delete"),G("Grant"),UG("UnGrant"),A("Approval");
+
+ private String desc;
+
+ private FUTURE_OP(String desc) {
+ this.desc = desc;
+ }
+
+ public String desc() {
+ return desc;
+ }
+
+ /**
+ * Same as valueOf(), but passes back null instead of throwing Exception
+ * @param value
+ * @return
+ */
+ public static FUTURE_OP toFO(String value) {
+ if(value!=null) {
+ for(FUTURE_OP fo : values()) {
+ if(fo.name().equals(value)){
+ return fo;
+ }
+ }
+ }
+ return null;
+ }
+ }
+
+ public enum OP_STATUS {
+ E("Executed"),D("Denied"),P("Pending"),L("Lapsed");
+
+ private String desc;
+ public final static Result<OP_STATUS> RE = Result.ok(OP_STATUS.E);
+ public final static Result<OP_STATUS> RD = Result.ok(OP_STATUS.D);
+ public final static Result<OP_STATUS> RP = Result.ok(OP_STATUS.P);
+ public final static Result<OP_STATUS> RL = Result.ok(OP_STATUS.L);
+
+ private OP_STATUS(String desc) {
+ this.desc = desc;
+ }
+
+ public String desc() {
+ return desc;
+ }
+
+ }
+
+ public static final String FOP_CRED = "cred";
+ public static final String FOP_DELEGATE = "delegate";
+ public static final String FOP_NS = "ns";
+ public static final String FOP_PERM = "perm";
+ public static final String FOP_ROLE = "role";
+ public static final String FOP_USER_ROLE = "user_role";
+ private static final List<Identity> NO_ADDL_APPROVE = new ArrayList<>();
+ private static final String ROOT_NS = Define.ROOT_NS();
+ // First Action should ALWAYS be "write", see "CreateRole"
+ public final Question q;
+
+ public Function(AuthzTrans trans, Question question) {
+ q = question;
+ }
+
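+ // Gathers details and message variables from failed Results so processing
+ // can continue and report all accumulated errors at once.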
+ private class ErrBuilder {
+ private StringBuilder sb;
+ private List<String> ao;
+
+ public void log(Result<?> result) {
+ if (result.notOK()) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ ao = new ArrayList<>();
+ }
+ sb.append(result.details);
+ sb.append('\n');
+ for (String s : result.variables) {
+ ao.add(s);
+ }
+ }
+ }
+
+ public String[] vars() {
+ String[] rv = new String[ao.size()];
+ ao.toArray(rv);
+ return rv;
+ }
+
+ public boolean hasErr() {
+ return sb != null;
+ }
+
+ @Override
+ public String toString() {
+ return sb == null ? "" : String.format(sb.toString(), ao);
+ }
+ }
+
+ /**
+ * createNS
+ *
+ * Create Namespace
+ *
+ * @param trans
+ * @param org
+ * @param ns
+ * @param user
+ * @return
+ * @throws DAOException
+ *
+ * To create an NS, you need to:
+ * 1) Validate permission to modify the parent NS
+ * 2) Does the NS exist already?
+ * 3) Create the NS, with a) "user" as owner. NOTE: Per 10-15 request for AAF 1.0
+ * 4) Loop through Roles with the Parent NS, and map any that start with this NS into this one
+ * 5) Loop through Perms with the Parent NS, and map any that start with this NS into this one
+ */
+ public Result<Void> createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) {
+ Result<?> rq;
+// if (namespace.name.endsWith(Question.DOT_ADMIN)
+// || namespace.name.endsWith(Question.DOT_OWNER)) {
+// return Result.err(Status.ERR_BadData,
+// "'admin' and 'owner' are reserved names in AAF");
+// }
+
+ try {
+ for (String u : namespace.owner) {
+ Organization org = trans.org();
+ Identity orgUser = org.getIdentity(trans, u);
+ String reason;
+ if (orgUser == null) {
+ return Result.err(Status.ERR_Policy,"%s is not a valid user at %s",u,org.getName());
+ } else if((reason=orgUser.mayOwn())!=null) {
+ if (org.isTestEnv()) {
+ String reason2;
+ if((reason2=org.validate(trans, Policy.AS_RESPONSIBLE,new CassExecutor(trans, this), u))!=null) { // can masquerade as responsible
+ trans.debug().log(reason2);
+ return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name,reason);
+ }
+ // a null means ok
+ } else {
+ if(orgUser.isFound()) {
+ return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,orgUser.fullName(),orgUser.id(),namespace.name, reason);
+ } else {
+ return Result.err(Status.ERR_Policy,u + " is an invalid Identity");
+ }
+ }
+ }
+ }
+ } catch (Exception e) {
+ trans.error().log(e,
+ "Could not contact Organization for User Validation");
+ }
+
+ String user = trans.user();
+ // 1) May Change Parent?
+ int idx = namespace.name.lastIndexOf('.');
+ String parent;
+ if (idx < 0) {
+ if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "create")) {
+ return Result.err(Result.ERR_Security,
+ "%s may not create Root Namespaces", user);
+ }
+ parent = null;
+ fromApproval = true;
+ } else {
+ parent = namespace.name.substring(0, idx); // get Parent String
+ }
+
+ Result<NsDAO.Data> rparent = q.deriveNs(trans, parent);
+ if (rparent.notOK()) {
+ return Result.err(rparent);
+ }
+ if (!fromApproval) {
+ rparent = q.mayUser(trans, user, rparent.value, Access.write);
+ if (rparent.notOK()) {
+ return Result.err(rparent);
+ }
+ }
+ parent = namespace.parent = rparent.value.name; // Correct Namespace from real data
+
+ // 2) Does requested NS exist
+ if (q.nsDAO.read(trans, namespace.name).isOKhasData()) {
+ return Result.err(Status.ERR_ConflictAlreadyExists,
+ "Target Namespace already exists");
+ }
+
+ // Someone must be responsible.
+ if (namespace.owner == null || namespace.owner.isEmpty()) {
+ return Result
+ .err(Status.ERR_Policy,
+ "Namespaces must be assigned at least one responsible party");
+ }
+
+ // 3) Create NS
+ Date now = new Date();
+
+ Result<Void> r;
+ // 3a) Admin
+
+ try {
+ // Originally, added the enterer as Admin, but that's not necessary,
+ // or helpful for Operations folks..
+ // Admins can be empty, because they can be changed by lower level
+ // NSs
+ // if(ns.admin(false).isEmpty()) {
+ // ns.admin(true).add(user);
+ // }
+ if (namespace.admin != null) {
+ for (String u : namespace.admin) {
+ if ((r = checkValidID(trans, now, u)).notOK()) {
+ return r;
+ }
+ }
+ }
+
+ // 3b) Responsible
+ Organization org = trans.org();
+ for (String u : namespace.owner) {
+ Identity orgUser = org.getIdentity(trans, u);
+ if (orgUser == null) {
+ return Result
+ .err(Status.ERR_BadData,
+ "NS must be created with an %s approved Responsible Party",
+ org.getName());
+ }
+ }
+ } catch (Exception e) {
+ return Result.err(Status.ERR_UserNotFound, e.getMessage());
+ }
+
+ // VALIDATIONS done... Add NS
+ if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) {
+ return Result.err(rq);
+ }
+
+ // Since Namespace is now created, we need to grab all subsequent errors
+ ErrBuilder eb = new ErrBuilder();
+
+ // Add UserRole(s)
+ UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+ urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime();
+ urdd.role(namespace.name, Question.ADMIN);
+ for (String admin : namespace.admin) {
+ urdd.user = admin;
+ eb.log(q.userRoleDAO.create(trans, urdd));
+ }
+ urdd.role(namespace.name,Question.OWNER);
+ for (String owner : namespace.owner) {
+ urdd.user = owner;
+ eb.log(q.userRoleDAO.create(trans, urdd));
+ }
+
+ addNSAdminRolesPerms(trans, eb, namespace.name);
+
+ addNSOwnerRolesPerms(trans, eb, namespace.name);
+
+ if (parent != null) {
+ // Build up with any errors
+
+ String targetNs = rparent.value.name; // Get the Parent Namespace, not target
+ String targetName = namespace.name.substring(targetNs.length() + 1); // Remove the Parent Namespace from the Target + a dot, and you'll get the name
+ int targetNameDot = targetName.length() + 1;
+
+ // 4) Change any roles with children matching this NS, and
+ Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readChildren(trans, targetNs, targetName);
+ if (rrdc.isOKhasData()) {
+ for (RoleDAO.Data rdd : rrdc.value) {
+ // Remove old Role from Perms, save them off
+ List<PermDAO.Data> lpdd = new ArrayList<>();
+ for(String p : rdd.perms(false)) {
+ Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
+ if(rpdd.isOKhasData()) {
+ PermDAO.Data pdd = rpdd.value;
+ lpdd.add(pdd);
+ q.permDAO.delRole(trans, pdd, rdd);
+ } else{
+ trans.error().log(rpdd.errorString());
+ }
+ }
+
+ // Save off Old keys
+ String delP1 = rdd.ns;
+ String delP2 = rdd.name;
+
+ // Write in new key
+ rdd.ns = namespace.name;
+ rdd.name = (delP2.length() > targetNameDot) ? delP2
+ .substring(targetNameDot) : "";
+
+ // Need to use non-cached, because switching namespaces, not
+ // "create" per se
+ if ((rq = q.roleDAO.create(trans, rdd)).isOK()) {
+ // Put Role back into Perm, with correct info
+ for(PermDAO.Data pdd : lpdd) {
+ q.permDAO.addRole(trans, pdd, rdd);
+ }
+ // Change data for User Roles
+ Result<List<UserRoleDAO.Data>> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName());
+ if(rurd.isOKhasData()) {
+ for(UserRoleDAO.Data urd : rurd.value) {
+ urd.ns = rdd.ns;
+ urd.rname = rdd.name;
+ q.userRoleDAO.update(trans, urd);
+ }
+ }
+ // Now delete old one
+ rdd.ns = delP1;
+ rdd.name = delP2;
+ if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) {
+ eb.log(rq);
+ }
+ } else {
+ eb.log(rq);
+ }
+ }
+ }
+
+ // 4) Change any Permissions with children matching this NS, and
+ Result<List<PermDAO.Data>> rpdc = q.permDAO.readChildren(trans,targetNs, targetName);
+ if (rpdc.isOKhasData()) {
+ for (PermDAO.Data pdd : rpdc.value) {
+ // Remove old Perm from Roles, save them off
+ List<RoleDAO.Data> lrdd = new ArrayList<>();
+
+ for(String rl : pdd.roles(false)) {
+ Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
+ if(rrdd.isOKhasData()) {
+ RoleDAO.Data rdd = rrdd.value;
+ lrdd.add(rdd);
+ q.roleDAO.delPerm(trans, rdd, pdd);
+ } else{
+ trans.error().log(rrdd.errorString());
+ }
+ }
+
+ // Save off Old keys
+ String delP1 = pdd.ns;
+ String delP2 = pdd.type;
+ pdd.ns = namespace.name;
+ pdd.type = (delP2.length() > targetNameDot) ? delP2
+ .substring(targetNameDot) : "";
+ if ((rq = q.permDAO.create(trans, pdd)).isOK()) {
+ // Put Role back into Perm, with correct info
+ for(RoleDAO.Data rdd : lrdd) {
+ q.roleDAO.addPerm(trans, rdd, pdd);
+ }
+
+ pdd.ns = delP1;
+ pdd.type = delP2;
+ if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) {
+ eb.log(rq);
+ // } else {
+ // Need to invalidate directly, because we're
+ // switching places in NS, not normal cache behavior
+ // q.permDAO.invalidate(trans,pdd);
+ }
+ } else {
+ eb.log(rq);
+ }
+ }
+ }
+ if (eb.hasErr()) {
+ return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars());
+ }
+ }
+ return Result.ok();
+ }
+
+ private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
+ // Admin Role/Perm
+ RoleDAO.Data rd = new RoleDAO.Data();
+ rd.ns = ns;
+ rd.name = "admin";
+ rd.description = "AAF Namespace Administrators";
+
+ PermDAO.Data pd = new PermDAO.Data();
+ pd.ns = ns;
+ pd.type = "access";
+ pd.instance = Question.ASTERIX;
+ pd.action = Question.ASTERIX;
+ pd.description = "AAF Namespace Write Access";
+
+ rd.perms = new HashSet<>();
+ rd.perms.add(pd.encode());
+ eb.log(q.roleDAO.create(trans, rd));
+
+ pd.roles = new HashSet<>();
+ pd.roles.add(rd.encode());
+ eb.log(q.permDAO.create(trans, pd));
+ }
+
+ private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) {
+ RoleDAO.Data rd = new RoleDAO.Data();
+ rd.ns = ns;
+ rd.name = "owner";
+ rd.description = "AAF Namespace Owners";
+
+ PermDAO.Data pd = new PermDAO.Data();
+ pd.ns = ns;
+ pd.type = "access";
+ pd.instance = Question.ASTERIX;
+ pd.action = Question.READ;
+ pd.description = "AAF Namespace Read Access";
+
+ rd.perms = new HashSet<>();
+ rd.perms.add(pd.encode());
+ eb.log(q.roleDAO.create(trans, rd));
+
+ pd.roles = new HashSet<>();
+ pd.roles.add(rd.encode());
+ eb.log(q.permDAO.create(trans, pd));
+ }
+
+ /**
+ * deleteNS
+ *
+ * Delete Namespace
+ *
+ * @param trans
+ * @param ns
+ * @return
+ *
+ * To delete an NS, you need to:
+ *  1) validate permission to modify this NS
+ *  2) Find all Roles with this NS; if Force, delete them, else move them to the Parent NS
+ *  3) Find all Perms with this NS; if Force, delete them, else move them to the Parent NS
+ *  4) Find all IDs associated with this NS, and deny if any exist
+ *  5) Remove the NS
+ */
+ public Result<Void> deleteNS(AuthzTrans trans, String ns) {
+ boolean force = trans.requested(REQD_TYPE.force);
+ boolean move = trans.requested(REQD_TYPE.move);
+ // 1) Validate
+ Result<List<NsDAO.Data>> nsl;
+ if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) {
+ return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns);
+ }
+ NsDAO.Data nsd = nsl.value.get(0);
+ NsType nt;
+ if (move && !q.canMove(nt = NsType.fromType(nsd.type))) {
+ return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name());
+ }
+
+ Result<NsDAO.Data> dnr = q.mayUser(trans, trans.user(), nsd, Access.write);
+ if (dnr.status != Status.OK) {
+ return Result.err(dnr);
+ }
+
+ // 2) Find Parent
+ String user = trans.user();
+ int idx = ns.lastIndexOf('.');
+ NsDAO.Data parent;
+ if (idx < 0) {
+ if (!q.isGranted(trans, user, ROOT_NS,Question.NS, ".", "delete")) {
+ return Result.err(Result.ERR_Security,
+ "%s may not delete Root Namespaces", user);
+ }
+ parent = null;
+ } else {
+ Result<NsDAO.Data> rlparent = q.deriveNs(trans, ns.substring(0, idx));
+ if (rlparent.notOKorIsEmpty()) {
+ return Result.err(rlparent);
+ }
+ parent = rlparent.value;
+ }
+
+ // Build up any errors;
+ // below, sb != null is the indication of an error
+ StringBuilder sb = null;
+ ErrBuilder er = new ErrBuilder();
+
+ // 2a) Deny if any IDs on Namespace
+ Result<List<CredDAO.Data>> creds = q.credDAO.readNS(trans, ns);
+ if (creds.isOKhasData()) {
+ if (force || move) {
+ for (CredDAO.Data cd : creds.value) {
+ er.log(q.credDAO.delete(trans, cd, false));
+ // Since we're deleting all the creds, we should delete all
+ // the user Roles for that Cred
+ Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
+ .readByUser(trans, cd.id);
+ if (rlurd.isOK()) {
+ for (UserRoleDAO.Data data : rlurd.value) {
+ q.userRoleDAO.delete(trans, data, false);
+ }
+ }
+
+ }
+ } else {
+ // first possible StringBuilder Create.
+ sb = new StringBuilder();
+ sb.append('[');
+ sb.append(ns);
+ sb.append("] contains users");
+ }
+ }
+
+ // 2b) Find (or delete if forced flag is set) dependencies
+ // First, find if NS Perms are the only ones
+ Result<List<PermDAO.Data>> rpdc = q.permDAO.readNS(trans, ns);
+ if (rpdc.isOKhasData()) {
+ // There are NS perms; count the non-"access" perms, since the NS
+ // bootstrap perms alone do not block deletion.
+ // FYI, if we deleted them now and the NS were not deleted, it would be
+ // left in an inconsistent state.
+ boolean nonaccess = false;
+ for (PermDAO.Data pdd : rpdc.value) {
+ if (!"access".equals(pdd.type)) {
+ nonaccess = true;
+ break;
+ }
+ }
+ if (nonaccess && !force && !move) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ sb.append('[');
+ sb.append(ns);
+ sb.append("] contains ");
+ } else {
+ sb.append(", ");
+ }
+ sb.append("permissions");
+ }
+ }
+
+ Result<List<RoleDAO.Data>> rrdc = q.roleDAO.readNS(trans, ns);
+ if (rrdc.isOKhasData()) {
+ // There are NS roles; count the roles other than "admin" and "owner",
+ // since the NS bootstrap roles alone do not block deletion.
+ // FYI, if we deleted them now and the NS were not deleted, it would be
+ // left in an inconsistent state.
+ int count = rrdc.value.size();
+ for (RoleDAO.Data rdd : rrdc.value) {
+ if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) {
+ --count;
+ }
+ }
+ if (count > 0 && !force && !move) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ sb.append('[');
+ sb.append(ns);
+ sb.append("] contains ");
+ } else {
+ sb.append(", ");
+ }
+ sb.append("roles");
+ }
+ }
+
+ // 2c) Deny if dependencies exist that would be moved to root level
+ // parent is root level parent here. Need to find closest parent ns that
+ // exists
+ if (sb != null) {
+ if (!force && !move) {
+ sb.append(".\n Delete dependencies and try again. Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent.");
+ return Result.err(Status.ERR_DependencyExists, sb.toString());
+ }
+
+ if (move && (parent == null || parent.type == NsType.COMPANY.type)) {
+ return Result
+ .err(Status.ERR_DependencyExists,
+ "Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again",
+ parent.name);
+ }
+ } else if (move && parent != null) {
+ sb = new StringBuilder();
+ // 3) Change any roles with children matching this NS, and
+ moveRoles(trans, parent, sb, rrdc);
+ // 4) Change any Perms with children matching this NS, and
+ movePerms(trans, parent, sb, rpdc);
+ }
+
+ if (sb != null && sb.length() > 0) {
+ return Result.err(Status.ERR_DependencyExists, sb.toString());
+ }
+
+ if (er.hasErr()) {
+ if (trans.debug().isLoggable()) {
+ trans.debug().log(er.toString());
+ }
+ return Result.err(Status.ERR_DependencyExists,
+ "Namespace members cannot be deleted for %s", ns);
+ }
+
+ // 5) OK... good to go for NS Deletion...
+ if (!rpdc.isEmpty()) {
+ for (PermDAO.Data perm : rpdc.value) {
+ deletePerm(trans, perm, true, true);
+ }
+ }
+ if (!rrdc.isEmpty()) {
+ for (RoleDAO.Data role : rrdc.value) {
+ deleteRole(trans, role, true, true);
+ }
+ }
+
+ return q.nsDAO.delete(trans, nsd, false);
+ }
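+ // Caller sketch (hedged; "org.example.retired" is a hypothetical namespace):
+ // without force/move requested on the transaction, any remaining creds,
+ // roles or perms surface as ERR_DependencyExists instead of being removed.
+ private void sketchDeleteNS(AuthzTrans trans) {
+     Result<Void> rv = deleteNS(trans, "org.example.retired");
+     if (rv.notOK()) {
+         trans.error().log(rv.errorString());
+     }
+ }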
+
+ public Result<List<String>> getOwners(AuthzTrans trans, String ns,
+ boolean includeExpired) {
+ return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired);
+ }
+
+ private Result<Void> mayAddOwner(AuthzTrans trans, String ns, String id) {
+ Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ Identity user;
+ Organization org = trans.org();
+ try {
+ if ((user = org.getIdentity(trans, id)) == null) {
+ return Result.err(Status.ERR_Policy,
+ "%s reports that this is not a valid credential",
+ org.getName());
+ }
+ String reason;
+ if ((reason=user.mayOwn())==null) {
+ return Result.ok();
+ } else {
+ if (org.isTestEnv()) {
+ String reason2;
+ if((reason2 = org.validate(trans, Policy.AS_RESPONSIBLE, new CassExecutor(trans, this), id))==null) {
+ return Result.ok();
+ } else {
+ trans.debug().log(reason2);
+ }
+ }
+ return Result.err(Status.ERR_Policy,CANNOT_BE_THE_OWNER_OF_A_NAMESPACE,user.fullName(),user.id(),ns, reason);
+ }
+ } catch (Exception e) {
+ return Result.err(e);
+ }
+ }
+
+ private Result<Void> mayAddAdmin(AuthzTrans trans, String ns, String id) {
+ // Does NS Exist?
+ Result<Void> r = checkValidID(trans, new Date(), id);
+ if (r.notOK()) {
+ return r;
+ }
+ // Is id able to be an Admin
+ Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+ if (rq.notOK()) {
+ Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
+ if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
+ return Result.err(rq);
+ }
+ }
+ return r;
+ }
+
+ private Result<Void> checkValidID(AuthzTrans trans, Date now, String user) {
+ Organization org = trans.org();
+ if (org.supportsRealm(user)) {
+ try {
+ if (org.getIdentity(trans, user) == null) {
+ return Result.err(Status.ERR_Denied,
+ "%s reports that %s is a faulty ID", org.getName(),
+ user);
+ }
+ return Result.ok();
+ } catch (Exception e) {
+ return Result.err(Result.ERR_Security,
+ "%s is not a valid %s Credential", user, org.getName());
+ }
+ //TODO find out how to make sure good ALTERNATE OAUTH DOMAIN USER
+// } else if(user.endsWith(ALTERNATE OAUTH DOMAIN)) {
+// return Result.ok();
+ } else {
+ Result<List<CredDAO.Data>> cdr = q.credDAO.readID(trans, user);
+ if (cdr.notOKorIsEmpty()) {
+ return Result.err(Status.ERR_Security,
+ "%s is not a valid AAF Credential", user);
+ }
+
+ for (CredDAO.Data cd : cdr.value) {
+ if (cd.expires.after(now)) {
+ return Result.ok();
+ }
+ }
+ }
+ return Result.err(Result.ERR_Security, "%s has expired", user);
+ }
+
+ public Result<Void> delOwner(AuthzTrans trans, String ns, String id) {
+ Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ return delUserRole(trans, id, ns,Question.OWNER);
+ }
+
+ public Result<List<String>> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) {
+ return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired);
+ }
+
+ public Result<Void> delAdmin(AuthzTrans trans, String ns, String id) {
+ Result<NsDAO.Data> rq = q.deriveNs(trans, ns);
+ if (rq.notOK()) {
+ return Result.err(rq);
+ }
+
+ rq = q.mayUser(trans, trans.user(), rq.value, Access.write);
+ if (rq.notOK()) {
+ // Even though not a "writer", Owners still determine who gets to be an Admin
+ Result<List<UserRoleDAO.Data>> ruinr = q.userRoleDAO.readUserInRole(trans, trans.user(),ns+".owner");
+ if(!(ruinr.isOKhasData() && ruinr.value.get(0).expires.after(new Date()))) {
+ return Result.err(rq);
+ }
+ }
+
+ return delUserRole(trans, id, ns, Question.ADMIN);
+ }
+
+ /**
+ * Helper function that moves permissions from a namespace being deleted to
+ * its parent namespace
+ *
+ * @param trans
+ * @param parent
+ * @param sb
+ * @param rpdc
+ * - list of permissions in namespace being deleted
+ */
+ private void movePerms(AuthzTrans trans, NsDAO.Data parent,
+ StringBuilder sb, Result<List<PermDAO.Data>> rpdc) {
+
+ Result<Void> rv;
+ Result<PermDAO.Data> pd;
+
+ if (rpdc.isOKhasData()) {
+ for (PermDAO.Data pdd : rpdc.value) {
+ String delP2 = pdd.type;
+ if ("access".equals(delP2)) {
+ continue;
+ }
+ // Remove old Perm from Roles, save them off
+ List<RoleDAO.Data> lrdd = new ArrayList<>();
+
+ for(String rl : pdd.roles(false)) {
+ Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans,q,rl);
+ if(rrdd.isOKhasData()) {
+ RoleDAO.Data rdd = rrdd.value;
+ lrdd.add(rdd);
+ q.roleDAO.delPerm(trans, rdd, pdd);
+ } else{
+ trans.error().log(rrdd.errorString());
+ }
+ }
+
+ // Save off Old keys
+ String delP1 = pdd.ns;
+ NsSplit nss = new NsSplit(parent, pdd.fullType());
+ pdd.ns = nss.ns;
+ pdd.type = nss.name;
+ // Use direct Create/Delete, because switching namespaces
+ if ((pd = q.permDAO.create(trans, pdd)).isOK()) {
+ // Put Perm back into each Role, with correct info
+ for(RoleDAO.Data rdd : lrdd) {
+ q.roleDAO.addPerm(trans, rdd, pdd);
+ }
+
+ pdd.ns = delP1;
+ pdd.type = delP2;
+ if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) {
+ sb.append(rv.details);
+ sb.append('\n');
+ // } else {
+ // Need to invalidate directly, because we're switching
+ // places in NS, not normal cache behavior
+ // q.permDAO.invalidate(trans,pdd);
+ }
+ } else {
+ sb.append(pd.details);
+ sb.append('\n');
+ }
+ }
+ }
+ }
+
+ /**
+ * Helper function that moves roles from a namespace being deleted to its
+ * parent namespace
+ *
+ * @param trans
+ * @param parent
+ * @param sb
+ * @param rrdc
+ * - list of roles in namespace being deleted
+ */
+ private void moveRoles(AuthzTrans trans, NsDAO.Data parent,
+ StringBuilder sb, Result<List<RoleDAO.Data>> rrdc) {
+
+ Result<Void> rv;
+ Result<RoleDAO.Data> rd;
+
+ if (rrdc.isOKhasData()) {
+ for (RoleDAO.Data rdd : rrdc.value) {
+ String delP2 = rdd.name;
+ if ("admin".equals(delP2) || "owner".equals(delP2)) {
+ continue;
+ }
+ // Remove old Role from Perms, save them off
+ List<PermDAO.Data> lpdd = new ArrayList<>();
+ for(String p : rdd.perms(false)) {
+ Result<PermDAO.Data> rpdd = PermDAO.Data.decode(trans,q,p);
+ if(rpdd.isOKhasData()) {
+ PermDAO.Data pdd = rpdd.value;
+ lpdd.add(pdd);
+ q.permDAO.delRole(trans, pdd, rdd);
+ } else{
+ trans.error().log(rpdd.errorString());
+ }
+ }
+
+ // Save off Old keys
+ String delP1 = rdd.ns;
+
+ NsSplit nss = new NsSplit(parent, rdd.fullName());
+ rdd.ns = nss.ns;
+ rdd.name = nss.name;
+ // Use direct Create/Delete, because switching namespaces
+ if ((rd = q.roleDAO.create(trans, rdd)).isOK()) {
+ // Put Role back into Perm, with correct info
+ for(PermDAO.Data pdd : lpdd) {
+ q.permDAO.addRole(trans, pdd, rdd);
+ }
+
+ rdd.ns = delP1;
+ rdd.name = delP2;
+ if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) {
+ sb.append(rv.details);
+ sb.append('\n');
+ // } else {
+ // Need to invalidate directly, because we're switching
+ // places in NS, not normal cache behavior
+ // q.roleDAO.invalidate(trans,rdd);
+ }
+ } else {
+ sb.append(rd.details);
+ sb.append('\n');
+ }
+ }
+ }
+ }
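+ // Shared re-key sketch for movePerms/moveRoles (hedged; the values and the
+ // parent-plus-remainder split are assumptions based on NsSplit): a role such
+ // as "org.example.app.deploy" moved under parent "org.example" keeps its
+ // tail ("app.deploy") as the new name before the old row is deleted.
+ private static void sketchReKey(NsDAO.Data parent, RoleDAO.Data rdd) {
+     NsSplit nss = new NsSplit(parent, rdd.fullName());
+     rdd.ns = nss.ns;       // e.g. "org.example"
+     rdd.name = nss.name;   // e.g. "app.deploy"
+ }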
+
+ /**
+ * Create the Permission (and any missing Permission between this and its Parent),
+ * provided the caller has permission to do so.
+ *
+ * Pass in the desired Management Permission for this Permission.
+ *
+ * If Force is set, the Roles listed on the Permission will also be created (if
+ * allowed) and pre-granted.
+ */
+ public Result<Void> createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) {
+ String user = trans.user();
+ // Next, see if User is allowed to Manage Parent Permission
+
+ Result<NsDAO.Data> rnsd;
+ if (!fromApproval) {
+ rnsd = q.mayUser(trans, user, perm, Access.write);
+ if (rnsd.notOK()) {
+ return Result.err(rnsd);
+ }
+ } else {
+ rnsd = q.deriveNs(trans, perm.ns);
+ }
+
+ // Does Child exist?
+ if (!trans.requested(REQD_TYPE.force)) {
+ if (q.permDAO.read(trans, perm).isOKhasData()) {
+ return Result.err(Status.ERR_ConflictAlreadyExists,
+ "Permission [%s.%s|%s|%s] already exists.", perm.ns,
+ perm.type, perm.instance, perm.action);
+ }
+ }
+
+ // Attempt to add perms to roles, creating as possible
+ Set<String> roles;
+ String pstring = perm.encode();
+
+ // For each Role
+ for (String role : roles = perm.roles(true)) {
+ Result<RoleDAO.Data> rdd = RoleDAO.Data.decode(trans,q,role);
+ if(rdd.isOKhasData()) {
+ RoleDAO.Data rd = rdd.value;
+ if (!fromApproval) {
+ // May User write to the Role in question.
+ Result<NsDAO.Data> rns = q.mayUser(trans, user, rd,
+ Access.write);
+ if (rns.notOK()) {
+ // Remove the role from Add, because
+ roles.remove(role); // Don't allow adding
+ trans.warn()
+ .log("User [%s] does not have permission to relate Permissions to Role [%s]",
+ user, role);
+ }
+ }
+
+ Result<List<RoleDAO.Data>> rlrd;
+ if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) {
+ rd.perms(true).add(pstring);
+ if (q.roleDAO.create(trans, rd).notOK()) {
+ roles.remove(role); // Role doesn't exist, and can't be
+ // created
+ }
+ } else {
+ rd = rlrd.value.get(0);
+ if (!rd.perms.contains(pstring)) {
+ q.roleDAO.addPerm(trans, rd, perm);
+ }
+ }
+ }
+ }
+
+ Result<PermDAO.Data> pdr = q.permDAO.create(trans, perm);
+ if (pdr.isOK()) {
+ return Result.ok();
+ } else {
+ return Result.err(pdr);
+ }
+ }
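+ // Construction sketch (hedged; namespace, type, instance, action and the
+ // encoded role string are hypothetical): the minimal PermDAO.Data that
+ // createPerm expects, with one pre-listed Role to be granted.
+ private Result<Void> sketchCreatePerm(AuthzTrans trans) {
+     PermDAO.Data perm = new PermDAO.Data();
+     perm.ns = "org.example.myapp";
+     perm.type = "db";
+     perm.instance = "payments";
+     perm.action = "write";
+     perm.description = "Write access to the payments DB";
+     perm.roles(true).add("org.example.myapp|dbadmin"); // encoded role; format assumed
+     return createPerm(trans, perm, false);
+ }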
+
+ public Result<Void> deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) {
+ String user = trans.user();
+
+ // Next, see if User is allowed to Manage Permission
+ Result<NsDAO.Data> rnsd;
+ if (!fromApproval) {
+ rnsd = q.mayUser(trans, user, perm, Access.write);
+ if (rnsd.notOK()) {
+ return Result.err(rnsd);
+ }
+ }
+ // Does Perm exist?
+ Result<List<PermDAO.Data>> pdr = q.permDAO.read(trans, perm);
+ if (pdr.notOKorIsEmpty()) {
+ return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.",
+ perm.ns,perm.type, perm.instance, perm.action);
+ }
+ // Get perm, but with rest of data.
+ PermDAO.Data fullperm = pdr.value.get(0);
+
+ // Attached to any Roles?
+ if (fullperm.roles != null) {
+ if (force) {
+ for (String role : fullperm.roles) {
+ Result<Void> rv = null;
+ Result<RoleDAO.Data> rrdd = RoleDAO.Data.decode(trans, q, role);
+ if(rrdd.isOKhasData()) {
+ trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete");
+ if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) {
+     trans.error().log("Error removing Role during delFromPermRole: ",
+             trans.getUserPrincipal(),
+             rv.errorString());
+ }
+ } else {
+ return Result.err(rrdd);
+ }
+ }
+ } else if (!fullperm.roles.isEmpty()) {
+ return Result
+ .err(Status.ERR_DependencyExists,
+ "Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.",
+ fullperm.ns, fullperm.type, fullperm.instance, fullperm.action);
+ }
+ }
+
+ return q.permDAO.delete(trans, fullperm, false);
+ }
+
+ public Result<Void> deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) {
+ String user = trans.user();
+
+ // Next, see if User is allowed to Manage Role
+ Result<NsDAO.Data> rnsd;
+ if (!fromApproval) {
+ rnsd = q.mayUser(trans, user, role, Access.write);
+ if (rnsd.notOK()) {
+ return Result.err(rnsd);
+ }
+ }
+
+ // Are there any Users Attached to Role?
+ Result<List<UserRoleDAO.Data>> urdr = q.userRoleDAO.readByRole(trans,role.fullName());
+ if (force) {
+ if (urdr.isOKhasData()) {
+ for (UserRoleDAO.Data urd : urdr.value) {
+ q.userRoleDAO.delete(trans, urd, false);
+ }
+ }
+ } else if (urdr.isOKhasData()) {
+ return Result.err(Status.ERR_DependencyExists,
+ "Role [%s.%s] cannot be deleted as it is used by 1 or more Users.",
+ role.ns, role.name);
+ }
+
+ // Does Role exist?
+ Result<List<RoleDAO.Data>> rdr = q.roleDAO.read(trans, role);
+ if (rdr.notOKorIsEmpty()) {
+ return Result.err(Status.ERR_RoleNotFound,
+ "Role [%s.%s] does not exist", role.ns, role.name);
+ }
+ RoleDAO.Data fullrole = rdr.value.get(0); // full key search
+
+ // Remove Self from Permissions... always, force or not. Force only applies to Dependencies (Users)
+ if (fullrole.perms != null) {
+ for (String perm : fullrole.perms(false)) {
+ Result<PermDAO.Data> rpd = PermDAO.Data.decode(trans,q,perm);
+ if (rpd.isOK()) {
+ trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete");
+
+ Result<?> r = q.permDAO.delRole(trans, rpd.value, fullrole);
+ if (r.notOK()) {
+ trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details);
+ }
+ } else {
+ trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole);
+ }
+ }
+ }
+ return q.roleDAO.delete(trans, fullrole, false);
+ }
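+ // Force-delete sketch (hedged; the role key is hypothetical): with
+ // force=true the attached UserRoles are removed first; with force=false the
+ // same call returns ERR_DependencyExists while any users remain attached.
+ private Result<Void> sketchForceDeleteRole(AuthzTrans trans) {
+     RoleDAO.Data role = new RoleDAO.Data();
+     role.ns = "org.example.myapp";
+     role.name = "deployer";
+     return deleteRole(trans, role, true, false);
+ }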
+
+ /**
+ * Only the owner of the Permission may add it to a Role.
+ *
+ * If force is set, however, the Role will be created before the Grant,
+ * provided the User is allowed to create it.
+ *
+ * @param trans
+ * @param role
+ * @param pd
+ * @return
+ */
+ public Result<Void> addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
+ String user = trans.user();
+
+ if (!fromApproval) {
+ Result<NsDAO.Data> rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY);
+ if(rRoleCo.notOK()) {
+ return Result.err(rRoleCo);
+ }
+ Result<NsDAO.Data> rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY);
+ if(rPermCo.notOK()) {
+ return Result.err(rPermCo);
+ }
+
+ // Not from same company
+ if(!rRoleCo.value.name.equals(rPermCo.value.name)) {
+ Result<Data> r;
+ // Only grant if User ALSO has Write ability in Other Company
+ if((r = q.mayUser(trans, user, role, Access.write)).notOK()) {
+ return Result.err(r);
+ }
+ }
+
+
+ // Must be Perm Admin, or Granted Special Permission
+ Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
+ if (ucp.notOK()) {
+ // Don't allow CLI potential Grantees to change their own AAF
+ // Perms,
+ if ((ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type))
+ || !q.isGranted(trans, trans.user(),ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) {
+ // Not otherwise granted
+ // TODO Needed?
+ return Result.err(ucp);
+ }
+ // Final Check... Don't allow Grantees to add to Roles they are
+ // part of
+ Result<List<UserRoleDAO.Data>> rlurd = q.userRoleDAO
+ .readByUser(trans, trans.user());
+ if (rlurd.isOK()) {
+ for (UserRoleDAO.Data ur : rlurd.value) {
+ if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) {
+ return Result.err(ucp);
+ }
+ }
+ }
+ }
+ }
+
+ Result<List<PermDAO.Data>> rlpd = q.permDAO.read(trans, pd);
+ if (rlpd.notOKorIsEmpty()) {
+ return Result.err(Status.ERR_PermissionNotFound,
+ "Permission must exist to add to Role");
+ }
+
+ // Already checked that the User can change the Role
+ Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(trans, role);
+ Result<Void> rv;
+
+ if (rlrd.notOKorIsEmpty()) {
+ if (trans.requested(REQD_TYPE.force)) {
+ Result<NsDAO.Data> ucr = q.mayUser(trans, user, role,
+ Access.write);
+ if (ucr.notOK()) {
+ return Result
+ .err(Status.ERR_Denied,
+ "Role [%s.%s] does not exist. User [%s] cannot create.",
+ role.ns, role.name, user);
+ }
+
+ role.perms(true).add(pd.encode());
+ Result<RoleDAO.Data> rdd = q.roleDAO.create(trans, role);
+ if (rdd.isOK()) {
+ rv = Result.ok();
+ } else {
+ rv = Result.err(rdd);
+ }
+ } else {
+ return Result.err(Status.ERR_RoleNotFound,
+ "Role [%s.%s] does not exist.", role.ns, role.name);
+ }
+ } else {
+ role = rlrd.value.get(0);
+ if (role.perms(false).contains(pd.encode())) {
+ return Result.err(Status.ERR_ConflictAlreadyExists,
+ "Permission [%s.%s] is already a member of role [%s,%s]",
+ pd.ns, pd.type, role.ns, role.name);
+ }
+ // added for Caching access purposes only; it doesn't affect addPerm
+ role.perms(true).add(pd.encode());
+ rv = q.roleDAO.addPerm(trans, role, pd);
+ }
+ if (rv.status == Status.OK) {
+     // exploring how to add an information message to a successful http request
+     return q.permDAO.addRole(trans, pd, role);
+ }
+ return rv;
+ }
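+ // Grant sketch (hedged; mirrors the String-based delPermFromRole convenience
+ // further below): resolve an encoded role name into a RoleDAO.Data key
+ // before calling addPermToRole.
+ private Result<Void> sketchGrant(AuthzTrans trans, String role, PermDAO.Data pd) {
+     Result<NsSplit> nss = q.deriveNsSplit(trans, role);
+     if (nss.notOK()) {
+         return Result.err(nss);
+     }
+     RoleDAO.Data rd = new RoleDAO.Data();
+     rd.ns = nss.value.ns;
+     rd.name = nss.value.name;
+     return addPermToRole(trans, rd, pd, false);
+ }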
+
+ /**
+ * Either Owner of Role or Permission may delete from Role
+ *
+ * @param trans
+ * @param role
+ * @param pd
+ * @return
+ */
+ public Result<Void> delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) {
+ String user = trans.user();
+ if (!fromApproval) {
+ Result<NsDAO.Data> ucr = q.mayUser(trans, user, role, Access.write);
+ Result<NsDAO.Data> ucp = q.mayUser(trans, user, pd, Access.write);
+
+ // If Can't change either Role or Perm, then deny
+ if (ucr.notOK() && ucp.notOK()) {
+ return Result.err(Status.ERR_Denied,
+ "User [" + trans.user()
+ + "] does not have permission to delete ["
+ + pd.encode() + "] from Role ["
+ + role.fullName() + ']');
+ }
+ }
+
+ Result<List<RoleDAO.Data>> rlr = q.roleDAO.read(trans, role);
+ if (rlr.notOKorIsEmpty()) {
+ // If Bad Data, clean out
+ Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
+ if (rlp.isOKhasData()) {
+ for (PermDAO.Data pv : rlp.value) {
+ q.permDAO.delRole(trans, pv, role);
+ }
+ }
+ return Result.err(rlr);
+ }
+ String perm1 = pd.encode();
+ boolean notFound;
+ if (trans.requested(REQD_TYPE.force)) {
+ notFound = false;
+ } else { // only check if force not set.
+ notFound = true;
+ for (RoleDAO.Data r : rlr.value) {
+ if (r.perms != null) {
+ for (String perm : r.perms) {
+ if (perm1.equals(perm)) {
+ notFound = false;
+ break;
+ }
+ }
+ if(!notFound) {
+ break;
+ }
+ }
+ }
+ }
+ if (notFound) { // Need to check both, in case of corruption
+ return Result.err(Status.ERR_PermissionNotFound,
+ "Permission [%s.%s|%s|%s] not associated with any Role",
+ pd.ns,pd.type,pd.instance,pd.action);
+ }
+
+ // Read Perm for full data
+ Result<List<PermDAO.Data>> rlp = q.permDAO.read(trans, pd);
+ Result<Void> rv = null;
+ if (rlp.isOKhasData()) {
+ for (PermDAO.Data pv : rlp.value) {
+ if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) {
+ if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) {
+ trans.error().log(
+ "Error removing Perm during delFromPermRole:",
+ trans.getUserPrincipal(), rv.errorString());
+ }
+ } else {
+ trans.error().log(
+ "Error removing Role during delFromPermRole:",
+ trans.getUserPrincipal(), rv.errorString());
+ }
+ }
+ } else {
+ rv = q.roleDAO.delPerm(trans, role, pd);
+ if (rv.notOK()) {
+ trans.error().log("Error removing Role during delFromPermRole",
+ rv.errorString());
+ }
+ }
+ return rv == null ? Result.ok() : rv;
+ }
+
+ public Result<Void> delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) {
+ Result<NsSplit> nss = q.deriveNsSplit(trans, role);
+ if (nss.notOK()) {
+ return Result.err(nss);
+ }
+ RoleDAO.Data rd = new RoleDAO.Data();
+ rd.ns = nss.value.ns;
+ rd.name = nss.value.name;
+ return delPermFromRole(trans, rd, pd, false);
+ }
+
+ /**
+ * Add a User to a Role
+ *
+ *  1) The Role must exist
+ *  2) The User must be a known Credential (a mechID is ok if it has a
+ *     Credential) or a known Organizational User
+ *
+ * @param trans
+ * @param urData
+ * @return
+ */
+ public Result<Void> addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) {
+ Result<Void> rv;
+ if(Question.ADMIN.equals(urData.rname)) {
+ rv = mayAddAdmin(trans, urData.ns, urData.user);
+ } else if(Question.OWNER.equals(urData.rname)) {
+ rv = mayAddOwner(trans, urData.ns, urData.user);
+ } else {
+ rv = checkValidID(trans, new Date(), urData.user);
+ }
+ if(rv.notOK()) {
+ return rv;
+ }
+
+ // Check if record exists
+ if (q.userRoleDAO.read(trans, urData).isOKhasData()) {
+ return Result.err(Status.ERR_ConflictAlreadyExists,
+ "User Role exists");
+ }
+ if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
+ return Result.err(Status.ERR_RoleNotFound,
+ "Role [%s.%s] does not exist", urData.ns, urData.rname);
+ }
+
+ urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime();
+
+
+ Result<UserRoleDAO.Data> udr = q.userRoleDAO.create(trans, urData);
+ switch (udr.status) {
+ case OK:
+ return Result.ok();
+ default:
+ return Result.err(udr);
+ }
+ }
+
+ public Result<Void> addUserRole(AuthzTrans trans, String user, String ns, String rname) {
+ try {
+ if(trans.org().getIdentity(trans, user)==null) {
+ return Result.err(Result.ERR_BadData,user+" is an Invalid Identity for " + trans.org().getName());
+ }
+ } catch (OrganizationException e) {
+ return Result.err(e);
+ }
+ UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+ urdd.ns = ns;
+ urdd.role(ns, rname);
+ urdd.user = user;
+ return addUserRole(trans,urdd);
+ }
+
+ /**
+ * Extend User Role.
+ *
+ * Extend the Expiration date, according to Organization rules.
+ *
+ * @param trans
+ * @param urData
+ * @param checkForExist
+ * @return
+ */
+ public Result<Void> extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) {
+ // Check if record still exists
+ if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) {
+ return Result.err(Status.ERR_UserRoleNotFound,
+ "User Role does not exist");
+ }
+
+ if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) {
+ return Result.err(Status.ERR_RoleNotFound,
+ "Role [%s.%s] does not exist", urData.ns,urData.rname);
+ }
+ // Special case for "Admin" roles. Issue brought forward with Prod
+ // problem 9/26
+ Date now = new Date();
+ GregorianCalendar gc = new GregorianCalendar();
+ gc.setTime(now.after(urData.expires)?now:urData.expires);
+ // get full time, starting today
+ urData.expires = trans.org().expiration(gc, Expiration.UserInRole).getTime();
+ return q.userRoleDAO.update(trans, urData);
+ }
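+ // Extension sketch (hedged; identifiers hypothetical): read the existing
+ // UserRole first, then let extendUserRole push the expiration forward under
+ // Organization rules (skipping the redundant existence check).
+ private Result<Void> sketchExtend(AuthzTrans trans, String user, String ns, String rname) {
+     UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+     urdd.user = user;
+     urdd.role(ns, rname);
+     Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);
+     if (r.notOKorIsEmpty()) {
+         return Result.err(Status.ERR_UserRoleNotFound, "UserRole [%s] [%s.%s]", user, ns, rname);
+     }
+     return extendUserRole(trans, r.value.get(0), false);
+ }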
+
+ // ////////////////////////////////////////////////////
+ // Special User Role Functions
+ // These exist because User Roles have Expiration dates, which must be
+ // accounted for.
+ // Also, as of July 2015, Namespace Owners and Admins are regular User Roles.
+ // ////////////////////////////////////////////////////
+ public Result<List<String>> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) {
+ Result<List<UserRoleDAO.Data>> rurdd = q.userRoleDAO.readByRole(trans,role);
+ if (rurdd.notOK()) {
+ return Result.err(rurdd);
+ }
+ Date now = new Date();
+ List<UserRoleDAO.Data> list = rurdd.value;
+ List<String> rv = new ArrayList<>(list.size()); // presize
+ for (UserRoleDAO.Data urdd : rurdd.value) {
+ if (includeExpired || urdd.expires.after(now)) {
+ rv.add(urdd.user);
+ }
+ }
+ return Result.ok(rv);
+ }
+
+ public Result<Void> delUserRole(AuthzTrans trans, String user, String ns, String rname) {
+ UserRoleDAO.Data urdd = new UserRoleDAO.Data();
+ urdd.user = user;
+ urdd.role(ns,rname);
+ Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, urdd);
+ if (r.status == 404 || r.isEmpty()) {
+ return Result.err(Status.ERR_UserRoleNotFound,
+ "UserRole [%s] [%s.%s]", user, ns, rname);
+ }
+ if (r.notOK()) {
+ return Result.err(r);
+ }
+
+ return q.userRoleDAO.delete(trans, urdd, false);
+ }
+
+ public Result<String> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user,
+ NsDAO.Data nsd, FUTURE_OP op) {
+ StringBuilder sb = new StringBuilder();
+ try {
+ Organization org = trans.org();
+ // For Reapproval, only check Owners.. Do Supervisors, etc, separately
+ List<Identity> approvers = op.equals(FUTURE_OP.A)?NO_ADDL_APPROVE:org.getApprovers(trans, user);
+ List<Identity> owners = new ArrayList<>();
+ if (nsd != null) {
+ Result<List<UserRoleDAO.Data>> rrbr = q.userRoleDAO
+ .readByRole(trans, nsd.name + Question.DOT_OWNER);
+ if (rrbr.isOKhasData()) {
+ for(UserRoleDAO.Data urd : rrbr.value) {
+ Identity owner = org.getIdentity(trans, urd.user);
+ if(owner==null) {
+ return Result.err(Result.ERR_NotFound,urd.user + " is not a Valid Owner of " + nsd.name);
+ } else {
+ owners.add(owner);
+ }
+ }
+ }
+ }
+
+ if(owners.isEmpty()) {
+ return Result.err(Result.ERR_NotFound,"No Owners found for " + nsd.name);
+ }
+
+ // Create Future Object
+
+ Result<FutureDAO.Data> fr = q.futureDAO.create(trans, data, id);
+ if (fr.isOK()) {
+ sb.append("Created Future: ");
+ sb.append(data.id);
+ // Use the Future ID as the ticket for Approvals
+ final UUID ticket = fr.value.id;
+ sb.append(", Approvals: ");
+ Boolean first[] = new Boolean[]{true};
+ if(op!=FUTURE_OP.A) {
+ for (Identity u : approvers) {
+ Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,org.getApproverType());
+ if(r.notOK()) {
+ return Result.err(r);
+ }
+ }
+ }
+ for (Identity u : owners) {
+ Result<ApprovalDAO.Data> r = addIdentity(trans,sb,first,user,data.memo,op,u,ticket,"owner");
+ if(r.notOK()) {
+ return Result.err(r);
+ }
+ }
+ }
+ } catch (Exception e) {
+ return Result.err(e);
+ }
+
+ return Result.ok(sb.toString());
+ }
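+ // Caller sketch (hedged; the FutureDAO.Data and id are assumed to be
+ // prepared by the caller, e.g. from a pending request): createFuture queues
+ // one "owner" approval per NS owner plus, for non-reapproval operations, one
+ // approval per organizational approver, and returns a summary string.
+ private void sketchQueueForApproval(AuthzTrans trans, FutureDAO.Data fdd, String id, NsDAO.Data nsd) {
+     Result<String> rs = createFuture(trans, fdd, id, trans.user(), nsd, FUTURE_OP.C);
+     if (rs.isOK()) {
+         trans.info().printf("%s", rs.value);
+     } else {
+         trans.error().log(rs.errorString());
+     }
+ }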
+
+ /*
+ * This interface is to allow performFutureOps with either Realtime Data, or Batched lookups (See Expiring)
+ */
+ public interface Lookup<T> {
+ T get(AuthzTrans trans, Object ... keys);
+ }
+
+ public Lookup<UserRoleDAO.Data> urDBLookup = new Lookup<UserRoleDAO.Data>() {
+ @Override
+ public UserRoleDAO.Data get(AuthzTrans trans, Object ... keys) {
+ Result<List<UserRoleDAO.Data>> r = q.userRoleDAO.read(trans, keys);
+ if(r.isOKhasData()) {
+ return r.value.get(0);
+ } else {
+ return null;
+ }
+ }
+ };
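+ // Batched-lookup sketch (hedged; the "user|role" key layout is hypothetical):
+ // a Lookup backed by a pre-loaded map, the shape a batch job could hand to
+ // performFutureOp instead of the realtime urDBLookup above.
+ private static Lookup<UserRoleDAO.Data> sketchMapLookup(final java.util.Map<String, UserRoleDAO.Data> preloaded) {
+     return new Lookup<UserRoleDAO.Data>() {
+         @Override
+         public UserRoleDAO.Data get(AuthzTrans trans, Object ... keys) {
+             return preloaded.get(keys[0] + "|" + keys[1]); // user, role
+         }
+     };
+ }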
+
+ /**
+ * Note: if "allApprovalsForTicket" is null, it will be looked up.
+ * If "fdd" is null, it will be looked up as well.
+ *
+ * Either can be passed in for performance reasons.
+ *
+ * @param trans
+ * @param cd
+ * @param allApprovalsForTicket
+ * @return
+ */
+ public Result<OP_STATUS> performFutureOp(final AuthzTrans trans, FUTURE_OP fop, FutureDAO.Data curr, Lookup<List<ApprovalDAO.Data>> la, Lookup<UserRoleDAO.Data> lur) {
+ // Pre-Evaluate if ReApproval is already done.
+ UserRoleDAO.Data urdd = null;
+ if(fop.equals(FUTURE_OP.A) && curr.target.equals(FOP_USER_ROLE) && curr.construct!=null) {
+ try {
+ // Get Expected UserRole from Future
+ urdd = new UserRoleDAO.Data();
+ urdd.reconstitute(curr.construct);
+ // Get Current UserRole from lookup
+ UserRoleDAO.Data lurdd = lur.get(trans, urdd.user,urdd.role);
+ if(lurdd==null) {
+ q.futureDAO.delete(trans, curr, false);
+ return OP_STATUS.RL;
+ } else {
+ if(curr.expires.compareTo(lurdd.expires)<0) {
+ q.futureDAO.delete(trans, curr, false);
+ return OP_STATUS.RL;
+ }
+ }
+ } catch (IOException e) {
+ return Result.err(Result.ERR_BadData,"Cannot reconstitute %1",curr.memo);
+ }
+ }
+
+ boolean aDenial = false;
+ int cntSuper=0, appSuper=0,cntOwner=0, appOwner=0;
+ for(ApprovalDAO.Data add : la.get(trans)) {
+ switch(add.status) {
+ case "approved":
+ if("owner".equals(add.type)) {
+ ++cntOwner;
+ ++appOwner;
+ } else if("supervisor".equals(add.type)) {
+ ++cntSuper;
+ ++appSuper;
+ }
+ break;
+ case "pending":
+ if("owner".equals(add.type)) {
+ ++cntOwner;
+ } else if("supervisor".equals(add.type)) {
+ ++cntSuper;
+ }
+ break;
+ case "denied":
+ aDenial=true;
+ break;
+ }
+ }
+
+ Result<OP_STATUS> ros=null;
+ if(aDenial) {
+ // Note: Denial will be Audit-logged.
+// for (ApprovalDAO.Data ad : allApprovalsForTicket.value) {
+// q.approvalDAO.delete(trans, ad, false);
+// }
+ ros = OP_STATUS.RD;
+ if(q.futureDAO.delete(trans, curr, false).notOK()) {
+ trans.info().printf("Future %s could not be deleted", curr.id.toString());
+ } else {
+ if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
+ // A Denial means we must remove UserRole
+ if(fop.equals(FUTURE_OP.U) || fop.equals(FUTURE_OP.A)) {
+ UserRoleDAO.Data data = new UserRoleDAO.Data();
+ try {
+ data.reconstitute(curr.construct);
+ } catch (IOException e) {
+ trans.error().log("Cannot reconstitute",curr.memo);
+ }
+ ros = set(OP_STATUS.RD,delUserRole(trans, data.user, data.ns, data.rname));
+ }
+ }
+ }
+ }
+
+ // Decision: if not Denied, proceed when at least one Owner has approved (if any Owner
+ // approvals exist) and at least one Supervisor has approved (if any Supervisor approvals exist)
+ boolean goDecision = (cntOwner>0?appOwner>0:true) && (cntSuper>0?appSuper>0:true);
+
+ if(goDecision) {
+ // should check if any other pendings before performing
+ // actions
+ try {
+ if (FOP_ROLE.equalsIgnoreCase(curr.target)) {
+ RoleDAO.Data data = new RoleDAO.Data();
+ data.reconstitute(curr.construct);
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,q.roleDAO.dao().create(trans, data));
+ break;
+ case D:
+ ros = set(OP_STATUS.RE,deleteRole(trans, data, true, true));
+ break;
+ default:
+ }
+ } else if (FOP_PERM.equalsIgnoreCase(curr.target)) {
+ PermDAO.Data pdd = new PermDAO.Data();
+ pdd.reconstitute(curr.construct);
+ Set<String> roles;
+ Result<RoleDAO.Data> rrdd;
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,createPerm(trans, pdd, true));
+ break;
+ case D:
+ ros = set(OP_STATUS.RE,deletePerm(trans, pdd, true, true));
+ break;
+ case G:
+ roles = pdd.roles(true);
+ for (String roleStr : roles) {
+ rrdd = RoleDAO.Data.decode(trans, q, roleStr);
+ if (rrdd.isOKhasData()) {
+ ros = set(OP_STATUS.RE,addPermToRole(trans, rrdd.value, pdd, true));
+ } else {
+ trans.error().log(rrdd.errorString());
+ }
+ }
+ break;
+ case UG:
+ roles = pdd.roles(true);
+ for (String roleStr : roles) {
+ rrdd = RoleDAO.Data.decode(trans, q, roleStr);
+ if (rrdd.isOKhasData()) {
+ ros = set(OP_STATUS.RE,delPermFromRole(trans, rrdd.value, pdd, true));
+ } else {
+ trans.error().log(rrdd.errorString());
+ }
+ }
+ break;
+ default:
+ }
+ } else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) {
+ if(urdd==null) {
+ urdd = new UserRoleDAO.Data();
+ urdd.reconstitute(curr.construct);
+ }
+ // if I am the last to approve, create user role
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,addUserRole(trans, urdd));
+ break;
+ case U:
+ case A:
+ ros = set(OP_STATUS.RE,extendUserRole(trans,urdd,true));
+ break;
+ default:
+ }
+ } else if (FOP_NS.equalsIgnoreCase(curr.target)) {
+ Namespace namespace = new Namespace();
+ namespace.reconstitute(curr.construct);
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,createNS(trans, namespace, true));
+ break;
+ default:
+ }
+ } else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) {
+ DelegateDAO.Data data = new DelegateDAO.Data();
+ data.reconstitute(curr.construct);
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,q.delegateDAO.create(trans, data));
+ break;
+ case U:
+ ros = set(OP_STATUS.RE,q.delegateDAO.update(trans, data));
+ break;
+ default:
+ }
+ } else if (FOP_CRED.equalsIgnoreCase(curr.target)) {
+ CredDAO.Data data = new CredDAO.Data();
+ data.reconstitute(curr.construct);
+ switch(fop) {
+ case C:
+ ros = set(OP_STATUS.RE,q.credDAO.dao().create(trans, data));
+ break;
+ default:
+ }
+ }
+ } catch (Exception e) {
+ trans.error().log("Exception: ", e.getMessage(),
+ " \n occurred while performing", curr.memo,
+ " from Ticket ", curr.id.toString());
+ }
+ q.futureDAO.delete(trans, curr, false);
+ } // end for goDecision
+ if(ros==null) {
+ //return Result.err(Status.ACC_Future, "Full Approvals not obtained: No action taken");
+ ros = OP_STATUS.RP;
+ }
+
+ return ros;
+ }
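+ // Wiring sketch (hedged; the approval list is supplied by the caller): how
+ // performFutureOp is driven with the realtime urDBLookup plus a trivial
+ // approval Lookup, here for a re-approval (FUTURE_OP.A) pass.
+ private Result<OP_STATUS> sketchPerformOp(AuthzTrans trans, FutureDAO.Data curr,
+         final List<ApprovalDAO.Data> approvals) {
+     Lookup<List<ApprovalDAO.Data>> la = new Lookup<List<ApprovalDAO.Data>>() {
+         @Override
+         public List<ApprovalDAO.Data> get(AuthzTrans t, Object ... keys) {
+             return approvals;
+         }
+     };
+     return performFutureOp(trans, FUTURE_OP.A, curr, la, urDBLookup);
+ }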
+
+ // Convenience method for setting OP_STATUS Results
+ private Result<OP_STATUS> set(Result<OP_STATUS> rs, Result<?> orig) {
+ if(orig.isOK()) {
+ return rs;
+ } else {
+ return Result.err(orig);
+ }
+ }
+
+ private Result<ApprovalDAO.Data> addIdentity(AuthzTrans trans, StringBuilder sb,
+ Boolean[] first, String user, String memo, FUTURE_OP op, Identity u, UUID ticket, String type) throws OrganizationException {
+ ApprovalDAO.Data ad = new ApprovalDAO.Data();
+ // Note ad.id is set by ApprovalDAO Create
+ ad.ticket = ticket;
+ ad.user = user;
+ ad.approver = u.fullID();
+ ad.status = ApprovalDAO.PENDING;
+ ad.memo = memo;
+ ad.type = type;
+ ad.operation = op.name();
+ // Note ad.updated is created in System
+ Result<ApprovalDAO.Data> r = q.approvalDAO.create(trans,ad);
+ if(r.isOK()) {
+ if(first[0]) {
+ first[0] = false;
+ } else {
+ sb.append(", ");
+ }
+ sb.append(r.value.user);
+ sb.append(':');
+ sb.append(r.value.ticket);
+ return r;
+ } else {
+ return Result.err(Status.ERR_ActionNotCompleted,
+ "Approval for %s, %s could not be created: %s",
+ ad.user, ad.approver,
+ r.details, sb.toString());
+ }
+ }
+
+ public Executor newExecutor(AuthzTrans trans) {
+ return new CassExecutor(trans, this);
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java
index 1544aab8..6bb440ad 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/PermLookup.java
@@ -45,141 +45,141 @@ import org.onap.aaf.auth.layer.Result;
*/
// Package on purpose
class PermLookup {
- private AuthzTrans trans;
- private String user;
- private Question q;
- private Result<List<UserRoleDAO.Data>> userRoles = null;
- private Result<List<RoleDAO.Data>> roles = null;
- private Result<Set<String>> permNames = null;
- private Result<List<PermDAO.Data>> perms = null;
-
- private PermLookup() {}
-
- static PermLookup get(AuthzTrans trans, Question q, String user) {
- PermLookup lp=null;
- Map<String, PermLookup> permMap = trans.get(Question.PERMS, null);
- if (permMap == null) {
- trans.put(Question.PERMS, permMap = new HashMap<>());
- } else {
- lp = permMap.get(user);
- }
+ private AuthzTrans trans;
+ private String user;
+ private Question q;
+ private Result<List<UserRoleDAO.Data>> userRoles = null;
+ private Result<List<RoleDAO.Data>> roles = null;
+ private Result<Set<String>> permNames = null;
+ private Result<List<PermDAO.Data>> perms = null;
+
+ private PermLookup() {}
+
+ static PermLookup get(AuthzTrans trans, Question q, String user) {
+ PermLookup lp=null;
+ Map<String, PermLookup> permMap = trans.get(Question.PERMS, null);
+ if (permMap == null) {
+ trans.put(Question.PERMS, permMap = new HashMap<>());
+ } else {
+ lp = permMap.get(user);
+ }
- if (lp == null) {
- lp = new PermLookup();
- lp.trans = trans;
- lp.user = user;
- lp.q = q;
- permMap.put(user, lp);
- }
- return lp;
- }
-
- public Result<List<UserRoleDAO.Data>> getUserRoles() {
- if(userRoles==null) {
- userRoles = q.userRoleDAO.readByUser(trans,user);
- if(userRoles.isOKhasData()) {
- List<UserRoleDAO.Data> lurdd = new ArrayList<>();
- Date now = new Date();
- for(UserRoleDAO.Data urdd : userRoles.value) {
- if(urdd.expires.after(now)) { // Remove Expired
- lurdd.add(urdd);
- }
- }
- if(lurdd.size()==0) {
- return userRoles = Result.err(Status.ERR_UserNotFound,
- "%s not found or not associated with any Roles: ",
- user);
- } else {
- return userRoles = Result.ok(lurdd);
- }
- } else {
- return userRoles;
- }
- } else {
- return userRoles;
- }
- }
+ if (lp == null) {
+ lp = new PermLookup();
+ lp.trans = trans;
+ lp.user = user;
+ lp.q = q;
+ permMap.put(user, lp);
+ }
+ return lp;
+ }
+
+ public Result<List<UserRoleDAO.Data>> getUserRoles() {
+ if(userRoles==null) {
+ userRoles = q.userRoleDAO.readByUser(trans,user);
+ if(userRoles.isOKhasData()) {
+ List<UserRoleDAO.Data> lurdd = new ArrayList<>();
+ Date now = new Date();
+ for(UserRoleDAO.Data urdd : userRoles.value) {
+ if(urdd.expires.after(now)) { // Remove Expired
+ lurdd.add(urdd);
+ }
+ }
+ if(lurdd.size()==0) {
+ return userRoles = Result.err(Status.ERR_UserNotFound,
+ "%s not found or not associated with any Roles: ",
+ user);
+ } else {
+ return userRoles = Result.ok(lurdd);
+ }
+ } else {
+ return userRoles;
+ }
+ } else {
+ return userRoles;
+ }
+ }
- public Result<List<RoleDAO.Data>> getRoles() {
- if(roles==null) {
- Result<List<UserRoleDAO.Data>> rur = getUserRoles();
- if(rur.isOK()) {
- List<RoleDAO.Data> lrdd = new ArrayList<>();
- for (UserRoleDAO.Data urdata : rur.value) {
- // Gather all permissions from all Roles
- if(urdata.ns==null || urdata.rname==null) {
- return Result.err(Status.ERR_BadData,"DB Content Error: nulls in User Role %s %s", urdata.user,urdata.role);
- } else {
- Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(
- trans, urdata.ns, urdata.rname);
- if(rlrd.isOK()) {
- lrdd.addAll(rlrd.value);
- }
- }
- }
- return roles = Result.ok(lrdd);
- } else {
- return roles = Result.err(rur);
- }
- } else {
- return roles;
- }
- }
+ public Result<List<RoleDAO.Data>> getRoles() {
+ if(roles==null) {
+ Result<List<UserRoleDAO.Data>> rur = getUserRoles();
+ if(rur.isOK()) {
+ List<RoleDAO.Data> lrdd = new ArrayList<>();
+ for (UserRoleDAO.Data urdata : rur.value) {
+ // Gather all permissions from all Roles
+ if(urdata.ns==null || urdata.rname==null) {
+ return Result.err(Status.ERR_BadData,"DB Content Error: nulls in User Role %s %s", urdata.user,urdata.role);
+ } else {
+ Result<List<RoleDAO.Data>> rlrd = q.roleDAO.read(
+ trans, urdata.ns, urdata.rname);
+ if(rlrd.isOK()) {
+ lrdd.addAll(rlrd.value);
+ }
+ }
+ }
+ return roles = Result.ok(lrdd);
+ } else {
+ return roles = Result.err(rur);
+ }
+ } else {
+ return roles;
+ }
+ }
- public Result<Set<String>> getPermNames() {
- if(permNames==null) {
- Result<List<RoleDAO.Data>> rlrd = getRoles();
- if (rlrd.isOK()) {
- Set<String> pns = new TreeSet<>();
- for (RoleDAO.Data rdata : rlrd.value) {
- pns.addAll(rdata.perms(false));
- }
- return permNames = Result.ok(pns);
- } else {
- return permNames = Result.err(rlrd);
- }
- } else {
- return permNames;
- }
- }
-
- public Result<List<PermDAO.Data>> getPerms(boolean lookup) {
- if(perms==null) {
- // Note: It should be ok for a Valid user to have no permissions -
- // Jonathan 8/12/2013
- Result<Set<String>> rss = getPermNames();
- if(rss.isOK()) {
- List<PermDAO.Data> lpdd = new ArrayList<>();
- for (String perm : rss.value) {
- if(lookup) {
- Result<String[]> ap = PermDAO.Data.decodeToArray(trans, q, perm);
- if(ap.isOK()) {
-
- Result<List<PermDAO.Data>> rlpd = q.permDAO.read(perm,trans,ap.value);
- if (rlpd.isOKhasData()) {
- for (PermDAO.Data pData : rlpd.value) {
- lpdd.add(pData);
- }
- }
- } else {
- trans.error().log("In getPermsByUser, for", user, perm);
- }
- } else {
- Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, q, perm);
- if (pr.notOK()) {
- trans.error().log("In getPermsByUser, for", user, pr.errorString());
- } else {
- lpdd.add(pr.value);
- }
- }
+ public Result<Set<String>> getPermNames() {
+ if(permNames==null) {
+ Result<List<RoleDAO.Data>> rlrd = getRoles();
+ if (rlrd.isOK()) {
+ Set<String> pns = new TreeSet<>();
+ for (RoleDAO.Data rdata : rlrd.value) {
+ pns.addAll(rdata.perms(false));
+ }
+ return permNames = Result.ok(pns);
+ } else {
+ return permNames = Result.err(rlrd);
+ }
+ } else {
+ return permNames;
+ }
+ }
+
+ public Result<List<PermDAO.Data>> getPerms(boolean lookup) {
+ if(perms==null) {
+ // Note: It should be ok for a Valid user to have no permissions -
+ // Jonathan 8/12/2013
+ Result<Set<String>> rss = getPermNames();
+ if(rss.isOK()) {
+ List<PermDAO.Data> lpdd = new ArrayList<>();
+ for (String perm : rss.value) {
+ if(lookup) {
+ Result<String[]> ap = PermDAO.Data.decodeToArray(trans, q, perm);
+ if(ap.isOK()) {
+
+ Result<List<PermDAO.Data>> rlpd = q.permDAO.read(perm,trans,ap.value);
+ if (rlpd.isOKhasData()) {
+ for (PermDAO.Data pData : rlpd.value) {
+ lpdd.add(pData);
+ }
+ }
+ } else {
+ trans.error().log("In getPermsByUser, for", user, perm);
+ }
+ } else {
+ Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, q, perm);
+ if (pr.notOK()) {
+ trans.error().log("In getPermsByUser, for", user, pr.errorString());
+ } else {
+ lpdd.add(pr.value);
+ }
+ }
- }
- return perms = Result.ok(lpdd);
- } else {
- return perms = Result.err(rss);
- }
- } else {
- return perms;
- }
- }
+ }
+ return perms = Result.ok(lpdd);
+ } else {
+ return perms = Result.err(rss);
+ }
+ } else {
+ return perms;
+ }
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java
index 8d148ec1..152412a2 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/dao/hl/Question.java
@@ -88,1065 +88,1065 @@ import com.datastax.driver.core.Cluster;
*/
public class Question {
- // DON'T CHANGE FROM lower Case!!!
- public static enum Type {
- ns, role, perm, cred
- };
-
- public static final String OWNER="owner";
- public static final String ADMIN="admin";
- public static final String DOT_OWNER=".owner";
- public static final String DOT_ADMIN=".admin";
- public static final String ACCESS = "access";
-
- static final String ASTERIX = "*";
-
- public static enum Access {
- read, write, create
- };
-
- public static final String READ = Access.read.name();
- public static final String WRITE = Access.write.name();
- public static final String CREATE = Access.create.name();
-
- public static final String ROLE = Type.role.name();
- public static final String PERM = Type.perm.name();
- public static final String NS = Type.ns.name();
- public static final String CRED = Type.cred.name();
- private static final String DELG = "delg";
- public static final String ROOT_NS = Define.isInitialized() ? Define.ROOT_NS() : "undefined";
- public static final String ATTRIB = "attrib";
-
-
- public static final int MAX_SCOPE = 10;
- public static final int APP_SCOPE = 3;
- public static final int COMPANY_SCOPE = 2;
- static Slot PERMS;
-
- private static Set<String> specialLog = null;
- public static final Random random = new SecureRandom();
- private static long traceID = random.nextLong();
- private static Slot specialLogSlot = null;
- private static Slot transIDSlot = null;
-
-
- public final HistoryDAO historyDAO;
- public final CachedNSDAO nsDAO;
- public final CachedRoleDAO roleDAO;
- public final CachedPermDAO permDAO;
- public final CachedUserRoleDAO userRoleDAO;
- public final CachedCredDAO credDAO;
- public final CachedCertDAO certDAO;
- public final DelegateDAO delegateDAO;
- public final FutureDAO futureDAO;
- public final ApprovalDAO approvalDAO;
- private final CacheInfoDAO cacheInfoDAO;
- public final LocateDAO locateDAO;
-
- public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException {
- PERMS = trans.slot("USER_PERMS");
- trans.init().log("Instantiating DAOs");
- long expiresIn = Long.parseLong(trans.getProperty(Config.AAF_USER_EXPIRES, Config.AAF_USER_EXPIRES_DEF));
- historyDAO = new HistoryDAO(trans, cluster, keyspace);
-
- // Deal with Cached Entries
- cacheInfoDAO = new CacheInfoDAO(trans, historyDAO);
-
- nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO),cacheInfoDAO, expiresIn);
- permDAO = new CachedPermDAO(new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
- roleDAO = new CachedRoleDAO(new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
- userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO,cacheInfoDAO), cacheInfoDAO, expiresIn);
- credDAO = new CachedCredDAO(new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
- certDAO = new CachedCertDAO(new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
-
- locateDAO = new LocateDAO(trans,historyDAO);
- futureDAO = new FutureDAO(trans, historyDAO);
- delegateDAO = new DelegateDAO(trans, historyDAO);
- approvalDAO = new ApprovalDAO(trans, historyDAO);
-
- // Only want to aggressively cleanse User related Caches... The others,
- // just normal refresh
- if(startClean) {
- CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO);
- CachedDAO.startRefresh(trans.env(), cacheInfoDAO);
- }
- // Set a Timer to Check Caches to send messages for Caching changes
-
- if(specialLogSlot==null) {
- specialLogSlot = trans.slot(AuthzTransFilter.SPECIAL_LOG_SLOT);
- }
-
- if(transIDSlot==null) {
- transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT);
- }
-
- AbsCassDAO.primePSIs(trans);
- }
-
-
- public void close(AuthzTrans trans) {
- historyDAO.close(trans);
- cacheInfoDAO.close(trans);
- nsDAO.close(trans);
- permDAO.close(trans);
- roleDAO.close(trans);
- userRoleDAO.close(trans);
- credDAO.close(trans);
- certDAO.close(trans);
- delegateDAO.close(trans);
- futureDAO.close(trans);
- approvalDAO.close(trans);
- }
-
- public Result<PermDAO.Data> permFrom(AuthzTrans trans, String type,
- String instance, String action) {
- Result<NsDAO.Data> rnd = deriveNs(trans, type);
- if (rnd.isOK()) {
- return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type),
- instance, action));
- } else {
- return Result.err(rnd);
- }
- }
-
- /**
- * getPermsByUser
- *
- * Because this call is frequently called internally, AND because we already
- * look for it in the initial Call, we cache within the Transaction
- *
- * @param trans
- * @param user
- * @return
- */
- public Result<List<PermDAO.Data>> getPermsByUser(AuthzTrans trans, String user, boolean lookup) {
- return PermLookup.get(trans, this, user).getPerms(lookup);
- }
-
- public Result<List<PermDAO.Data>> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) {
- PermLookup plUser = PermLookup.get(trans, this, user);
- Result<Set<String>> plPermNames = plUser.getPermNames();
- if(plPermNames.notOK()) {
- return Result.err(plPermNames);
- }
-
- Set<String> nss;
- if(forUser.equals(user)) {
- nss = null;
- } else {
- // Setup a TreeSet to check on Namespaces to
- nss = new TreeSet<>();
- PermLookup fUser = PermLookup.get(trans, this, forUser);
- Result<Set<String>> forUpn = fUser.getPermNames();
- if(forUpn.notOK()) {
- return Result.err(forUpn);
- }
-
- for(String pn : forUpn.value) {
- Result<String[]> decoded = PermDAO.Data.decodeToArray(trans, this, pn);
- if(decoded.isOKhasData()) {
- nss.add(decoded.value[0]);
- } else {
- trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString());
- }
- }
- }
-
- List<PermDAO.Data> rlpUser = new ArrayList<>();
- Result<PermDAO.Data> rpdd;
- PermDAO.Data pdd;
- for(String pn : plPermNames.value) {
- rpdd = PermDAO.Data.decode(trans, this, pn);
- if(rpdd.isOKhasData()) {
- pdd=rpdd.value;
- if(nss==null || nss.contains(pdd.ns)) {
- rlpUser.add(pdd);
- }
- } else {
- trans.error().log(pn,", derived from a Role, is invalid. Run Data Cleanup:",rpdd.errorString());
- }
- }
- return Result.ok(rlpUser);
- }
-
- public Result<List<PermDAO.Data>> getPermsByType(AuthzTrans trans, String perm) {
- Result<NsSplit> nss = deriveNsSplit(trans, perm);
- if (nss.notOK()) {
- return Result.err(nss);
- }
- return permDAO.readByType(trans, nss.value.ns, nss.value.name);
- }
-
- public Result<List<PermDAO.Data>> getPermsByName(AuthzTrans trans,
- String type, String instance, String action) {
- Result<NsSplit> nss = deriveNsSplit(trans, type);
- if (nss.notOK()) {
- return Result.err(nss);
- }
- return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action);
- }
-
- public Result<List<PermDAO.Data>> getPermsByRole(AuthzTrans trans, String role, boolean lookup) {
- Result<NsSplit> nss = deriveNsSplit(trans, role);
- if (nss.notOK()) {
- return Result.err(nss);
- }
-
- Result<List<RoleDAO.Data>> rlrd = roleDAO.read(trans, nss.value.ns,
- nss.value.name);
- if (rlrd.notOKorIsEmpty()) {
- return Result.err(rlrd);
- }
- // Using Set to avoid duplicates
- Set<String> permNames = new HashSet<>();
- if (rlrd.isOKhasData()) {
- for (RoleDAO.Data drr : rlrd.value) {
- permNames.addAll(drr.perms(false));
- }
- }
-
- // Note: It should be ok for a Valid user to have no permissions -
- // Jonathan 8/12/2013
- List<PermDAO.Data> perms = new ArrayList<>();
- for (String perm : permNames) {
- Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, this, perm);
- if (pr.notOK()) {
- return Result.err(pr);
- }
-
- if(lookup) {
- Result<List<PermDAO.Data>> rlpd = permDAO.read(trans, pr.value);
- if (rlpd.isOKhasData()) {
- for (PermDAO.Data pData : rlpd.value) {
- perms.add(pData);
- }
- }
- } else {
- perms.add(pr.value);
- }
- }
-
- return Result.ok(perms);
- }
-
- public Result<List<RoleDAO.Data>> getRolesByName(AuthzTrans trans,
- String role) {
- Result<NsSplit> nss = deriveNsSplit(trans, role);
- if (nss.notOK()) {
- return Result.err(nss);
- }
- String r = nss.value.name;
- if (r.endsWith(".*")) { // do children Search
- return roleDAO.readChildren(trans, nss.value.ns,
- r.substring(0, r.length() - 2));
- } else if (ASTERIX.equals(r)) {
- return roleDAO.readChildren(trans, nss.value.ns, ASTERIX);
- } else {
- return roleDAO.read(trans, nss.value.ns, r);
- }
- }
-
- /**
- * Derive NS
- *
- * Given a Child Namespace, figure out what the best Namespace parent is.
- *
- * For instance, if in the NS table, the parent "org.osaaf" exists, but not
- * "org.osaaf.child" or "org.osaaf.a.b.c", then passing in either
- * "org.osaaf.child" or "org.osaaf.a.b.c" will return "org.osaaf"
- *
- * Uses recursive search on Cached DAO data
- *
- * @param trans
- * @param child
- * @return
- */
- public Result<NsDAO.Data> deriveNs(AuthzTrans trans, String child) {
- Result<List<NsDAO.Data>> r = nsDAO.read(trans, child);
-
- if (r.isOKhasData()) {
- return Result.ok(r.value.get(0));
- } else {
- int dot;
- if(child==null) {
- return Result.err(Status.ERR_NsNotFound, "No Namespace");
- } else {
- dot = child.lastIndexOf('.');
- }
- if (dot < 0) {
- return Result.err(Status.ERR_NsNotFound, "No Namespace for [%s]", child);
- } else {
- return deriveNs(trans, child.substring(0, dot));
- }
- }
- }
-
- public Result<NsDAO.Data> deriveFirstNsForType(AuthzTrans trans, String str, NsType type) {
- NsDAO.Data nsd;
-
- for(String lookup = str;!".".equals(lookup) && lookup!=null;) {
- Result<List<NsDAO.Data>> rld = nsDAO.read(trans, lookup);
- if(rld.isOKhasData()) {
- nsd=rld.value.get(0);
- lookup = nsd.parent;
- if(type.type == nsd.type) {
- return Result.ok(nsd);
- }
- } else {
- return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str);
- }
- }
- return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name());
- }
-
- public Result<NsSplit> deriveNsSplit(AuthzTrans trans, String child) {
- Result<NsDAO.Data> ndd = deriveNs(trans, child);
- if (ndd.isOK()) {
- NsSplit nss = new NsSplit(ndd.value, child);
- if (nss.isOK()) {
- return Result.ok(nss);
- } else {
- return Result.err(Status.ERR_NsNotFound,
- "Cannot split [%s] into valid namespace elements",
- child);
- }
- }
- return Result.err(ndd);
- }
-
- /**
- * Translate an ID into it's domain
- *
- * i.e. myid1234@aaf.att.com results in domain of com.att.aaf
- *
- * @param id
- * @return
- */
- public static String domain2ns(String id) {
- int at = id.indexOf('@');
- if (at >= 0) {
- String[] domain = id.substring(at + 1).split("\\.");
- StringBuilder ns = new StringBuilder(id.length());
- boolean first = true;
- for (int i = domain.length - 1; i >= 0; --i) {
- if (first) {
- first = false;
- } else {
- ns.append('.');
- }
- ns.append(domain[i]);
- }
- return ns.toString();
- } else {
- return "";
- }
-
- }
-
- /**
- * Validate Namespace of ID@Domain
- *
- * Namespace is reverse order of Domain.
- *
- * @param trans
- * @param id
- * @return
- */
- public Result<NsDAO.Data> validNSOfDomain(AuthzTrans trans, String id) {
- // Take domain, reverse order, and check on NS
- String ns;
- if(id.indexOf('@')<0) { // it's already an ns, not an ID
- ns = id;
- } else {
- ns = domain2ns(id);
- }
- if (ns.length() > 0) {
- if(!trans.org().getDomain().equals(ns)) {
- Result<List<NsDAO.Data>> rlnsd = nsDAO.read(trans, ns);
- if (rlnsd.isOKhasData()) {
- return Result.ok(rlnsd.value.get(0));
- }
- }
- }
- return Result.err(Status.ERR_NsNotFound,
- "A Namespace is not available for %s", id);
- }
-
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) {
- // <ns>.access|:role:<role name>|<read|write>
- String ns = ndd.name;
- int last;
- do {
- if (isGranted(trans, user, ns, ACCESS, ":ns", access.name())) {
- return Result.ok(ndd);
- }
- if ((last = ns.lastIndexOf('.')) >= 0) {
- ns = ns.substring(0, last);
- }
- } while (last >= 0);
- // com.att.aaf.ns|:<client ns>:ns|<access>
- // AAF-724 - Make consistent response for "May User", and not take the
- // last check... too confusing.
- Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + ndd.name + ":ns", access.name());
- if (rv.isOK()) {
- return rv;
- } else if(rv.status==Result.ERR_Backend) {
- return Result.err(rv);
- } else {
- return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]",
- user, access.name(), ndd.name);
- }
- }
-
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) {
- Result<NsDAO.Data> rnsd = deriveNs(trans, rdd.ns);
- if (rnsd.isOK()) {
- return mayUser(trans, user, rnsd.value, rdd, access);
- }
- return rnsd;
- }
-
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) {
- // 1) Is User in the Role?
- Result<List<UserRoleDAO.Data>> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName());
- if (rurd.isOKhasData()) {
- return Result.ok(ndd);
- }
-
- String roleInst = ":role:" + rdd.name;
- // <ns>.access|:role:<role name>|<read|write>
- String ns = rdd.ns;
- int last;
- do {
- if (isGranted(trans, user, ns,ACCESS, roleInst, access.name())) {
- return Result.ok(ndd);
- }
- if ((last = ns.lastIndexOf('.')) >= 0) {
- ns = ns.substring(0, last);
- }
- } while (last >= 0);
-
- // Check if Access by Global Role perm
- // com.att.aaf.ns|:<client ns>:role:name|<access>
- Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":"
- + rdd.ns + roleInst, access.name());
- if (rnsd.isOK()) {
- return rnsd;
- } else if(rnsd.status==Result.ERR_Backend) {
- return Result.err(rnsd);
- }
-
- // Check if Access to Whole NS
- // AAF-724 - Make consistent response for "May User", and not take the
- // last check... too confusing.
- Result<org.onap.aaf.auth.dao.cass.NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd,
- ":" + rdd.ns + ":ns", access.name());
- if (rv.isOK()) {
- return rv;
- } else if(rv.status==Result.ERR_Backend) {
- return Result.err(rv);
- } else {
- return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]",
- user, access.name(), rdd.fullName());
- }
-
- }
-
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) {
- Result<NsDAO.Data> rnsd = deriveNs(trans, pdd.ns);
- if (rnsd.isOK()) {
- return mayUser(trans, user, rnsd.value, pdd, access);
- }
- return rnsd;
- }
-
- public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) {
- if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) {
- return Result.ok(ndd);
- }
- String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action;
- // <ns>.access|:role:<role name>|<read|write>
- String ns = ndd.name;
- int last;
- do {
- if (isGranted(trans, user, ns, ACCESS, permInst, access.name())) {
- return Result.ok(ndd);
- }
- if ((last = ns.lastIndexOf('.')) >= 0) {
- ns = ns.substring(0, last);
- }
- } while (last >= 0);
-
- // Check if Access by NS perm
- // com.att.aaf.ns|:<client ns>:role:name|<access>
- Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name());
- if (rnsd.isOK()) {
- return rnsd;
- } else if(rnsd.status==Result.ERR_Backend) {
- return Result.err(rnsd);
- }
-
- // Check if Access to Whole NS
- // AAF-724 - Make consistent response for "May User", and not take the
- // last check... too confusing.
- Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + ":ns", access.name());
- if (rv.isOK()) {
- return rv;
- } else {
- return Result.err(Status.ERR_Denied,
- "[%s] may not %s Perm [%s|%s|%s]", user, access.name(),
- pdd.fullType(), pdd.instance, pdd.action);
- }
-
- }
-
- public Result<Void> mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) {
- try {
- Result<NsDAO.Data> rnsd = deriveNs(trans, domain2ns(trans.user()));
- if(rnsd.isOKhasData() && mayUserVirtueOfNS(trans,trans.user(),rnsd.value, ":" + rnsd.value.name + ":ns", access.name()).isOK()) {
- return Result.ok();
- }
- boolean isUser = trans.user().equals(dd.user);
- boolean isDelegate = dd.delegate != null
- && (dd.user.equals(dd.delegate) || trans.user().equals(
- dd.delegate));
- Organization org = trans.org();
- switch (access) {
- case create:
- if (org.getIdentity(trans, dd.user) == null) {
- return Result.err(Status.ERR_UserNotFound,
- "[%s] is not a user in the company database.",
- dd.user);
- }
- if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) {
- return Result.err(Status.ERR_UserNotFound,
- "[%s] is not a user in the company database.",
- dd.delegate);
- }
- if (!trans.requested(REQD_TYPE.force) && dd.user != null && dd.user.equals(dd.delegate)) {
- return Result.err(Status.ERR_BadData,
- "[%s] cannot be a delegate for self", dd.user);
- }
- if (!isUser && !isGranted(trans, trans.user(), ROOT_NS,DELG,
- org.getDomain(), Question.CREATE)) {
- return Result.err(Status.ERR_Denied,
- "[%s] may not create a delegate for [%s]",
- trans.user(), dd.user);
- }
- break;
- case read:
- case write:
- if (!isUser && !isDelegate &&
- !isGranted(trans, trans.user(), ROOT_NS,DELG,org.getDomain(), access.name())) {
- return Result.err(Status.ERR_Denied,
- "[%s] may not %s delegates for [%s]", trans.user(),
- access.name(), dd.user);
- }
- break;
- default:
- return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name());
- }
- } catch (Exception e) {
- return Result.err(e);
- }
- return Result.ok();
- }
-
- /*
- * Check (recursively, if necessary), if able to do something based on NS
- */
- private Result<NsDAO.Data> mayUserVirtueOfNS(AuthzTrans trans, String user, NsDAO.Data nsd, String ns_and_type, String access) {
- String ns = nsd.name;
-
- // If an ADMIN of the Namespace, then allow
-
- Result<List<UserRoleDAO.Data>> rurd;
- if ((rurd = userRoleDAO.readUserInRole(trans, user, ns+DOT_ADMIN)).isOKhasData()) {
- return Result.ok(nsd);
- } else if(rurd.status==Result.ERR_Backend) {
- return Result.err(rurd);
- }
-
- // If Specially granted Global Permission
- if (isGranted(trans, user, ROOT_NS,NS, ns_and_type, access)) {
- return Result.ok(nsd);
- }
-
- // Check recur
-
- int dot = ns.length();
- if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) {
- Result<NsDAO.Data> rnsd = deriveNs(trans, ns.substring(0, dot));
- if (rnsd.isOK()) {
- rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access);
- } else if(rnsd.status==Result.ERR_Backend) {
- return Result.err(rnsd);
- }
- if (rnsd.isOK()) {
- return Result.ok(nsd);
- } else if(rnsd.status==Result.ERR_Backend) {
- return Result.err(rnsd);
- }
- }
- return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access,
- ns_and_type);
- }
-
-
- /**
- * isGranted
- *
- * Important function - Check internal Permission Schemes for Permission to
- * do things
- *
- * @param trans
- * @param type
- * @param instance
- * @param action
- * @return
- */
- public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) {
- Result<List<PermDAO.Data>> perms = getPermsByUser(trans, user, false);
- if (perms.isOK()) {
- for (PermDAO.Data pd : perms.value) {
- if (ns.equals(pd.ns)) {
- if (type.equals(pd.type)) {
- if (PermEval.evalInstance(pd.instance, instance)) {
- if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action
- return true;
- }
- }
- }
- }
- }
- }
- return false;
- }
-
- public Result<Date> doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException {
- Result<List<CredDAO.Data>> result;
- TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE);
- try {
- result = credDAO.readID(trans, user);
- } finally {
- tt.done();
- }
-
- Result<Date> rv = null;
- if(result.isOK()) {
- if (result.isEmpty()) {
- rv = Result.err(Status.ERR_UserNotFound, user);
- if (willSpecialLog(trans,user)) {
- trans.audit().log("Special DEBUG:", user, " does not exist in DB");
- }
- } else {
- Date now = new Date();//long now = System.currentTimeMillis();
- // Bug noticed 6/22. Sorting on the result can cause Concurrency Issues.
- List<CredDAO.Data> cddl;
- if(result.value.size() > 1) {
- cddl = new ArrayList<>(result.value.size());
- for(CredDAO.Data old : result.value) {
- if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) {
- cddl.add(old);
- }
- }
- if(cddl.size()>1) {
- Collections.sort(cddl,new Comparator<CredDAO.Data>() {
- @Override
- public int compare(org.onap.aaf.auth.dao.cass.CredDAO.Data a,
- org.onap.aaf.auth.dao.cass.CredDAO.Data b) {
- return b.expires.compareTo(a.expires);
- }
- });
- }
- } else {
- cddl = result.value;
- }
-
- Date expired = null;
- StringBuilder debug = willSpecialLog(trans,user)?new StringBuilder():null;
- for (CredDAO.Data cdd : cddl) {
- if(!cdd.id.equals(user)) {
- trans.error().log("doesUserCredMatch DB call does not match for user: " + user);
- }
- if (cdd.expires.after(now)) {
- byte[] dbcred = cdd.cred.array();
-
- try {
- switch(cdd.type) {
- case CredDAO.BASIC_AUTH:
- byte[] md5=Hash.hashMD5(cred);
- if(Hash.compareTo(md5,dbcred)==0) {
- checkLessThanDays(trans,7,now,cdd);
- return Result.ok(cdd.expires);
- } else if (debug!=null) {
- load(debug, cdd);
- }
- break;
- case CredDAO.BASIC_AUTH_SHA256:
- ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length);
- bb.putInt(cdd.other);
- bb.put(cred);
- byte[] hash = Hash.hashSHA256(bb.array());
-
- if(Hash.compareTo(hash,dbcred)==0) {
- checkLessThanDays(trans,7,now,cdd);
- return Result.ok(cdd.expires);
- } else if (debug!=null) {
- load(debug, cdd);
- }
- break;
- default:
- trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires));
- }
- } catch (NoSuchAlgorithmException e) {
- trans.error().log(e);
- }
- } else {
- if(expired==null || expired.before(cdd.expires)) {
- expired = cdd.expires;
- }
- }
- } // end for each
- if(debug==null) {
- trans.audit().printf("No cred matches ip=%s, user=%s\n",trans.ip(),user);
- } else {
- trans.audit().printf("No cred matches ip=%s, user=%s %s\n",trans.ip(),user,debug.toString());
- }
- if(expired!=null) {
- // Note: this is only returned if there are no good Credentials
- rv = Result.err(Status.ERR_Security,
- "Credentials %s from %s expired %s",trans.user(), trans.ip(), Chrono.dateTime(expired));
- }
- }
- } else {
- return Result.err(result);
- }
- return rv == null ? Result.create((Date) null, Status.ERR_Security, "Wrong credential") : rv;
- }
-
-
- private void load(StringBuilder debug, Data cdd) {
- debug.append("DB Entry: user=");
- debug.append(cdd.id);
- debug.append(",type=");
- debug.append(cdd.type);
- debug.append(",expires=");
- debug.append(Chrono.dateTime(cdd.expires));
- debug.append('\n');
- }
-
-
- private void checkLessThanDays(AuthzTrans trans, int days, Date now, Data cdd) {
- long close = now.getTime() + (days * 86400000);
- long cexp=cdd.expires.getTime();
- if(cexp<close) {
- int daysLeft = days-(int)((close-cexp)/86400000);
- trans.audit().printf("user=%s,ip=%s,expires=%s,days=%d,msg=\"Password expires in less than %d day%s\"",
- cdd.id,trans.ip(),Chrono.dateOnlyStamp(cdd.expires),daysLeft, daysLeft,daysLeft==1?"":"s");
- }
- }
-
-
- public Result<CredDAO.Data> userCredSetup(AuthzTrans trans, CredDAO.Data cred) {
- if(cred.type==CredDAO.RAW) {
- TimeTaken tt = trans.start("Hash Cred", Env.SUB);
- try {
- cred.type = CredDAO.BASIC_AUTH_SHA256;
- cred.other = random.nextInt();
- ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity());
- bb.putInt(cred.other);
- bb.put(cred.cred);
- byte[] hash = Hash.hashSHA256(bb.array());
- cred.cred = ByteBuffer.wrap(hash);
- return Result.ok(cred);
- } catch (NoSuchAlgorithmException e) {
- return Result.err(Status.ERR_General,e.getLocalizedMessage());
- } finally {
- tt.done();
- }
-
- }
- return Result.err(Status.ERR_Security,"invalid/unreadable credential");
- }
-
- public Result<Boolean> userCredCheck(AuthzTrans trans, CredDAO.Data orig, final byte[] raw) {
- TimeTaken tt = trans.start("CheckCred Cred", Env.SUB);
- try {
- switch(orig.type) {
- case CredDAO.BASIC_AUTH_SHA256:
- ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + raw.length);
- bb.putInt(orig.other);
- bb.put(raw);
- return Result.ok(Hash.compareTo(orig.cred.array(),Hash.hashSHA256(bb.array()))==0);
- case CredDAO.BASIC_AUTH:
- return Result.ok( Hash.compareTo(orig.cred.array(), Hash.hashMD5(raw))==0);
- default:
- return Result.ok(false);
- }
- } catch (NoSuchAlgorithmException e) {
- return Result.err(Status.ERR_General,e.getLocalizedMessage());
- } finally {
- tt.done();
- }
- }
-
- public static final String APPROVED = "APPROVE";
- public static final String REJECT = "REJECT";
- public static final String PENDING = "PENDING";
-
- public Result<Void> canAddUser(AuthzTrans trans, UserRoleDAO.Data data,
- List<ApprovalDAO.Data> approvals) {
- // get the approval policy for the organization
-
- // get the list of approvals with an accept status
-
- // validate the approvals against the policy
-
- // for now check if all approvals are received and return
- // SUCCESS/FAILURE/SKIP
- boolean bReject = false;
- boolean bPending = false;
-
- for (ApprovalDAO.Data approval : approvals) {
- if (approval.status.equals(REJECT)) {
- bReject = true;
- } else if (approval.status.equals(PENDING)) {
- bPending = true;
- }
- }
- if (bReject) {
- return Result.err(Status.ERR_Policy,
- "Approval Polocy not conformed");
- }
- if (bPending) {
- return Result.err(Status.ERR_ActionNotCompleted,
- "Required Approvals not received");
- }
-
- return Result.ok();
- }
-
- private static final String NO_CACHE_NAME = "No Cache Data named %s";
-
- public Result<Void> clearCache(AuthzTrans trans, String cname) {
- boolean all = "all".equals(cname);
- Result<Void> rv = null;
-
- if (all || NsDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg);
- }
- if (all || PermDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg);
- }
- if (all || RoleDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg);
- }
- if (all || UserRoleDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg);
- }
- if (all || CredDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg);
- }
- if (all || CertDAO.TABLE.equals(cname)) {
- int seg[] = series(NsDAO.CACHE_SEG);
- for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);}
- rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg);
- }
-
- if (rv == null) {
- rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
- }
- return rv;
- }
-
- public Result<Void> cacheClear(AuthzTrans trans, String cname,Integer segment) {
- Result<Void> rv;
- if (NsDAO.TABLE.equals(cname)) {
- rv = nsDAO.invalidate(segment);
- } else if (PermDAO.TABLE.equals(cname)) {
- rv = permDAO.invalidate(segment);
- } else if (RoleDAO.TABLE.equals(cname)) {
- rv = roleDAO.invalidate(segment);
- } else if (UserRoleDAO.TABLE.equals(cname)) {
- rv = userRoleDAO.invalidate(segment);
- } else if (CredDAO.TABLE.equals(cname)) {
- rv = credDAO.invalidate(segment);
- } else if (CertDAO.TABLE.equals(cname)) {
- rv = certDAO.invalidate(segment);
- } else {
- rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
- }
- return rv;
- }
-
- private int[] series(int max) {
- int[] series = new int[max];
- for (int i = 0; i < max; ++i)
- series[i] = i;
- return series;
- }
-
- public boolean isDelegated(AuthzTrans trans, String user, String approver, Map<String,Result<List<DelegateDAO.Data>>> rldd ) {
- Result<List<DelegateDAO.Data>> userDelegatedFor = rldd.get(user);
- if(userDelegatedFor==null) {
- userDelegatedFor=delegateDAO.readByDelegate(trans, user);
- rldd.put(user, userDelegatedFor);
- }
- if(userDelegatedFor.isOKhasData()) {
- for (DelegateDAO.Data curr : userDelegatedFor.value) {
- if (curr.user.equals(approver) && curr.delegate.equals(user)
- && curr.expires.after(new Date())) {
- return true;
- }
- }
- }
- return false;
- }
-
- public static boolean willSpecialLog(AuthzTrans trans, String user) {
- Boolean b = trans.get(specialLogSlot, null);
- if(b==null) { // we haven't evaluated in this trans for Special Log yet
- if(specialLog==null) {
- return false;
- } else {
- b = specialLog.contains(user);
- trans.put(specialLogSlot, b);
- }
- }
- return b;
- }
-
- public static void logEncryptTrace(AuthzTrans trans, String data) {
- long ti;
- trans.put(transIDSlot, ti=nextTraceID());
- trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"');
- }
-
- private synchronized static long nextTraceID() {
- return ++traceID;
- }
-
- public static synchronized boolean specialLogOn(AuthzTrans trans, String id) {
- if (specialLog == null) {
- specialLog = new HashSet<>();
- }
- boolean rc = specialLog.add(id);
- if(rc) {
- trans.trace().printf("Trace on for %s requested by %s",id,trans.user());
- }
- return rc;
- }
-
- public static synchronized boolean specialLogOff(AuthzTrans trans, String id) {
- if(specialLog==null) {
- return false;
- }
- boolean rv = specialLog.remove(id);
- if (specialLog.isEmpty()) {
- specialLog = null;
- }
- if(rv) {
- trans.trace().printf("Trace off for %s requested by %s",id,trans.user());
- }
- return rv;
- }
-
- /**
- * canMove
- * Which Types can be moved
- * @param nsType
- * @return
- */
- public boolean canMove(NsType nsType) {
- boolean rv;
- switch(nsType) {
- case DOT:
- case ROOT:
- case COMPANY:
- case UNKNOWN:
- rv = false;
- break;
- default:
- rv = true;
- }
- return rv;
- }
-
- public boolean isAdmin(AuthzTrans trans, String user, String ns) {
- Date now = new Date();
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_ADMIN);
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
- if(urdd.expires.after(now)) {
- return true;
- }
- }};
- return false;
- }
-
- public boolean isOwner(AuthzTrans trans, String user, String ns) {
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);
- Date now = new Date();
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
- if(urdd.expires.after(now)) {
- return true;
- }
- }};
- return false;
- }
-
- public int countOwner(AuthzTrans trans, String ns) {
- Result<List<UserRoleDAO.Data>> rur = userRoleDAO.readByRole(trans,ns+DOT_OWNER);
- Date now = new Date();
- int count = 0;
- if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
- if(urdd.expires.after(now)) {
- ++count;
- }
- }};
- return count;
- }
-
- /**
- * Return a Unique String, (same string, if it is already unique), with only
- * lowercase letters, digits and the '.' character.
- *
- * @param name
- * @return
- * @throws IOException
- */
- public static String toUnique(String name) throws IOException {
- byte[] from = name.getBytes();
- StringBuilder sb = new StringBuilder();
- byte f;
- for(int i=0;i<from.length;++i) {
- f=(byte)(from[i]); // printables;
- sb.append((char)((f>>4)+0x61));
- sb.append((char)((f&0x0F)+0x61));
- }
- return sb.toString();
- }
-
- public static String fromUnique(String name) throws IOException {
- byte[] from = name.getBytes();
- StringBuilder sb = new StringBuilder();
- char c;
- for(int i=0;i<from.length;++i) {
- c = (char)((from[i]-0x61)<<4);
- c |= (from[++i]-0x61);
- sb.append(c);
- }
- return sb.toString();
- }
+ // DON'T CHANGE FROM lower Case!!!
+ public static enum Type {
+ ns, role, perm, cred
+ };
+
+ public static final String OWNER="owner";
+ public static final String ADMIN="admin";
+ public static final String DOT_OWNER=".owner";
+ public static final String DOT_ADMIN=".admin";
+ public static final String ACCESS = "access";
+
+ static final String ASTERIX = "*";
+
+ public static enum Access {
+ read, write, create
+ };
+
+ public static final String READ = Access.read.name();
+ public static final String WRITE = Access.write.name();
+ public static final String CREATE = Access.create.name();
+
+ public static final String ROLE = Type.role.name();
+ public static final String PERM = Type.perm.name();
+ public static final String NS = Type.ns.name();
+ public static final String CRED = Type.cred.name();
+ private static final String DELG = "delg";
+ public static final String ROOT_NS = Define.isInitialized() ? Define.ROOT_NS() : "undefined";
+ public static final String ATTRIB = "attrib";
+
+
+ public static final int MAX_SCOPE = 10;
+ public static final int APP_SCOPE = 3;
+ public static final int COMPANY_SCOPE = 2;
+ static Slot PERMS;
+
+ private static Set<String> specialLog = null;
+ public static final Random random = new SecureRandom();
+ private static long traceID = random.nextLong();
+ private static Slot specialLogSlot = null;
+ private static Slot transIDSlot = null;
+
+
+ public final HistoryDAO historyDAO;
+ public final CachedNSDAO nsDAO;
+ public final CachedRoleDAO roleDAO;
+ public final CachedPermDAO permDAO;
+ public final CachedUserRoleDAO userRoleDAO;
+ public final CachedCredDAO credDAO;
+ public final CachedCertDAO certDAO;
+ public final DelegateDAO delegateDAO;
+ public final FutureDAO futureDAO;
+ public final ApprovalDAO approvalDAO;
+ private final CacheInfoDAO cacheInfoDAO;
+ public final LocateDAO locateDAO;
+
+ public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException {
+ PERMS = trans.slot("USER_PERMS");
+ trans.init().log("Instantiating DAOs");
+ long expiresIn = Long.parseLong(trans.getProperty(Config.AAF_USER_EXPIRES, Config.AAF_USER_EXPIRES_DEF));
+ historyDAO = new HistoryDAO(trans, cluster, keyspace);
+
+ // Deal with Cached Entries
+ cacheInfoDAO = new CacheInfoDAO(trans, historyDAO);
+
+ nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO),cacheInfoDAO, expiresIn);
+ permDAO = new CachedPermDAO(new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+ roleDAO = new CachedRoleDAO(new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+ userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO,cacheInfoDAO), cacheInfoDAO, expiresIn);
+ credDAO = new CachedCredDAO(new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+ certDAO = new CachedCertDAO(new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO, expiresIn);
+
+ locateDAO = new LocateDAO(trans,historyDAO);
+ futureDAO = new FutureDAO(trans, historyDAO);
+ delegateDAO = new DelegateDAO(trans, historyDAO);
+ approvalDAO = new ApprovalDAO(trans, historyDAO);
+
+ // Only want to aggressively cleanse User related Caches... The others,
+ // just normal refresh
+ if(startClean) {
+ CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO);
+ CachedDAO.startRefresh(trans.env(), cacheInfoDAO);
+ }
+ // Set a Timer to Check Caches to send messages for Caching changes
+
+ if(specialLogSlot==null) {
+ specialLogSlot = trans.slot(AuthzTransFilter.SPECIAL_LOG_SLOT);
+ }
+
+ if(transIDSlot==null) {
+ transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT);
+ }
+
+ AbsCassDAO.primePSIs(trans);
+ }
+
+
+ public void close(AuthzTrans trans) {
+ historyDAO.close(trans);
+ cacheInfoDAO.close(trans);
+ nsDAO.close(trans);
+ permDAO.close(trans);
+ roleDAO.close(trans);
+ userRoleDAO.close(trans);
+ credDAO.close(trans);
+ certDAO.close(trans);
+ delegateDAO.close(trans);
+ futureDAO.close(trans);
+ approvalDAO.close(trans);
+ }
+
+ public Result<PermDAO.Data> permFrom(AuthzTrans trans, String type,
+ String instance, String action) {
+ Result<NsDAO.Data> rnd = deriveNs(trans, type);
+ if (rnd.isOK()) {
+ return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type),
+ instance, action));
+ } else {
+ return Result.err(rnd);
+ }
+ }
+
+ /**
+ * getPermsByUser
+ *
+ * Because this call is frequently called internally, AND because we already
+ * look for it in the initial Call, we cache within the Transaction
+ *
+ * @param trans
+ * @param user
+ * @return
+ */
+ public Result<List<PermDAO.Data>> getPermsByUser(AuthzTrans trans, String user, boolean lookup) {
+ return PermLookup.get(trans, this, user).getPerms(lookup);
+ }
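+
+ // Usage sketch (illustrative; the Question instance "q", the AuthzTrans "trans" and the
+ // user id below are assumed for this example):
+ //   Result<List<PermDAO.Data>> rlp = q.getPermsByUser(trans, "someone@people.osaaf.org", true);
+ //   if (rlp.isOKhasData()) {
+ //       // rlp.value holds the caller's effective permissions, cached within the transaction
+ //   }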
+
+ public Result<List<PermDAO.Data>> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) {
+ PermLookup plUser = PermLookup.get(trans, this, user);
+ Result<Set<String>> plPermNames = plUser.getPermNames();
+ if(plPermNames.notOK()) {
+ return Result.err(plPermNames);
+ }
+
+ Set<String> nss;
+ if(forUser.equals(user)) {
+ nss = null;
+ } else {
+ // Setup a TreeSet to check on Namespaces to
+ nss = new TreeSet<>();
+ PermLookup fUser = PermLookup.get(trans, this, forUser);
+ Result<Set<String>> forUpn = fUser.getPermNames();
+ if(forUpn.notOK()) {
+ return Result.err(forUpn);
+ }
+
+ for(String pn : forUpn.value) {
+ Result<String[]> decoded = PermDAO.Data.decodeToArray(trans, this, pn);
+ if(decoded.isOKhasData()) {
+ nss.add(decoded.value[0]);
+ } else {
+ trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString());
+ }
+ }
+ }
+
+ List<PermDAO.Data> rlpUser = new ArrayList<>();
+ Result<PermDAO.Data> rpdd;
+ PermDAO.Data pdd;
+ for(String pn : plPermNames.value) {
+ rpdd = PermDAO.Data.decode(trans, this, pn);
+ if(rpdd.isOKhasData()) {
+ pdd=rpdd.value;
+ if(nss==null || nss.contains(pdd.ns)) {
+ rlpUser.add(pdd);
+ }
+ } else {
+ trans.error().log(pn,", derived from a Role, is invalid. Run Data Cleanup:",rpdd.errorString());
+ }
+ }
+ return Result.ok(rlpUser);
+ }
+
+ public Result<List<PermDAO.Data>> getPermsByType(AuthzTrans trans, String perm) {
+ Result<NsSplit> nss = deriveNsSplit(trans, perm);
+ if (nss.notOK()) {
+ return Result.err(nss);
+ }
+ return permDAO.readByType(trans, nss.value.ns, nss.value.name);
+ }
+
+ public Result<List<PermDAO.Data>> getPermsByName(AuthzTrans trans,
+ String type, String instance, String action) {
+ Result<NsSplit> nss = deriveNsSplit(trans, type);
+ if (nss.notOK()) {
+ return Result.err(nss);
+ }
+ return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action);
+ }
+
+ public Result<List<PermDAO.Data>> getPermsByRole(AuthzTrans trans, String role, boolean lookup) {
+ Result<NsSplit> nss = deriveNsSplit(trans, role);
+ if (nss.notOK()) {
+ return Result.err(nss);
+ }
+
+ Result<List<RoleDAO.Data>> rlrd = roleDAO.read(trans, nss.value.ns,
+ nss.value.name);
+ if (rlrd.notOKorIsEmpty()) {
+ return Result.err(rlrd);
+ }
+ // Using Set to avoid duplicates
+ Set<String> permNames = new HashSet<>();
+ if (rlrd.isOKhasData()) {
+ for (RoleDAO.Data drr : rlrd.value) {
+ permNames.addAll(drr.perms(false));
+ }
+ }
+
+ // Note: It should be ok for a Valid user to have no permissions -
+ // Jonathan 8/12/2013
+ List<PermDAO.Data> perms = new ArrayList<>();
+ for (String perm : permNames) {
+ Result<PermDAO.Data> pr = PermDAO.Data.decode(trans, this, perm);
+ if (pr.notOK()) {
+ return Result.err(pr);
+ }
+
+ if(lookup) {
+ Result<List<PermDAO.Data>> rlpd = permDAO.read(trans, pr.value);
+ if (rlpd.isOKhasData()) {
+ for (PermDAO.Data pData : rlpd.value) {
+ perms.add(pData);
+ }
+ }
+ } else {
+ perms.add(pr.value);
+ }
+ }
+
+ return Result.ok(perms);
+ }
+
+ public Result<List<RoleDAO.Data>> getRolesByName(AuthzTrans trans,
+ String role) {
+ Result<NsSplit> nss = deriveNsSplit(trans, role);
+ if (nss.notOK()) {
+ return Result.err(nss);
+ }
+ String r = nss.value.name;
+ if (r.endsWith(".*")) { // do children Search
+ return roleDAO.readChildren(trans, nss.value.ns,
+ r.substring(0, r.length() - 2));
+ } else if (ASTERIX.equals(r)) {
+ return roleDAO.readChildren(trans, nss.value.ns, ASTERIX);
+ } else {
+ return roleDAO.read(trans, nss.value.ns, r);
+ }
+ }
+
+ /**
+ * Derive NS
+ *
+ * Given a Child Namespace, figure out what the best Namespace parent is.
+ *
+ * For instance, if in the NS table, the parent "org.osaaf" exists, but not
+ * "org.osaaf.child" or "org.osaaf.a.b.c", then passing in either
+ * "org.osaaf.child" or "org.osaaf.a.b.c" will return "org.osaaf"
+ *
+ * Uses recursive search on Cached DAO data
+ *
+ * @param trans
+ * @param child
+ * @return
+ */
+ public Result<NsDAO.Data> deriveNs(AuthzTrans trans, String child) {
+ Result<List<NsDAO.Data>> r = nsDAO.read(trans, child);
+
+ if (r.isOKhasData()) {
+ return Result.ok(r.value.get(0));
+ } else {
+ int dot;
+ if(child==null) {
+ return Result.err(Status.ERR_NsNotFound, "No Namespace");
+ } else {
+ dot = child.lastIndexOf('.');
+ }
+ if (dot < 0) {
+ return Result.err(Status.ERR_NsNotFound, "No Namespace for [%s]", child);
+ } else {
+ return deriveNs(trans, child.substring(0, dot));
+ }
+ }
+ }
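+
+ // Usage sketch (illustrative; namespace names are assumed): per the javadoc above, if
+ // "org.osaaf" exists in the NS table but "org.osaaf.a.b.c" does not, the recursion strips
+ // one dotted element at a time until a stored Namespace (or no dot) remains:
+ //   Result<NsDAO.Data> rnd = q.deriveNs(trans, "org.osaaf.a.b.c");
+ //   if (rnd.isOK()) {
+ //       // rnd.value.name is "org.osaaf"
+ //   }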
+
+ public Result<NsDAO.Data> deriveFirstNsForType(AuthzTrans trans, String str, NsType type) {
+ NsDAO.Data nsd;
+
+ for(String lookup = str;!".".equals(lookup) && lookup!=null;) {
+ Result<List<NsDAO.Data>> rld = nsDAO.read(trans, lookup);
+ if(rld.isOKhasData()) {
+ nsd=rld.value.get(0);
+ lookup = nsd.parent;
+ if(type.type == nsd.type) {
+ return Result.ok(nsd);
+ }
+ } else {
+ return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str);
+ }
+ }
+ return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name());
+ }
+
+ public Result<NsSplit> deriveNsSplit(AuthzTrans trans, String child) {
+ Result<NsDAO.Data> ndd = deriveNs(trans, child);
+ if (ndd.isOK()) {
+ NsSplit nss = new NsSplit(ndd.value, child);
+ if (nss.isOK()) {
+ return Result.ok(nss);
+ } else {
+ return Result.err(Status.ERR_NsNotFound,
+ "Cannot split [%s] into valid namespace elements",
+ child);
+ }
+ }
+ return Result.err(ndd);
+ }
+
+ /**
+ * Translate an ID's domain into its Namespace
+ *
+ * i.e. myid1234@aaf.att.com results in the namespace com.att.aaf
+ *
+ * @param id
+ * @return
+ */
+ public static String domain2ns(String id) {
+ int at = id.indexOf('@');
+ if (at >= 0) {
+ String[] domain = id.substring(at + 1).split("\\.");
+ StringBuilder ns = new StringBuilder(id.length());
+ boolean first = true;
+ for (int i = domain.length - 1; i >= 0; --i) {
+ if (first) {
+ first = false;
+ } else {
+ ns.append('.');
+ }
+ ns.append(domain[i]);
+ }
+ return ns.toString();
+ } else {
+ return "";
+ }
+
+ }
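+
+ // Worked example (follows the javadoc above): everything after '@' is split on '.' and
+ // re-joined in reverse order, so the domain "aaf.att.com" becomes namespace "com.att.aaf":
+ //   assert "com.att.aaf".equals(Question.domain2ns("myid1234@aaf.att.com"));
+ //   assert "".equals(Question.domain2ns("id-without-at-sign")); // no '@' yields ""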
+
+ /**
+ * Validate Namespace of ID@Domain
+ *
+ * Namespace is reverse order of Domain.
+ *
+ * @param trans
+ * @param id
+ * @return
+ */
+ public Result<NsDAO.Data> validNSOfDomain(AuthzTrans trans, String id) {
+ // Take domain, reverse order, and check on NS
+ String ns;
+ if(id.indexOf('@')<0) { // it's already an ns, not an ID
+ ns = id;
+ } else {
+ ns = domain2ns(id);
+ }
+ if (ns.length() > 0) {
+ if(!trans.org().getDomain().equals(ns)) {
+ Result<List<NsDAO.Data>> rlnsd = nsDAO.read(trans, ns);
+ if (rlnsd.isOKhasData()) {
+ return Result.ok(rlnsd.value.get(0));
+ }
+ }
+ }
+ return Result.err(Status.ERR_NsNotFound,
+ "A Namespace is not available for %s", id);
+ }
+
+ public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) {
+ // <ns>.access|:role:<role name>|<read|write>
+ String ns = ndd.name;
+ int last;
+ do {
+ if (isGranted(trans, user, ns, ACCESS, ":ns", access.name())) {
+ return Result.ok(ndd);
+ }
+ if ((last = ns.lastIndexOf('.')) >= 0) {
+ ns = ns.substring(0, last);
+ }
+ } while (last >= 0);
+ // com.att.aaf.ns|:<client ns>:ns|<access>
+ // AAF-724 - Make consistent response for "May User", and not take the
+ // last check... too confusing.
+ Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + ndd.name + ":ns", access.name());
+ if (rv.isOK()) {
+ return rv;
+ } else if(rv.status==Result.ERR_Backend) {
+ return Result.err(rv);
+ } else {
+ return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]",
+ user, access.name(), ndd.name);
+ }
+ }
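+
+ // Usage sketch (illustrative; the NsDAO.Data "nsData" and the user id are assumed): the loop
+ // above accepts callers holding "<ns>.access|:ns|<access>" on the target Namespace or any
+ // parent; otherwise mayUserVirtueOfNS falls back to admin roles and the root-NS grant:
+ //   Result<NsDAO.Data> r = q.mayUser(trans, "someone@people.osaaf.org", nsData, Access.write);
+ //   if (r.notOK()) {
+ //       // caller may not write within nsData.name
+ //   }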
+
+ public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) {
+ Result<NsDAO.Data> rnsd = deriveNs(trans, rdd.ns);
+ if (rnsd.isOK()) {
+ return mayUser(trans, user, rnsd.value, rdd, access);
+ }
+ return rnsd;
+ }
+
+ public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) {
+ // 1) Is User in the Role?
+ Result<List<UserRoleDAO.Data>> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName());
+ if (rurd.isOKhasData()) {
+ return Result.ok(ndd);
+ }
+
+ String roleInst = ":role:" + rdd.name;
+ // <ns>.access|:role:<role name>|<read|write>
+ String ns = rdd.ns;
+ int last;
+ do {
+ if (isGranted(trans, user, ns,ACCESS, roleInst, access.name())) {
+ return Result.ok(ndd);
+ }
+ if ((last = ns.lastIndexOf('.')) >= 0) {
+ ns = ns.substring(0, last);
+ }
+ } while (last >= 0);
+
+ // Check if Access by Global Role perm
+ // com.att.aaf.ns|:<client ns>:role:name|<access>
+ Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":"
+ + rdd.ns + roleInst, access.name());
+ if (rnsd.isOK()) {
+ return rnsd;
+ } else if(rnsd.status==Result.ERR_Backend) {
+ return Result.err(rnsd);
+ }
+
+ // Check if Access to Whole NS
+ // AAF-724 - Make consistent response for "May User", and not take the
+ // last check... too confusing.
+ Result<org.onap.aaf.auth.dao.cass.NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd,
+ ":" + rdd.ns + ":ns", access.name());
+ if (rv.isOK()) {
+ return rv;
+ } else if(rv.status==Result.ERR_Backend) {
+ return Result.err(rv);
+ } else {
+ return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]",
+ user, access.name(), rdd.fullName());
+ }
+
+ }
+
+ public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) {
+ Result<NsDAO.Data> rnsd = deriveNs(trans, pdd.ns);
+ if (rnsd.isOK()) {
+ return mayUser(trans, user, rnsd.value, pdd, access);
+ }
+ return rnsd;
+ }
+
+ public Result<NsDAO.Data> mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) {
+ if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) {
+ return Result.ok(ndd);
+ }
+ String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action;
+ // <ns>.access|:role:<role name>|<read|write>
+ String ns = ndd.name;
+ int last;
+ do {
+ if (isGranted(trans, user, ns, ACCESS, permInst, access.name())) {
+ return Result.ok(ndd);
+ }
+ if ((last = ns.lastIndexOf('.')) >= 0) {
+ ns = ns.substring(0, last);
+ }
+ } while (last >= 0);
+
+ // Check if Access by NS perm
+ // com.att.aaf.ns|:<client ns>:role:name|<access>
+ Result<NsDAO.Data> rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name());
+ if (rnsd.isOK()) {
+ return rnsd;
+ } else if(rnsd.status==Result.ERR_Backend) {
+ return Result.err(rnsd);
+ }
+
+ // Check if Access to Whole NS
+ // AAF-724 - Make consistent response for "May User", and not take the
+ // last check... too confusing.
+ Result<NsDAO.Data> rv = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + ":ns", access.name());
+ if (rv.isOK()) {
+ return rv;
+ } else {
+ return Result.err(Status.ERR_Denied,
+ "[%s] may not %s Perm [%s|%s|%s]", user, access.name(),
+ pdd.fullType(), pdd.instance, pdd.action);
+ }
+
+ }
+
+ public Result<Void> mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) {
+ try {
+ Result<NsDAO.Data> rnsd = deriveNs(trans, domain2ns(trans.user()));
+ if(rnsd.isOKhasData() && mayUserVirtueOfNS(trans,trans.user(),rnsd.value, ":" + rnsd.value.name + ":ns", access.name()).isOK()) {
+ return Result.ok();
+ }
+ boolean isUser = trans.user().equals(dd.user);
+ boolean isDelegate = dd.delegate != null
+ && (dd.user.equals(dd.delegate) || trans.user().equals(
+ dd.delegate));
+ Organization org = trans.org();
+ switch (access) {
+ case create:
+ if (org.getIdentity(trans, dd.user) == null) {
+ return Result.err(Status.ERR_UserNotFound,
+ "[%s] is not a user in the company database.",
+ dd.user);
+ }
+ if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) {
+ return Result.err(Status.ERR_UserNotFound,
+ "[%s] is not a user in the company database.",
+ dd.delegate);
+ }
+ if (!trans.requested(REQD_TYPE.force) && dd.user != null && dd.user.equals(dd.delegate)) {
+ return Result.err(Status.ERR_BadData,
+ "[%s] cannot be a delegate for self", dd.user);
+ }
+ if (!isUser && !isGranted(trans, trans.user(), ROOT_NS,DELG,
+ org.getDomain(), Question.CREATE)) {
+ return Result.err(Status.ERR_Denied,
+ "[%s] may not create a delegate for [%s]",
+ trans.user(), dd.user);
+ }
+ break;
+ case read:
+ case write:
+ if (!isUser && !isDelegate &&
+ !isGranted(trans, trans.user(), ROOT_NS,DELG,org.getDomain(), access.name())) {
+ return Result.err(Status.ERR_Denied,
+ "[%s] may not %s delegates for [%s]", trans.user(),
+ access.name(), dd.user);
+ }
+ break;
+ default:
+ return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name());
+ }
+ } catch (Exception e) {
+ return Result.err(e);
+ }
+ return Result.ok();
+ }
+
+ /*
+ * Check (recursively, if necessary), if able to do something based on NS
+ */
+ private Result<NsDAO.Data> mayUserVirtueOfNS(AuthzTrans trans, String user, NsDAO.Data nsd, String ns_and_type, String access) {
+ String ns = nsd.name;
+
+ // If an ADMIN of the Namespace, then allow
+
+ Result<List<UserRoleDAO.Data>> rurd;
+ if ((rurd = userRoleDAO.readUserInRole(trans, user, ns+DOT_ADMIN)).isOKhasData()) {
+ return Result.ok(nsd);
+ } else if(rurd.status==Result.ERR_Backend) {
+ return Result.err(rurd);
+ }
+
+ // If Specially granted Global Permission
+ if (isGranted(trans, user, ROOT_NS,NS, ns_and_type, access)) {
+ return Result.ok(nsd);
+ }
+
+ // Check recur
+
+ int dot = ns.length();
+ if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) {
+ Result<NsDAO.Data> rnsd = deriveNs(trans, ns.substring(0, dot));
+ if (rnsd.isOK()) {
+ rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access);
+ } else if(rnsd.status==Result.ERR_Backend) {
+ return Result.err(rnsd);
+ }
+ if (rnsd.isOK()) {
+ return Result.ok(nsd);
+ } else if(rnsd.status==Result.ERR_Backend) {
+ return Result.err(rnsd);
+ }
+ }
+ return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access,
+ ns_and_type);
+ }
+
+
+ /**
+ * isGranted
+ *
+ * Important function - Check internal Permission Schemes for Permission to
+ * do things
+ *
+ * @param trans
+ * @param type
+ * @param instance
+ * @param action
+ * @return
+ */
+ public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) {
+ Result<List<PermDAO.Data>> perms = getPermsByUser(trans, user, false);
+ if (perms.isOK()) {
+ for (PermDAO.Data pd : perms.value) {
+ if (ns.equals(pd.ns)) {
+ if (type.equals(pd.type)) {
+ if (PermEval.evalInstance(pd.instance, instance)) {
+ if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action
+ return true;
+ }
+ }
+ }
+ }
+ }
+ }
+ return false;
+ }
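+
+ // Usage sketch (illustrative; all literal values are assumed): the caller's cached
+ // permissions are scanned, with PermEval wildcard matching on instance and action:
+ //   boolean ok = q.isGranted(trans, "someone@people.osaaf.org",
+ //           "org.osaaf.aaf", "access", ":ns", "read");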
+
+ public Result<Date> doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException {
+ Result<List<CredDAO.Data>> result;
+ TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE);
+ try {
+ result = credDAO.readID(trans, user);
+ } finally {
+ tt.done();
+ }
+
+ Result<Date> rv = null;
+ if(result.isOK()) {
+ if (result.isEmpty()) {
+ rv = Result.err(Status.ERR_UserNotFound, user);
+ if (willSpecialLog(trans,user)) {
+ trans.audit().log("Special DEBUG:", user, " does not exist in DB");
+ }
+ } else {
+ Date now = new Date();//long now = System.currentTimeMillis();
+ // Bug noticed 6/22. Sorting on the result can cause Concurrency Issues.
+ List<CredDAO.Data> cddl;
+ if(result.value.size() > 1) {
+ cddl = new ArrayList<>(result.value.size());
+ for(CredDAO.Data old : result.value) {
+ if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) {
+ cddl.add(old);
+ }
+ }
+ if(cddl.size()>1) {
+ Collections.sort(cddl,new Comparator<CredDAO.Data>() {
+ @Override
+ public int compare(org.onap.aaf.auth.dao.cass.CredDAO.Data a,
+ org.onap.aaf.auth.dao.cass.CredDAO.Data b) {
+ return b.expires.compareTo(a.expires);
+ }
+ });
+ }
+ } else {
+ cddl = result.value;
+ }
+
+ Date expired = null;
+ StringBuilder debug = willSpecialLog(trans,user)?new StringBuilder():null;
+ for (CredDAO.Data cdd : cddl) {
+ if(!cdd.id.equals(user)) {
+ trans.error().log("doesUserCredMatch DB call does not match for user: " + user);
+ }
+ if (cdd.expires.after(now)) {
+ byte[] dbcred = cdd.cred.array();
+
+ try {
+ switch(cdd.type) {
+ case CredDAO.BASIC_AUTH:
+ byte[] md5=Hash.hashMD5(cred);
+ if(Hash.compareTo(md5,dbcred)==0) {
+ checkLessThanDays(trans,7,now,cdd);
+ return Result.ok(cdd.expires);
+ } else if (debug!=null) {
+ load(debug, cdd);
+ }
+ break;
+ case CredDAO.BASIC_AUTH_SHA256:
+ ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length);
+ bb.putInt(cdd.other);
+ bb.put(cred);
+ byte[] hash = Hash.hashSHA256(bb.array());
+
+ if(Hash.compareTo(hash,dbcred)==0) {
+ checkLessThanDays(trans,7,now,cdd);
+ return Result.ok(cdd.expires);
+ } else if (debug!=null) {
+ load(debug, cdd);
+ }
+ break;
+ default:
+ trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires));
+ }
+ } catch (NoSuchAlgorithmException e) {
+ trans.error().log(e);
+ }
+ } else {
+ if(expired==null || expired.before(cdd.expires)) {
+ expired = cdd.expires;
+ }
+ }
+ } // end for each
+ if(debug==null) {
+ trans.audit().printf("No cred matches ip=%s, user=%s\n",trans.ip(),user);
+ } else {
+ trans.audit().printf("No cred matches ip=%s, user=%s %s\n",trans.ip(),user,debug.toString());
+ }
+ if(expired!=null) {
+ // Note: this is only returned if there are no good Credentials
+ rv = Result.err(Status.ERR_Security,
+ "Credentials %s from %s expired %s",trans.user(), trans.ip(), Chrono.dateTime(expired));
+ }
+ }
+ } else {
+ return Result.err(result);
+ }
+ return rv == null ? Result.create((Date) null, Status.ERR_Security, "Wrong credential") : rv;
+ }
+
+
+ private void load(StringBuilder debug, Data cdd) {
+ debug.append("DB Entry: user=");
+ debug.append(cdd.id);
+ debug.append(",type=");
+ debug.append(cdd.type);
+ debug.append(",expires=");
+ debug.append(Chrono.dateTime(cdd.expires));
+ debug.append('\n');
+ }
+
+
+ private void checkLessThanDays(AuthzTrans trans, int days, Date now, Data cdd) {
+ long close = now.getTime() + (days * 86400000);
+ long cexp=cdd.expires.getTime();
+ if(cexp<close) {
+ int daysLeft = days-(int)((close-cexp)/86400000);
+ trans.audit().printf("user=%s,ip=%s,expires=%s,days=%d,msg=\"Password expires in less than %d day%s\"",
+ cdd.id,trans.ip(),Chrono.dateOnlyStamp(cdd.expires),daysLeft, daysLeft,daysLeft==1?"":"s");
+ }
+ }
+
+
+ public Result<CredDAO.Data> userCredSetup(AuthzTrans trans, CredDAO.Data cred) {
+ if(cred.type==CredDAO.RAW) {
+ TimeTaken tt = trans.start("Hash Cred", Env.SUB);
+ try {
+ cred.type = CredDAO.BASIC_AUTH_SHA256;
+ cred.other = random.nextInt();
+ ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity());
+ bb.putInt(cred.other);
+ bb.put(cred.cred);
+ byte[] hash = Hash.hashSHA256(bb.array());
+ cred.cred = ByteBuffer.wrap(hash);
+ return Result.ok(cred);
+ } catch (NoSuchAlgorithmException e) {
+ return Result.err(Status.ERR_General,e.getLocalizedMessage());
+ } finally {
+ tt.done();
+ }
+
+ }
+ return Result.err(Status.ERR_Security,"invalid/unreadable credential");
+ }
+
+ public Result<Boolean> userCredCheck(AuthzTrans trans, CredDAO.Data orig, final byte[] raw) {
+ TimeTaken tt = trans.start("CheckCred Cred", Env.SUB);
+ try {
+ switch(orig.type) {
+ case CredDAO.BASIC_AUTH_SHA256:
+ ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + raw.length);
+ bb.putInt(orig.other);
+ bb.put(raw);
+ return Result.ok(Hash.compareTo(orig.cred.array(),Hash.hashSHA256(bb.array()))==0);
+ case CredDAO.BASIC_AUTH:
+ return Result.ok( Hash.compareTo(orig.cred.array(), Hash.hashMD5(raw))==0);
+ default:
+ return Result.ok(false);
+ }
+ } catch (NoSuchAlgorithmException e) {
+ return Result.err(Status.ERR_General,e.getLocalizedMessage());
+ } finally {
+ tt.done();
+ }
+ }
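+
+ // Usage sketch (illustrative; the password bytes and the no-arg Data construction are
+ // assumed): userCredSetup salts a RAW credential with a random int and stores the SHA-256
+ // hash; userCredCheck replays the same salt + hash to verify raw bytes later:
+ //   CredDAO.Data cdd = new CredDAO.Data();
+ //   cdd.type = CredDAO.RAW;
+ //   cdd.cred = ByteBuffer.wrap("new-password".getBytes());
+ //   Result<CredDAO.Data> stored = q.userCredSetup(trans, cdd);
+ //   if (stored.isOK()) {
+ //       Result<Boolean> match = q.userCredCheck(trans, stored.value, "new-password".getBytes());
+ //       // match.value is TRUE for the same raw bytes, FALSE otherwise
+ //   }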
+
+ public static final String APPROVED = "APPROVE";
+ public static final String REJECT = "REJECT";
+ public static final String PENDING = "PENDING";
+
+ public Result<Void> canAddUser(AuthzTrans trans, UserRoleDAO.Data data,
+ List<ApprovalDAO.Data> approvals) {
+ // get the approval policy for the organization
+
+ // get the list of approvals with an accept status
+
+ // validate the approvals against the policy
+
+ // for now check if all approvals are received and return
+ // SUCCESS/FAILURE/SKIP
+ boolean bReject = false;
+ boolean bPending = false;
+
+ for (ApprovalDAO.Data approval : approvals) {
+ if (approval.status.equals(REJECT)) {
+ bReject = true;
+ } else if (approval.status.equals(PENDING)) {
+ bPending = true;
+ }
+ }
+ if (bReject) {
+ return Result.err(Status.ERR_Policy,
+ "Approval Polocy not conformed");
+ }
+ if (bPending) {
+ return Result.err(Status.ERR_ActionNotCompleted,
+ "Required Approvals not received");
+ }
+
+ return Result.ok();
+ }
+
+ private static final String NO_CACHE_NAME = "No Cache Data named %s";
+
+ public Result<Void> clearCache(AuthzTrans trans, String cname) {
+ boolean all = "all".equals(cname);
+ Result<Void> rv = null;
+
+ if (all || NsDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg);
+ }
+ if (all || PermDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg);
+ }
+ if (all || RoleDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg);
+ }
+ if (all || UserRoleDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg);
+ }
+ if (all || CredDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg);
+ }
+ if (all || CertDAO.TABLE.equals(cname)) {
+ int seg[] = series(NsDAO.CACHE_SEG);
+ for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);}
+ rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg);
+ }
+
+ if (rv == null) {
+ rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
+ }
+ return rv;
+ }
+
+ public Result<Void> cacheClear(AuthzTrans trans, String cname,Integer segment) {
+ Result<Void> rv;
+ if (NsDAO.TABLE.equals(cname)) {
+ rv = nsDAO.invalidate(segment);
+ } else if (PermDAO.TABLE.equals(cname)) {
+ rv = permDAO.invalidate(segment);
+ } else if (RoleDAO.TABLE.equals(cname)) {
+ rv = roleDAO.invalidate(segment);
+ } else if (UserRoleDAO.TABLE.equals(cname)) {
+ rv = userRoleDAO.invalidate(segment);
+ } else if (CredDAO.TABLE.equals(cname)) {
+ rv = credDAO.invalidate(segment);
+ } else if (CertDAO.TABLE.equals(cname)) {
+ rv = certDAO.invalidate(segment);
+ } else {
+ rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname);
+ }
+ return rv;
+ }
+
+ private int[] series(int max) {
+ int[] series = new int[max];
+ for (int i = 0; i < max; ++i)
+ series[i] = i;
+ return series;
+ }
+
+ public boolean isDelegated(AuthzTrans trans, String user, String approver, Map<String,Result<List<DelegateDAO.Data>>> rldd ) {
+ Result<List<DelegateDAO.Data>> userDelegatedFor = rldd.get(user);
+ if(userDelegatedFor==null) {
+ userDelegatedFor=delegateDAO.readByDelegate(trans, user);
+ rldd.put(user, userDelegatedFor);
+ }
+ if(userDelegatedFor.isOKhasData()) {
+ for (DelegateDAO.Data curr : userDelegatedFor.value) {
+ if (curr.user.equals(approver) && curr.delegate.equals(user)
+ && curr.expires.after(new Date())) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ public static boolean willSpecialLog(AuthzTrans trans, String user) {
+ Boolean b = trans.get(specialLogSlot, null);
+ if(b==null) { // we haven't evaluated in this trans for Special Log yet
+ if(specialLog==null) {
+ return false;
+ } else {
+ b = specialLog.contains(user);
+ trans.put(specialLogSlot, b);
+ }
+ }
+ return b;
+ }
+
+ public static void logEncryptTrace(AuthzTrans trans, String data) {
+ long ti;
+ trans.put(transIDSlot, ti=nextTraceID());
+ trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"');
+ }
+
+ private synchronized static long nextTraceID() {
+ return ++traceID;
+ }
+
+ public static synchronized boolean specialLogOn(AuthzTrans trans, String id) {
+ if (specialLog == null) {
+ specialLog = new HashSet<>();
+ }
+ boolean rc = specialLog.add(id);
+ if(rc) {
+ trans.trace().printf("Trace on for %s requested by %s",id,trans.user());
+ }
+ return rc;
+ }
+
+ public static synchronized boolean specialLogOff(AuthzTrans trans, String id) {
+ if(specialLog==null) {
+ return false;
+ }
+ boolean rv = specialLog.remove(id);
+ if (specialLog.isEmpty()) {
+ specialLog = null;
+ }
+ if(rv) {
+ trans.trace().printf("Trace off for %s requested by %s",id,trans.user());
+ }
+ return rv;
+ }
+
+ /**
+ * canMove
+ * Which Types can be moved
+ * @param nsType
+ * @return
+ */
+ public boolean canMove(NsType nsType) {
+ boolean rv;
+ switch(nsType) {
+ case DOT:
+ case ROOT:
+ case COMPANY:
+ case UNKNOWN:
+ rv = false;
+ break;
+ default:
+ rv = true;
+ }
+ return rv;
+ }
+
+ public boolean isAdmin(AuthzTrans trans, String user, String ns) {
+ Date now = new Date();
+ Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_ADMIN);
+ if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+ if(urdd.expires.after(now)) {
+ return true;
+ }
+ }};
+ return false;
+ }
+
+ public boolean isOwner(AuthzTrans trans, String user, String ns) {
+ Result<List<UserRoleDAO.Data>> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER);
+ Date now = new Date();
+ if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+ if(urdd.expires.after(now)) {
+ return true;
+ }
+ }};
+ return false;
+ }
+
+ public int countOwner(AuthzTrans trans, String ns) {
+ Result<List<UserRoleDAO.Data>> rur = userRoleDAO.readByRole(trans,ns+DOT_OWNER);
+ Date now = new Date();
+ int count = 0;
+ if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){
+ if(urdd.expires.after(now)) {
+ ++count;
+ }
+ }};
+ return count;
+ }
+
+ /**
+ * Return a Unique String, (same string, if it is already unique), with only
+ * lowercase letters, digits and the '.' character.
+ *
+ * @param name
+ * @return
+ * @throws IOException
+ */
+ public static String toUnique(String name) throws IOException {
+ byte[] from = name.getBytes();
+ StringBuilder sb = new StringBuilder();
+ byte f;
+ for(int i=0;i<from.length;++i) {
+ f=(byte)(from[i]); // printables;
+ sb.append((char)((f>>4)+0x61));
+ sb.append((char)((f&0x0F)+0x61));
+ }
+ return sb.toString();
+ }
+
+ public static String fromUnique(String name) throws IOException {
+ byte[] from = name.getBytes();
+ StringBuilder sb = new StringBuilder();
+ char c;
+ for(int i=0;i<from.length;++i) {
+ c = (char)((from[i]-0x61)<<4);
+ c |= (from[++i]-0x61);
+ sb.append(c);
+ }
+ return sb.toString();
+ }
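+
+ // Worked example (follows directly from the nibble arithmetic above): each ASCII input byte
+ // maps to two characters in the range 'a'..'p', and fromUnique reverses the mapping exactly:
+ //   assert "gbgcgd".equals(Question.toUnique("abc")); // 0x61,0x62,0x63 -> "gb","gc","gd"
+ //   assert "abc".equals(Question.fromUnique("gbgcgd"));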
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java
index 586ae4df..1fb050b8 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLocator.java
@@ -41,92 +41,92 @@ import org.onap.aaf.misc.env.util.Split;
import locate.v1_0.Endpoint;
public class DirectAAFLocator extends AbsAAFLocator<AuthzTrans> {
- private LocateDAO ldao;
- private int major=-1, minor=-1, patch=-1, pkg=-1;
- private AuthzEnv env;
- private final URI uri;
+ private LocateDAO ldao;
+ private int major=-1, minor=-1, patch=-1, pkg=-1;
+ private AuthzEnv env;
+ private final URI uri;
- /**
- *
- * @param env
- * @param ldao
- * @param name service name to look up via LocateDAO
- * @param version dotted version filter: major[.minor[.patch[.pkg]]]
- * @throws LocatorException
- */
- public DirectAAFLocator(AuthzEnv env, LocateDAO ldao, String name, String version) throws LocatorException {
- super(env.access(), name, 1000L /* Don't hit DB more than once a second */);
- this.env = env;
- this.ldao = ldao;
- if(version!=null) {
- try {
- String[] v = Split.split('.',version);
- if(v.length>0) {major = Integer.parseInt(v[0]);}
- if(v.length>1) {minor = Integer.parseInt(v[1]);}
- if(v.length>2) {patch = Integer.parseInt(v[2]);}
- if(v.length>3) {pkg = Integer.parseInt(v[3]);}
- } catch (NumberFormatException e) {
- throw new LocatorException("Invalid Version String: " + version);
- }
- }
-
- try {
- uri = new URI(access.getProperty(Config.AAF_LOCATE_URL, "localhost")+"/locate/"+name+':'+version);
- } catch (URISyntaxException e) {
- throw new LocatorException(e);
- }
- myhostname=null;
- myport = 0;
- }
-
-
- @Override
- public boolean refresh() {
- AuthzTrans trans = env.newTransNoAvg();
- Result<List<Data>> rl = ldao.readByName(trans, name);
- if(rl.isOK()) {
- LinkedList<EP> epl = new LinkedList<>();
- for(Data d : rl.value) {
-// if(myhostname!=null && d.port==myport && d.hostname.equals(myhostname)) {
-// continue;
-// }
- if((major<0 || major==d.major) &&
- (minor<0 || minor<=d.minor) &&
- (patch<0 || patch==d.patch) &&
- (pkg<0 || pkg ==d.pkg)) {
- Endpoint endpoint = new Endpoint();
- endpoint.setName(d.name);
- endpoint.setHostname(d.hostname);
- endpoint.setPort(d.port);
- endpoint.setMajor(d.major);
- endpoint.setMinor(d.minor);
- endpoint.setPatch(d.patch);
- endpoint.setPkg(d.pkg);
- endpoint.setLatitude(d.latitude);
- endpoint.setLongitude(d.longitude);
- endpoint.setProtocol(d.protocol);
- for(String s : d.subprotocol(false)) {
- endpoint.getSubprotocol().add(s);
- }
-
- try {
- epl.add(new EP(endpoint,latitude,longitude));
- } catch (URISyntaxException e) {
- e.printStackTrace();
- }
- }
- }
- Collections.sort(epl);
- replace(epl);
- return true;
- } else {
- access.log(Level.ERROR, rl.errorString());
- }
- return false;
- }
+ /**
+ *
+ * @param env
+ * @param ldao
+ * @param name service name to look up via LocateDAO
+ * @param version dotted version filter: major[.minor[.patch[.pkg]]]
+ * @throws LocatorException
+ */
+ public DirectAAFLocator(AuthzEnv env, LocateDAO ldao, String name, String version) throws LocatorException {
+ super(env.access(), name, 1000L /* Don't hit DB more than once a second */);
+ this.env = env;
+ this.ldao = ldao;
+ if(version!=null) {
+ try {
+ String[] v = Split.split('.',version);
+ if(v.length>0) {major = Integer.parseInt(v[0]);}
+ if(v.length>1) {minor = Integer.parseInt(v[1]);}
+ if(v.length>2) {patch = Integer.parseInt(v[2]);}
+ if(v.length>3) {pkg = Integer.parseInt(v[3]);}
+ } catch (NumberFormatException e) {
+ throw new LocatorException("Invalid Version String: " + version);
+ }
+ }
+
+ try {
+ uri = new URI(access.getProperty(Config.AAF_LOCATE_URL, "localhost")+"/locate/"+name+':'+version);
+ } catch (URISyntaxException e) {
+ throw new LocatorException(e);
+ }
+ myhostname=null;
+ myport = 0;
+ }
+
+
+ @Override
+ public boolean refresh() {
+ AuthzTrans trans = env.newTransNoAvg();
+ Result<List<Data>> rl = ldao.readByName(trans, name);
+ if(rl.isOK()) {
+ LinkedList<EP> epl = new LinkedList<>();
+ for(Data d : rl.value) {
+// if(myhostname!=null && d.port==myport && d.hostname.equals(myhostname)) {
+// continue;
+// }
+ if((major<0 || major==d.major) &&
+ (minor<0 || minor<=d.minor) &&
+ (patch<0 || patch==d.patch) &&
+ (pkg<0 || pkg ==d.pkg)) {
+ Endpoint endpoint = new Endpoint();
+ endpoint.setName(d.name);
+ endpoint.setHostname(d.hostname);
+ endpoint.setPort(d.port);
+ endpoint.setMajor(d.major);
+ endpoint.setMinor(d.minor);
+ endpoint.setPatch(d.patch);
+ endpoint.setPkg(d.pkg);
+ endpoint.setLatitude(d.latitude);
+ endpoint.setLongitude(d.longitude);
+ endpoint.setProtocol(d.protocol);
+ for(String s : d.subprotocol(false)) {
+ endpoint.getSubprotocol().add(s);
+ }
+
+ try {
+ epl.add(new EP(endpoint,latitude,longitude));
+ } catch (URISyntaxException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ Collections.sort(epl);
+ replace(epl);
+ return true;
+ } else {
+ access.log(Level.ERROR, rl.errorString());
+ }
+ return false;
+ }
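+
+ // Illustrative note (constructor arguments are assumed): a version of "2.1" parses to
+ // major=2, minor=1, patch=-1, pkg=-1, so refresh() keeps endpoints with major == 2 and
+ // minor >= 1 while leaving patch and pkg unconstrained:
+ //   DirectAAFLocator loc = new DirectAAFLocator(env, locateDAO, "org.osaaf.aaf.service", "2.1");
+ //   if (loc.refresh()) {
+ //       // matching endpoints were read from LocateDAO, sorted and installed via replace(epl)
+ //   }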
- @Override
- protected URI getURI() {
- return uri;
- }
+ @Override
+ protected URI getURI() {
+ return uri;
+ }
}
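
Note on the hunk above: refresh() filters LocateDAO records against the version string parsed in the constructor. A field left at its default (negative) acts as a wildcard; major, patch and pkg must match exactly; minor is treated as a minimum. A minimal standalone sketch of that rule, assuming hypothetical names (VersionFilter, matches) that are not part of the codebase:

public class VersionFilter {
    private int major = -1, minor = -1, patch = -1, pkg = -1;

    public VersionFilter(String version) {
        if (version != null) {
            String[] v = version.split("\\.");
            if (v.length > 0) { major = Integer.parseInt(v[0]); }
            if (v.length > 1) { minor = Integer.parseInt(v[1]); }
            if (v.length > 2) { patch = Integer.parseInt(v[2]); }
            if (v.length > 3) { pkg   = Integer.parseInt(v[3]); }
        }
    }

    // Mirrors the condition in refresh(): wildcard on unset fields, "at least" on minor.
    public boolean matches(int dMajor, int dMinor, int dPatch, int dPkg) {
        return (major < 0 || major == dMajor)
            && (minor < 0 || minor <= dMinor)
            && (patch < 0 || patch == dPatch)
            && (pkg   < 0 || pkg   == dPkg);
    }
}

For example, new VersionFilter("2.1").matches(2, 7, 0, 0) is true, while matches(3, 1, 0, 0) is not.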
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java
index eb44e143..75f6c7a5 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFLur.java
@@ -42,158 +42,158 @@ import org.onap.aaf.cadi.lur.LocalPermission;
import org.onap.aaf.misc.env.util.Split;
public class DirectAAFLur implements Lur {
- private final AuthzEnv env;
- private final Question question;
-
- public DirectAAFLur(AuthzEnv env, Question question/*, TokenMgr tm*/) {
- this.env = env;
- this.question = question;
-// oauth = new OAuth2Lur(null);
- }
+ private final AuthzEnv env;
+ private final Question question;
+
+ public DirectAAFLur(AuthzEnv env, Question question/*, TokenMgr tm*/) {
+ this.env = env;
+ this.question = question;
+// oauth = new OAuth2Lur(null);
+ }
- @Override
- public boolean fish(Principal bait, Permission ... pond) {
- return fish(env.newTransNoAvg(),bait,pond);
- }
-
- public boolean fish(AuthzTrans trans, Principal bait, Permission ... pond) {
- boolean rv = false;
- Result<List<Data>> pdr = question.getPermsByUser(trans, bait.getName(),false);
- switch(pdr.status) {
- case OK:
- for(PermDAO.Data d : pdr.value) {
- if(!rv) {
- for (Permission p : pond) {
- if(new PermPermission(d).match(p)) {
- rv=true;
- break;
- }
- }
- }
- }
- break;
- case Status.ERR_UserRoleNotFound:
- case Status.ERR_BadData:
- return false;
- default:
- trans.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-",pdr.details);
- }
- return rv;
- }
+ @Override
+ public boolean fish(Principal bait, Permission ... pond) {
+ return fish(env.newTransNoAvg(),bait,pond);
+ }
+
+ public boolean fish(AuthzTrans trans, Principal bait, Permission ... pond) {
+ boolean rv = false;
+ Result<List<Data>> pdr = question.getPermsByUser(trans, bait.getName(),false);
+ switch(pdr.status) {
+ case OK:
+ for(PermDAO.Data d : pdr.value) {
+ if(!rv) {
+ for (Permission p : pond) {
+ if(new PermPermission(d).match(p)) {
+ rv=true;
+ break;
+ }
+ }
+ }
+ }
+ break;
+ case Status.ERR_UserRoleNotFound:
+ case Status.ERR_BadData:
+ return false;
+ default:
+ trans.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-",pdr.details);
+ }
+ return rv;
+ }
- @Override
- public void fishAll(Principal bait, List<Permission> permissions) {
- Result<List<Data>> pdr = question.getPermsByUser(env.newTrans(), bait.getName(),false);
- switch(pdr.status) {
- case OK:
- for(PermDAO.Data d : pdr.value) {
- permissions.add(new PermPermission(d));
- }
- break;
- default:
- env.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-", pdr.details);
- }
- }
-
- @Override
- public void destroy() {
- }
+ @Override
+ public void fishAll(Principal bait, List<Permission> permissions) {
+ Result<List<Data>> pdr = question.getPermsByUser(env.newTrans(), bait.getName(),false);
+ switch(pdr.status) {
+ case OK:
+ for(PermDAO.Data d : pdr.value) {
+ permissions.add(new PermPermission(d));
+ }
+ break;
+ default:
+ env.error().log("Can't access Cassandra to fulfill Permission Query: ",pdr.status,"-", pdr.details);
+ }
+ }
+
+ @Override
+ public void destroy() {
+ }
- @Override
- public boolean handlesExclusively(Permission ... pond) {
- return false;
- }
-
- /**
- * Small Class implementing CADI's Permission with Cassandra Data
- * @author Jonathan
- *
- */
- public static class PermPermission implements Permission {
- private PermDAO.Data data;
-
- public PermPermission(PermDAO.Data d) {
- data = d;
- }
-
- public PermPermission(AuthzTrans trans, Question q, String p) {
- data = PermDAO.Data.create(trans, q, p);
- }
-
- public PermPermission(String ns, String type, String instance, String action) {
- data = new PermDAO.Data();
- data.ns = ns;
- data.type = type;
- data.instance = instance;
- data.action = action;
- }
+ @Override
+ public boolean handlesExclusively(Permission ... pond) {
+ return false;
+ }
+
+ /**
+ * Small Class implementing CADI's Permission with Cassandra Data
+ * @author Jonathan
+ *
+ */
+ public static class PermPermission implements Permission {
+ private PermDAO.Data data;
+
+ public PermPermission(PermDAO.Data d) {
+ data = d;
+ }
+
+ public PermPermission(AuthzTrans trans, Question q, String p) {
+ data = PermDAO.Data.create(trans, q, p);
+ }
+
+ public PermPermission(String ns, String type, String instance, String action) {
+ data = new PermDAO.Data();
+ data.ns = ns;
+ data.type = type;
+ data.instance = instance;
+ data.action = action;
+ }
- @Override
- public String getKey() {
- return data.type;
- }
+ @Override
+ public String getKey() {
+ return data.type;
+ }
- @Override
- public boolean match(Permission p) {
- if(p==null) {
- return false;
- }
- PermDAO.Data pd;
- if(p instanceof DirectAAFLur.PermPermission) {
- pd = ((DirectAAFLur.PermPermission)p).data;
- if(data.ns.equals(pd.ns))
- if(data.type.equals(pd.type))
- if(data.instance!=null && (data.instance.equals(pd.instance) || "*".equals(data.instance)))
- if(data.action!=null && (data.action.equals(pd.action) || "*".equals(data.action)))
- return true;
- } else{
- String[] lp = p.getKey().split("\\|");
- if(lp.length<3)return false;
- if(data.fullType().equals(lp[0]))
- if(data.instance!=null && (data.instance.equals(lp[1]) || "*".equals(data.instance)))
- if(data.action!=null && (data.action.equals(lp[2]) || "*".equals(data.action)))
- return true;
- }
- return false;
- }
+ @Override
+ public boolean match(Permission p) {
+ if(p==null) {
+ return false;
+ }
+ PermDAO.Data pd;
+ if(p instanceof DirectAAFLur.PermPermission) {
+ pd = ((DirectAAFLur.PermPermission)p).data;
+ if(data.ns.equals(pd.ns))
+ if(data.type.equals(pd.type))
+ if(data.instance!=null && (data.instance.equals(pd.instance) || "*".equals(data.instance)))
+ if(data.action!=null && (data.action.equals(pd.action) || "*".equals(data.action)))
+ return true;
+ } else{
+ String[] lp = p.getKey().split("\\|");
+ if(lp.length<3)return false;
+ if(data.fullType().equals(lp[0]))
+ if(data.instance!=null && (data.instance.equals(lp[1]) || "*".equals(data.instance)))
+ if(data.action!=null && (data.action.equals(lp[2]) || "*".equals(data.action)))
+ return true;
+ }
+ return false;
+ }
- @Override
- public String permType() {
- return "AAFLUR";
- }
-
- }
-
- public String toString() {
- return "DirectAAFLur is enabled";
-
- }
+ @Override
+ public String permType() {
+ return "AAFLUR";
+ }
+
+ }
+
+ public String toString() {
+ return "DirectAAFLur is enabled";
+
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.cadi.Lur#handles(java.security.Principal)
- */
- @Override
- public boolean handles(Principal principal) {
- return true;
- }
+ /* (non-Javadoc)
+ * @see org.onap.aaf.cadi.Lur#handles(java.security.Principal)
+ */
+ @Override
+ public boolean handles(Principal principal) {
+ return true;
+ }
- @Override
- public Permission createPerm(String p) {
- String[] params = Split.split('|', p);
- if(params.length==3) {
- Result<NsSplit> nss = question.deriveNsSplit(NullTrans.singleton(), params[0]);
- if(nss.isOK()) {
- return new PermPermission(nss.value.ns,nss.value.name,params[1],params[2]);
- }
- }
- return new LocalPermission(p);
- }
+ @Override
+ public Permission createPerm(String p) {
+ String[] params = Split.split('|', p);
+ if(params.length==3) {
+ Result<NsSplit> nss = question.deriveNsSplit(NullTrans.singleton(), params[0]);
+ if(nss.isOK()) {
+ return new PermPermission(nss.value.ns,nss.value.name,params[1],params[2]);
+ }
+ }
+ return new LocalPermission(p);
+ }
- @Override
- public void clear(Principal p, StringBuilder sb) {
- AuthzTrans trans = env.newTrans();
- question.clearCache(trans,"all");
- env.log(Level.AUDIT, p.getName(), "has cleared Cache for",getClass().getSimpleName());
- trans.auditTrail(0, sb);
- }
+ @Override
+ public void clear(Principal p, StringBuilder sb) {
+ AuthzTrans trans = env.newTrans();
+ question.clearCache(trans,"all");
+ env.log(Level.AUDIT, p.getName(), "has cleared Cache for",getClass().getSimpleName());
+ trans.auditTrail(0, sb);
+ }
}
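
Note on the hunk above: PermPermission.match() accepts either another PermPermission (compared field by field, with "*" acting as a wildcard for the stored instance and action) or any other Permission whose key is expected in the form type|instance|action. A sketch of the string-key branch only, assuming the same pipe-delimited layout and with matchKey as a hypothetical helper name:

// Pipe-delimited key matching as in DirectAAFLur.PermPermission.match(); "*" is a wildcard.
public static boolean matchKey(String fullType, String instance, String action, String key) {
    String[] lp = key.split("\\|");
    if (lp.length < 3) {
        return false;                                  // key must be "type|instance|action"
    }
    return fullType.equals(lp[0])
        && instance != null && (instance.equals(lp[1]) || "*".equals(instance))
        && action != null && (action.equals(lp[2]) || "*".equals(action));
}

Note the asymmetry: the wildcard is honored on the stored permission's instance and action, not on the key being tested.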
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java
index f241cdf1..9d4cd05f 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectAAFUserPass.java
@@ -42,42 +42,42 @@ import org.onap.aaf.cadi.CredVal;
*
*/
public class DirectAAFUserPass implements CredVal {
- private final AuthzEnv env;
- private final Question question;
-
- public DirectAAFUserPass(AuthzEnv env, Question question) {
- this.env = env;
- this.question = question;
- }
+ private final AuthzEnv env;
+ private final Question question;
+
+ public DirectAAFUserPass(AuthzEnv env, Question question) {
+ this.env = env;
+ this.question = question;
+ }
- @Override
- public boolean validate(String user, Type type, byte[] pass, Object state) {
- try {
- AuthzTrans trans;
- if(state !=null) {
- if(state instanceof AuthzTrans) {
- trans = (AuthzTrans)state;
- } else {
- trans = env.newTransNoAvg();
- if(state instanceof HttpServletRequest) {
- trans.set((HttpServletRequest)state);
- }
- }
- } else {
- trans = env.newTransNoAvg();
- }
- Result<Date> result = question.doesUserCredMatch(trans, user, pass);
- trans.logAuditTrail(env.info());
- switch(result.status) {
- case OK:
- return true;
- default:
- String ip = trans.ip()==null?"":(", ip="+trans.ip());
- env.warn().log(user, "failed password validation" + ip + ':',result.errorString());
- }
- } catch (DAOException e) {
- env.error().log(e,"Cannot validate user/pass from cassandra");
- }
- return false;
- }
+ @Override
+ public boolean validate(String user, Type type, byte[] pass, Object state) {
+ try {
+ AuthzTrans trans;
+ if(state !=null) {
+ if(state instanceof AuthzTrans) {
+ trans = (AuthzTrans)state;
+ } else {
+ trans = env.newTransNoAvg();
+ if(state instanceof HttpServletRequest) {
+ trans.set((HttpServletRequest)state);
+ }
+ }
+ } else {
+ trans = env.newTransNoAvg();
+ }
+ Result<Date> result = question.doesUserCredMatch(trans, user, pass);
+ trans.logAuditTrail(env.info());
+ switch(result.status) {
+ case OK:
+ return true;
+ default:
+ String ip = trans.ip()==null?"":(", ip="+trans.ip());
+ env.warn().log(user, "failed password validation" + ip + ':',result.errorString());
+ }
+ } catch (DAOException e) {
+ env.error().log(e,"Cannot validate user/pass from cassandra");
+ }
+ return false;
+ }
}
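
Note on the hunk above: validate() first turns whatever the caller passed as state into an AuthzTrans — an existing transaction is reused, an HttpServletRequest is attached to a fresh one, and anything else simply gets a fresh transaction — before delegating to Question.doesUserCredMatch. A sketch of that resolution step as a hypothetical helper (resolveTrans is not in the codebase; env, AuthzTrans and HttpServletRequest are the same as in the diff):

// State-to-transaction resolution as in DirectAAFUserPass.validate().
private AuthzTrans resolveTrans(Object state) {
    if (state instanceof AuthzTrans) {
        return (AuthzTrans) state;              // reuse the caller's transaction
    }
    AuthzTrans trans = env.newTransNoAvg();     // otherwise start a new one
    if (state instanceof HttpServletRequest) {
        trans.set((HttpServletRequest) state);  // keep request details for the audit trail
    }
    return trans;
}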
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java
index 2c0c054b..6dd5e006 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectCertIdentity.java
@@ -48,31 +48,31 @@ import org.onap.aaf.cadi.taf.cert.X509Taf;
*
*/
public class DirectCertIdentity implements CertIdentity {
- private static CachedCertDAO certDAO;
+ private static CachedCertDAO certDAO;
- @Override
- public TaggedPrincipal identity(HttpServletRequest req, X509Certificate cert, byte[] _certBytes) throws CertificateException {
- byte[] certBytes = _certBytes;
- if(cert==null && certBytes==null) {
- return null;
- }
- if(certBytes==null) {
- certBytes = cert.getEncoded();
- }
- byte[] fingerprint = X509Taf.getFingerPrint(certBytes);
+ @Override
+ public TaggedPrincipal identity(HttpServletRequest req, X509Certificate cert, byte[] _certBytes) throws CertificateException {
+ byte[] certBytes = _certBytes;
+ if(cert==null && certBytes==null) {
+ return null;
+ }
+ if(certBytes==null) {
+ certBytes = cert.getEncoded();
+ }
+ byte[] fingerprint = X509Taf.getFingerPrint(certBytes);
- AuthzTrans trans = (AuthzTrans) req.getAttribute(TransFilter.TRANS_TAG);
-
- Result<List<Data>> cresp = certDAO.read(trans, ByteBuffer.wrap(fingerprint));
- if(cresp.isOKhasData()) {
- Data cdata = cresp.value.get(0);
- return new X509Principal(cdata.id,cert,certBytes,null);
- }
- return null;
- }
+ AuthzTrans trans = (AuthzTrans) req.getAttribute(TransFilter.TRANS_TAG);
+
+ Result<List<Data>> cresp = certDAO.read(trans, ByteBuffer.wrap(fingerprint));
+ if(cresp.isOKhasData()) {
+ Data cdata = cresp.value.get(0);
+ return new X509Principal(cdata.id,cert,certBytes,null);
+ }
+ return null;
+ }
- public static void set(CachedCertDAO ccd) {
- certDAO = ccd;
- }
+ public static void set(CachedCertDAO ccd) {
+ certDAO = ccd;
+ }
}
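
Note on the hunk above: identity() works from raw certificate bytes — falling back to encoding the X509Certificate when no bytes were passed — then fingerprints them and looks the fingerprint up through the cached CertDAO. The byte fallback alone, as a small JDK-only sketch (CertBytes is a hypothetical name):

public final class CertBytes {
    // Certificate-bytes fallback as in DirectCertIdentity.identity().
    public static byte[] of(java.security.cert.X509Certificate cert, byte[] given)
            throws java.security.cert.CertificateException {
        if (cert == null && given == null) {
            return null;                        // nothing to identify against
        }
        return given != null ? given : cert.getEncoded();
    }
}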
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java
index 3dceb3bf..caededa2 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectLocatorCreator.java
@@ -27,33 +27,33 @@ import org.onap.aaf.cadi.LocatorException;
import org.onap.aaf.cadi.aaf.v2_0.AbsAAFLocator;
public class DirectLocatorCreator implements AbsAAFLocator.LocatorCreator {
- private final AuthzEnv env;
- private final LocateDAO locateDAO;
- private String myhostname;
- private int myport;
-
- public DirectLocatorCreator(AuthzEnv env, LocateDAO locateDAO) {
- this.env = env;
- this.locateDAO = locateDAO;
- }
-
- @Override
- public AbsAAFLocator<?> create(String key, String version) throws LocatorException {
- DirectAAFLocator dal = new DirectAAFLocator(env,locateDAO,key,version);
- if(myhostname!=null) {
- dal.setSelf(myhostname, myport);
- }
- return dal;
- }
-
- /**
- * Make sure DirectAAFLocator created does not include self.
- * @param hostname
- * @param port
- */
- public void setSelf(String hostname, int port) {
- myhostname = hostname;
- myport = port;
- }
+ private final AuthzEnv env;
+ private final LocateDAO locateDAO;
+ private String myhostname;
+ private int myport;
+
+ public DirectLocatorCreator(AuthzEnv env, LocateDAO locateDAO) {
+ this.env = env;
+ this.locateDAO = locateDAO;
+ }
+
+ @Override
+ public AbsAAFLocator<?> create(String key, String version) throws LocatorException {
+ DirectAAFLocator dal = new DirectAAFLocator(env,locateDAO,key,version);
+ if(myhostname!=null) {
+ dal.setSelf(myhostname, myport);
+ }
+ return dal;
+ }
+
+ /**
+ * Make sure DirectAAFLocator created does not include self.
+ * @param hostname
+ * @param port
+ */
+ public void setSelf(String hostname, int port) {
+ myhostname = hostname;
+ myport = port;
+ }
}
diff --git a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java
index 1ddf022c..da7044cf 100644
--- a/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java
+++ b/auth/auth-cass/src/main/java/org/onap/aaf/auth/direct/DirectRegistrar.java
@@ -35,77 +35,77 @@ import org.onap.aaf.cadi.register.Registrant;
import org.onap.aaf.cadi.util.Split;
public class DirectRegistrar implements Registrant<AuthzEnv> {
- private Data locate;
- private LocateDAO ldao;
- public DirectRegistrar(Access access, LocateDAO ldao, String name, String version, int port) throws CadiException {
- this.ldao = ldao;
- locate = new LocateDAO.Data();
- locate.name = name;
- locate.port = port;
-
- try {
- String latitude = access.getProperty(Config.CADI_LATITUDE, null);
- if(latitude==null) {
- latitude = access.getProperty("AFT_LATITUDE", null);
- }
- String longitude = access.getProperty(Config.CADI_LONGITUDE, null);
- if(longitude==null) {
- longitude = access.getProperty("AFT_LONGITUDE", null);
- }
- if(latitude==null || longitude==null) {
- throw new CadiException(Config.CADI_LATITUDE + " and " + Config.CADI_LONGITUDE + " is required");
- } else {
- locate.latitude = Float.parseFloat(latitude);
- locate.longitude = Float.parseFloat(longitude);
- }
- String split[] = Split.splitTrim('.', version);
- locate.pkg = split.length>3?Integer.parseInt(split[3]):0;
- locate.patch = split.length>2?Integer.parseInt(split[2]):0;
- locate.minor = split.length>1?Integer.parseInt(split[1]):0;
- locate.major = split.length>0?Integer.parseInt(split[0]):0;
- locate.hostname = access.getProperty(Config.AAF_REGISTER_AS, null);
- if(locate.hostname==null) {
- locate.hostname = access.getProperty(Config.HOSTNAME, null);
- }
- if(locate.hostname==null) {
- locate.hostname = Inet4Address.getLocalHost().getHostName();
- }
- String subprotocols = access.getProperty(Config.CADI_PROTOCOLS, null);
- if(subprotocols==null) {
- locate.protocol="http";
- } else {
- locate.protocol="https";
- for(String s : Split.split(',', subprotocols)) {
- locate.subprotocol(true).add(s);
- }
- }
- } catch (NumberFormatException | UnknownHostException e) {
- throw new CadiException("Error extracting Data from Properties for Registrar",e);
- }
- }
-
- @Override
- public Result<Void> update(AuthzEnv env) {
- org.onap.aaf.auth.layer.Result<Void> dr = ldao.update(env.newTransNoAvg(), locate);
- if(dr.isOK()) {
- return Result.ok(200, null);
- } else {
- return Result.err(503, dr.errorString());
- }
- }
+ private Data locate;
+ private LocateDAO ldao;
+ public DirectRegistrar(Access access, LocateDAO ldao, String name, String version, int port) throws CadiException {
+ this.ldao = ldao;
+ locate = new LocateDAO.Data();
+ locate.name = name;
+ locate.port = port;
+
+ try {
+ String latitude = access.getProperty(Config.CADI_LATITUDE, null);
+ if(latitude==null) {
+ latitude = access.getProperty("AFT_LATITUDE", null);
+ }
+ String longitude = access.getProperty(Config.CADI_LONGITUDE, null);
+ if(longitude==null) {
+ longitude = access.getProperty("AFT_LONGITUDE", null);
+ }
+ if(latitude==null || longitude==null) {
+ throw new CadiException(Config.CADI_LATITUDE + " and " + Config.CADI_LONGITUDE + " is required");
+ } else {
+ locate.latitude = Float.parseFloat(latitude);
+ locate.longitude = Float.parseFloat(longitude);
+ }
+ String split[] = Split.splitTrim('.', version);
+ locate.pkg = split.length>3?Integer.parseInt(split[3]):0;
+ locate.patch = split.length>2?Integer.parseInt(split[2]):0;
+ locate.minor = split.length>1?Integer.parseInt(split[1]):0;
+ locate.major = split.length>0?Integer.parseInt(split[0]):0;
+ locate.hostname = access.getProperty(Config.AAF_REGISTER_AS, null);
+ if(locate.hostname==null) {
+ locate.hostname = access.getProperty(Config.HOSTNAME, null);
+ }
+ if(locate.hostname==null) {
+ locate.hostname = Inet4Address.getLocalHost().getHostName();
+ }
+ String subprotocols = access.getProperty(Config.CADI_PROTOCOLS, null);
+ if(subprotocols==null) {
+ locate.protocol="http";
+ } else {
+ locate.protocol="https";
+ for(String s : Split.split(',', subprotocols)) {
+ locate.subprotocol(true).add(s);
+ }
+ }
+ } catch (NumberFormatException | UnknownHostException e) {
+ throw new CadiException("Error extracting Data from Properties for Registrar",e);
+ }
+ }
+
+ @Override
+ public Result<Void> update(AuthzEnv env) {
+ org.onap.aaf.auth.layer.Result<Void> dr = ldao.update(env.newTransNoAvg(), locate);
+ if(dr.isOK()) {
+ return Result.ok(200, null);
+ } else {
+ return Result.err(503, dr.errorString());
+ }
+ }
- /* (non-Javadoc)
- * @see org.onap.aaf.auth.server.Registrant#cancel(org.onap.aaf.auth.env.test.AuthzEnv)
- */
- @Override
- public Result<Void> cancel(AuthzEnv env) {
- org.onap.aaf.auth.layer.Result<Void> dr = ldao.delete(env.newTransNoAvg(), locate, false);
- if(dr.isOK()) {
- return Result.ok(200, null);
- } else {
- return Result.err(503, dr.errorString());
- }
+ /* (non-Javadoc)
+ * @see org.onap.aaf.auth.server.Registrant#cancel(org.onap.aaf.auth.env.test.AuthzEnv)
+ */
+ @Override
+ public Result<Void> cancel(AuthzEnv env) {
+ org.onap.aaf.auth.layer.Result<Void> dr = ldao.delete(env.newTransNoAvg(), locate, false);
+ if(dr.isOK()) {
+ return Result.ok(200, null);
+ } else {
+ return Result.err(503, dr.errorString());
+ }
- }
+ }
}
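
Note on the hunk above: the DirectRegistrar constructor fills a LocateDAO.Data record from configuration — latitude/longitude with AFT_* fallbacks, the version split into major/minor/patch/pkg with missing fields defaulting to 0, a hostname resolved from AAF_REGISTER_AS, then HOSTNAME, then the local host, and finally the protocol. The protocol decision in isolation, sketched with plain String.split standing in for CADI's Split utility:

// Protocol decision as in DirectRegistrar: no CADI_PROTOCOLS property means plain http,
// otherwise https with each listed entry registered as a subprotocol.
private static void applyProtocols(LocateDAO.Data locate, String subprotocols) {
    if (subprotocols == null) {
        locate.protocol = "http";
    } else {
        locate.protocol = "https";
        for (String s : subprotocols.split(",")) {
            locate.subprotocol(true).add(s.trim());
        }
    }
}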
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java
index 31a93723..47723e6b 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_Cached.java
@@ -50,91 +50,91 @@ import org.onap.aaf.misc.env.Trans;
// @RunWith(PowerMockRunner.class)
public class JU_Cached {
- @Mock
- CIDAO<Trans> ciDaoMock;
+ @Mock
+ CIDAO<Trans> ciDaoMock;
- @Mock
- AuthzEnv authzEnvMock;
+ @Mock
+ AuthzEnv authzEnvMock;
- @Mock
- CIDAO<AuthzTrans> cidaoATMock;
-
- String name = "nameString";
+ @Mock
+ CIDAO<AuthzTrans> cidaoATMock;
+
+ String name = "nameString";
- @Before
- public void setUp(){
- MockitoAnnotations.initMocks(this);
- }
-
- @Test
- public void testCachedIdx(){
- Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
- assertThat(cached.cacheIdx("1234567890"), is(0));
- }
-
- @Test
- public void testInvalidate(){
- Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 5, 30000L);
- cached.add("test", new ArrayList<>());
- cached.invalidate("test");
- cached.invalidate("test1");
- }
-
- @SuppressWarnings("static-access")
- @Test
- public void testStopTimer(){
- Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
- cached.stopTimer();
- assertTrue(true);
- }
+ @Before
+ public void setUp(){
+ MockitoAnnotations.initMocks(this);
+ }
+
+ @Test
+ public void testCachedIdx(){
+ Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
+ assertThat(cached.cacheIdx("1234567890"), is(0));
+ }
+
+ @Test
+ public void testInvalidate(){
+ Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 5, 30000L);
+ cached.add("test", new ArrayList<>());
+ cached.invalidate("test");
+ cached.invalidate("test1");
+ }
+
+ @SuppressWarnings("static-access")
+ @Test
+ public void testStopTimer(){
+ Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
+ cached.stopTimer();
+ assertTrue(true);
+ }
- @SuppressWarnings("static-access")
- @Test
- public void testStartRefresh(){
- Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
- cached.startRefresh(authzEnvMock, cidaoATMock);
- assertTrue(true);
- }
-// @Mock
-// Trans transMock;
-// @Mock
-// Getter<DAO> getterMock;
-//
-// @Test
-// public void testGet(){
-// cached.get(transMock, name, getterMock);
-// fail("not implemented");
-// }
-//
-// @SuppressWarnings("unchecked")
-// public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
-// List<DATA> ld = null;
-// Result<List<DATA>> rld = null;
-//
-// int cacheIdx = cacheIdx(key);
-// Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
-//
-// // Check for saved element in cache
-// Dated cached = map.get(key);
-// // Note: These Segment Timestamps are kept up to date with DB
-// Date dbStamp = info.get(trans, name,cacheIdx);
-//
-// // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
-// if(cached!=null && dbStamp.before(cached.timestamp)) {
-// ld = (List<DATA>)cached.data;
-// rld = Result.ok(ld);
-// } else {
-// rld = getter.get();
-// if(rld.isOK()) { // only store valid lists
-// map.put(key, new Dated(rld.value)); // successful item found gets put in cache
-//// } else if(rld.status == Result.ERR_Backend){
-//// map.remove(key);
-// }
-// }
-// return rld;
-// }
+ @SuppressWarnings("static-access")
+ @Test
+ public void testStartRefresh(){
+ Cached<Trans, DataStub> cached = new Cached<Trans, DataStub>(ciDaoMock, name, 1, 30000L);
+ cached.startRefresh(authzEnvMock, cidaoATMock);
+ assertTrue(true);
+ }
+// @Mock
+// Trans transMock;
+// @Mock
+// Getter<DAO> getterMock;
+//
+// @Test
+// public void testGet(){
+// cached.get(transMock, name, getterMock);
+// fail("not implemented");
+// }
+//
+// @SuppressWarnings("unchecked")
+// public Result<List<DATA>> get(TRANS trans, String key, Getter<DATA> getter) {
+// List<DATA> ld = null;
+// Result<List<DATA>> rld = null;
+//
+// int cacheIdx = cacheIdx(key);
+// Map<String, Dated> map = ((Map<String,Dated>)cache[cacheIdx]);
+//
+// // Check for saved element in cache
+// Dated cached = map.get(key);
+// // Note: These Segment Timestamps are kept up to date with DB
+// Date dbStamp = info.get(trans, name,cacheIdx);
+//
+// // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax)
+// if(cached!=null && dbStamp.before(cached.timestamp)) {
+// ld = (List<DATA>)cached.data;
+// rld = Result.ok(ld);
+// } else {
+// rld = getter.get();
+// if(rld.isOK()) { // only store valid lists
+// map.put(key, new Dated(rld.value)); // successful item found gets put in cache
+//// } else if(rld.status == Result.ERR_Backend){
+//// map.remove(key);
+// }
+// }
+// return rld;
+// }
- class DataStub extends CacheableData {
- @Override public int[] invalidate(Cached<?, ?> cache) { return null; }
- }
+ class DataStub extends CacheableData {
+ @Override public int[] invalidate(Cached<?, ?> cache) { return null; }
+ }
}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java
index 14612a1f..8ffc5c2f 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CachedDAO.java
@@ -39,26 +39,26 @@ import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
public class JU_CachedDAO {
- CachedDAO cachedDAO;
- @Mock
- DAO daoMock;
- @Mock
- CIDAO<Trans> ciDAOMock;
- int segsize=1;
- Object[ ] objs = new Object[2];
-
- @Before
- public void setUp(){
- objs[0] = "helo";
- objs[1] = "polo";
- cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize, segsize);
- }
-
- @Test
- public void testKeyFromObjs(){
- String result = cachedDAO.keyFromObjs(objs);
-        System.out.println("value of result " +result);
- assertTrue(true);
- }
-
+ CachedDAO cachedDAO;
+ @Mock
+ DAO daoMock;
+ @Mock
+ CIDAO<Trans> ciDAOMock;
+ int segsize=1;
+ Object[ ] objs = new Object[2];
+
+ @Before
+ public void setUp(){
+ objs[0] = "helo";
+ objs[1] = "polo";
+ cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize, segsize);
+ }
+
+ @Test
+ public void testKeyFromObjs(){
+ String result = cachedDAO.keyFromObjs(objs);
+        System.out.println("value of result " +result);
+ assertTrue(true);
+ }
+
}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java
index 525450a6..722011b0 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassAccess.java
@@ -41,33 +41,33 @@ import com.datastax.driver.core.Cluster.Builder;
@RunWith(PowerMockRunner.class)
public class JU_CassAccess {
- CassAccess cassAccess;
-
- public static final String KEYSPACE = "authz";
- public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
- public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
- public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
- public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
- public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
- public static final String LATITUDE = "LATITUDE";
- public static final String LONGITUDE = "LONGITUDE";
- //private static final List<Resettable> resetExceptions = new ArrayList<>();
- public static final String ERR_ACCESS_MSG = "Accessing Backend";
- private static Builder cb = null;
- @Mock
- Env envMock;
- String prefix=null;
-
- @Before
- public void setUp(){
- cassAccess = new CassAccess();
- }
+ CassAccess cassAccess;
+
+ public static final String KEYSPACE = "authz";
+ public static final String CASSANDRA_CLUSTERS = "cassandra.clusters";
+ public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port";
+ public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user";
+ public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password";
+ public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions";
+ public static final String LATITUDE = "LATITUDE";
+ public static final String LONGITUDE = "LONGITUDE";
+ //private static final List<Resettable> resetExceptions = new ArrayList<>();
+ public static final String ERR_ACCESS_MSG = "Accessing Backend";
+ private static Builder cb = null;
+ @Mock
+ Env envMock;
+ String prefix=null;
+
+ @Before
+ public void setUp(){
+ cassAccess = new CassAccess();
+ }
- @Test(expected=APIException.class)
- public void testCluster() throws APIException, IOException {
- cassAccess.cluster(envMock, prefix);
-
- }
+ @Test(expected=APIException.class)
+ public void testCluster() throws APIException, IOException {
+ cassAccess.cluster(envMock, prefix);
+
+ }
}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java
index d06e38f7..b5749747 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_CassDAOImpl.java
@@ -64,33 +64,33 @@ AuthzTrans authzTransMock;
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Before
- public void setUp()
- {
- String name = "name";
- String keySpace = "keySpace";
- String table = "table";
- cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock);
- }
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @Before
+ public void setUp()
+ {
+ String name = "name";
+ String keySpace = "keySpace";
+ String table = "table";
+ cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock);
+ }
- //TODO: Gabe [JUnit] Visibility issue
- @Test
- public void testReadConsistency() {
- String table = "users";
- PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO");
- ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table);
- System.out.println("Consistency level" + consistencyLevel.name());
- assertEquals("TWO", consistencyLevel.name());
- }
-
- @Test
- public void testWriteConsistency() {
- String table = "users";
- PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null);
- ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table);
- System.out.println("Consistency level" + consistencyLevel.name());
- assertEquals("ONE", consistencyLevel.name());
- }
-
+ //TODO: Gabe [JUnit] Visibility issue
+ @Test
+ public void testReadConsistency() {
+ String table = "users";
+ PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO");
+ ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table);
+ System.out.println("Consistency level" + consistencyLevel.name());
+ assertEquals("TWO", consistencyLevel.name());
+ }
+
+ @Test
+ public void testWriteConsistency() {
+ String table = "users";
+ PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null);
+ ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table);
+ System.out.println("Consistency level" + consistencyLevel.name());
+ assertEquals("ONE", consistencyLevel.name());
+ }
+
}
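
Note on the hunk above: the two tests pin the consistency-level contract of CassDAOImpl — the level for a table can be overridden with a per-table property (read consistency for the users table comes back as TWO when that property is set), and an unset write property falls back to ONE. A sketch of that lookup-with-fallback pattern, with the property prefix written out as an assumption in place of the CASS_WRITE_CONSISTENCY constant:

// Per-table consistency lookup exercised by the tests above; ONE is the fallback.
ConsistencyLevel writeConsistency(AuthzTrans trans, String table) {
    String level = trans.getProperty("cassandra.writeConsistency" + '.' + table);
    return level == null ? ConsistencyLevel.ONE : ConsistencyLevel.valueOf(level);
}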
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java
index 8cfb8520..8cd412d2 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/JU_DAOException.java
@@ -34,16 +34,16 @@ import org.powermock.modules.junit4.PowerMockRunner;
public class JU_DAOException {
DAOException daoException;
- //DAOException daoException = new DAOException();
- String message = "message";
- Throwable cause;
- @Before
- public void setUp(){
- daoException = new DAOException();
- }
+ //DAOException daoException = new DAOException();
+ String message = "message";
+ Throwable cause;
+ @Before
+ public void setUp(){
+ daoException = new DAOException();
+ }
- @Test
- public void test(){
- assertTrue(true);
- }
+ @Test
+ public void test(){
+ assertTrue(true);
+ }
}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java
index 3064de55..331e4c8b 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/AbsJUCass.java
@@ -53,148 +53,148 @@ import junit.framework.Assert;
*
*/
public class AbsJUCass {
- protected static final String AUTHZ = "authz";
- protected static Cluster cluster;
- protected static AuthzEnv env;
- protected static int iterations = 0;
- protected static float totals=0.0f;
- protected static float remote = 0.0f;
- protected static float json = 0.0f;
- protected static AuthzTrans trans;
- protected static boolean details = true;
-
- @BeforeClass
- public static void startup() throws APIException, IOException {
- synchronized(AUTHZ) {
- if(env==null) {
- final String resource = "cadi.properties";
- File f = new File("etc" + resource);
- InputStream is=null;
- Properties props = new Properties();
- try {
- if(f.exists()) {
- is = new FileInputStream(f);
- } else {
- URL rsrc = ClassLoader.getSystemResource(resource);
- is = rsrc.openStream();
- }
- props.load(is);
- } finally {
- if(is==null) {
- env= new AuthzEnv();
- Assert.fail(resource + " must exist in etc dir, or in Classpath");
- }
- is.close();
- }
- env = new AuthzEnv(props);
- }
- }
- cluster = CassAccess.cluster(env,"LOCAL");
+ protected static final String AUTHZ = "authz";
+ protected static Cluster cluster;
+ protected static AuthzEnv env;
+ protected static int iterations = 0;
+ protected static float totals=0.0f;
+ protected static float remote = 0.0f;
+ protected static float json = 0.0f;
+ protected static AuthzTrans trans;
+ protected static boolean details = true;
+
+ @BeforeClass
+ public static void startup() throws APIException, IOException {
+ synchronized(AUTHZ) {
+ if(env==null) {
+ final String resource = "cadi.properties";
+ File f = new File("etc" + resource);
+ InputStream is=null;
+ Properties props = new Properties();
+ try {
+ if(f.exists()) {
+ is = new FileInputStream(f);
+ } else {
+ URL rsrc = ClassLoader.getSystemResource(resource);
+ is = rsrc.openStream();
+ }
+ props.load(is);
+ } finally {
+ if(is==null) {
+ env= new AuthzEnv();
+ Assert.fail(resource + " must exist in etc dir, or in Classpath");
+ }
+ is.close();
+ }
+ env = new AuthzEnv(props);
+ }
+ }
+ cluster = CassAccess.cluster(env,"LOCAL");
- env.info().log("Connecting to Cluster");
- try {
- cluster.connect(AUTHZ);
- } catch(Exception e) {
- cluster=null;
- env.error().log(e);
- Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());
- }
- env.info().log("Connected");
-
- // Load special data here
-
- // WebPhone
- env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389");
- env.setProperty("com.sun.jndi.ldap.connect.pool","true");
-
- iterations = 0;
-
- }
-
- @AfterClass
- public static void shutdown() {
- if(cluster!=null) {
- cluster.close();
- cluster = null;
- }
- }
+ env.info().log("Connecting to Cluster");
+ try {
+ cluster.connect(AUTHZ);
+ } catch(Exception e) {
+ cluster=null;
+ env.error().log(e);
+ Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage());
+ }
+ env.info().log("Connected");
+
+ // Load special data here
+
+ // WebPhone
+ env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389");
+ env.setProperty("com.sun.jndi.ldap.connect.pool","true");
+
+ iterations = 0;
+
+ }
+
+ @AfterClass
+ public static void shutdown() {
+ if(cluster!=null) {
+ cluster.close();
+ cluster = null;
+ }
+ }
- @Before
- public void newTrans() {
- trans = env.newTrans();
-
- trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));
- }
-
- @After
- public void auditTrail() {
- if(totals==0) { // "updateTotals()" was not called... just do one Trans
- StringBuilder sb = new StringBuilder();
- Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);
- if(details) {
- env.info().log(
- sb,
- "Total time:",
- totals += metric.total,
- "JSON time: ",
- metric.buckets[0],
- "REMOTE time: ",
- metric.buckets[1]
- );
- } else {
- totals += metric.total;
- }
- }
- }
-
- protected void updateTotals() {
- Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);
- totals+=metric.total;
- json +=metric.buckets[0];
- remote+=metric.buckets[1];
- }
+ @Before
+ public void newTrans() {
+ trans = env.newTrans();
+
+ trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name"));
+ }
+
+ @After
+ public void auditTrail() {
+ if(totals==0) { // "updateTotals()" was not called... just do one Trans
+ StringBuilder sb = new StringBuilder();
+ Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE);
+ if(details) {
+ env.info().log(
+ sb,
+ "Total time:",
+ totals += metric.total,
+ "JSON time: ",
+ metric.buckets[0],
+ "REMOTE time: ",
+ metric.buckets[1]
+ );
+ } else {
+ totals += metric.total;
+ }
+ }
+ }
+
+ protected void updateTotals() {
+ Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE);
+ totals+=metric.total;
+ json +=metric.buckets[0];
+ remote+=metric.buckets[1];
+ }
- @AfterClass
- public static void print() {
- float transTime;
- if(iterations==0) {
- transTime=totals;
- } else {
- transTime=totals/iterations;
- }
- env.info().log(
- "Total time:",
- totals,
- "JSON time:",
- json,
- "REMOTE time:",
- remote,
- "Iterations:",
- iterations,
- "Transaction time:",
- transTime
- );
- }
-
- /**
- * Take a User/Pass and turn into an MD5 Hashed BasicAuth
- *
- * @param user
- * @param pass
- * @return
- * @throws IOException
- * @throws NoSuchAlgorithmException
- */
- //TODO: Gabe [JUnit] Issue
- public static byte[] userPassToBytes(String user, String pass)
- throws IOException, NoSuchAlgorithmException {
- // Take the form of BasicAuth, so as to allow any character in Password
- // (this is an issue in 1.0)
- // Also, it makes it quicker to evaluate Basic Auth direct questions
- String ba = Symm.base64url.encode(user + ':' + pass);
- // Take MD5 Hash, so that data in DB can't be reversed out.
- return Hash.hashMD5(ba.getBytes());
- }
+ @AfterClass
+ public static void print() {
+ float transTime;
+ if(iterations==0) {
+ transTime=totals;
+ } else {
+ transTime=totals/iterations;
+ }
+ env.info().log(
+ "Total time:",
+ totals,
+ "JSON time:",
+ json,
+ "REMOTE time:",
+ remote,
+ "Iterations:",
+ iterations,
+ "Transaction time:",
+ transTime
+ );
+ }
+
+ /**
+ * Take a User/Pass and turn into an MD5 Hashed BasicAuth
+ *
+ * @param user
+ * @param pass
+ * @return
+ * @throws IOException
+ * @throws NoSuchAlgorithmException
+ */
+ //TODO: Gabe [JUnit] Issue
+ public static byte[] userPassToBytes(String user, String pass)
+ throws IOException, NoSuchAlgorithmException {
+ // Take the form of BasicAuth, so as to allow any character in Password
+ // (this is an issue in 1.0)
+ // Also, it makes it quicker to evaluate Basic Auth direct questions
+ String ba = Symm.base64url.encode(user + ':' + pass);
+ // Take MD5 Hash, so that data in DB can't be reversed out.
+ return Hash.hashMD5(ba.getBytes());
+ }
}
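
Note on the hunk above: userPassToBytes() spells out the credential-hashing convention — user and password are joined in the Basic-Auth form user:pass (so any character is legal in the password), base64url-encoded, and then MD5-hashed so the value stored in the DB cannot be reversed. The same round trip with JDK classes only, where java.util.Base64 and MessageDigest stand in for CADI's Symm.base64url and Hash.hashMD5 (padding details may differ):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

public final class CredHash {
    // JDK-only sketch of AbsJUCass.userPassToBytes().
    public static byte[] userPassToBytes(String user, String pass) throws NoSuchAlgorithmException {
        // Basic-Auth form, base64url-encoded.
        String ba = Base64.getUrlEncoder()
                .encodeToString((user + ':' + pass).getBytes(StandardCharsets.UTF_8));
        // MD5 so the credential stored in the DB cannot be reversed out.
        return MessageDigest.getInstance("MD5").digest(ba.getBytes(StandardCharsets.UTF_8));
    }
}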
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java
index e316ac7e..4ffa946f 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_Bytification.java
@@ -38,228 +38,228 @@ import org.onap.aaf.auth.dao.cass.UserRoleDAO;
public class JU_Bytification {
- @Test
- public void testNS() throws IOException {
-
- // Normal
- NsDAO.Data ns = new NsDAO.Data();
- ns.name = "org.osaaf.<pass>";
- ns.type = NsType.APP.type;
+ @Test
+ public void testNS() throws IOException {
+
+ // Normal
+ NsDAO.Data ns = new NsDAO.Data();
+ ns.name = "org.osaaf.<pass>";
+ ns.type = NsType.APP.type;
- ByteBuffer bb = ns.bytify();
-
- NsDAO.Data nsr = new NsDAO.Data();
- nsr.reconstitute(bb);
- check(ns,nsr);
-
- // Empty admin
-// ns.admin(true).clear();
- bb = ns.bytify();
- nsr = new NsDAO.Data();
- nsr.reconstitute(bb);
- check(ns,nsr);
-
- // Empty responsible
-// ns.responsible(true).clear();
- bb = ns.bytify();
- nsr = new NsDAO.Data();
- nsr.reconstitute(bb);
- check(ns,nsr);
+ ByteBuffer bb = ns.bytify();
+
+ NsDAO.Data nsr = new NsDAO.Data();
+ nsr.reconstitute(bb);
+ check(ns,nsr);
+
+ // Empty admin
+// ns.admin(true).clear();
+ bb = ns.bytify();
+ nsr = new NsDAO.Data();
+ nsr.reconstitute(bb);
+ check(ns,nsr);
+
+ // Empty responsible
+// ns.responsible(true).clear();
+ bb = ns.bytify();
+ nsr = new NsDAO.Data();
+ nsr.reconstitute(bb);
+ check(ns,nsr);
- bb = ns.bytify();
- nsr = new NsDAO.Data();
- nsr.reconstitute(bb);
- check(ns,nsr);
- }
-
- private void check(NsDAO.Data a, NsDAO.Data b) {
- assertEquals(a.name,b.name);
- assertEquals(a.type,b.type);
-// assertEquals(a.admin.size(),b.admin.size());
-
-// for(String s: a.admin) {
-// assertTrue(b.admin.contains(s));
-// }
-//
-// assertEquals(a.responsible.size(),b.responsible.size());
-// for(String s: a.responsible) {
-// assertTrue(b.responsible.contains(s));
-// }
- }
+ bb = ns.bytify();
+ nsr = new NsDAO.Data();
+ nsr.reconstitute(bb);
+ check(ns,nsr);
+ }
+
+ private void check(NsDAO.Data a, NsDAO.Data b) {
+ assertEquals(a.name,b.name);
+ assertEquals(a.type,b.type);
+// assertEquals(a.admin.size(),b.admin.size());
+
+// for(String s: a.admin) {
+// assertTrue(b.admin.contains(s));
+// }
+//
+// assertEquals(a.responsible.size(),b.responsible.size());
+// for(String s: a.responsible) {
+// assertTrue(b.responsible.contains(s));
+// }
+ }
- @Test
- public void testRole() throws IOException {
- RoleDAO.Data rd1 = new RoleDAO.Data();
- rd1.ns = "org.osaaf.<pass>";
- rd1.name = "my.role";
- rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction");
- rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction2");
+ @Test
+ public void testRole() throws IOException {
+ RoleDAO.Data rd1 = new RoleDAO.Data();
+ rd1.ns = "org.osaaf.<pass>";
+ rd1.name = "my.role";
+ rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction");
+ rd1.perms(true).add("org.osaaf.<pass>.my.Perm|myInstance|myAction2");
- // Normal
- ByteBuffer bb = rd1.bytify();
- RoleDAO.Data rd2 = new RoleDAO.Data();
- rd2.reconstitute(bb);
- check(rd1,rd2);
-
- // Overshoot Buffer
- StringBuilder sb = new StringBuilder(300);
- sb.append("role|instance|veryLongAction...");
- for(int i=0;i<280;++i) {
- sb.append('a');
- }
- rd1.perms(true).add(sb.toString());
- bb = rd1.bytify();
- rd2 = new RoleDAO.Data();
- rd2.reconstitute(bb);
- check(rd1,rd2);
-
- // No Perms
- rd1.perms.clear();
-
- bb = rd1.bytify();
- rd2 = new RoleDAO.Data();
- rd2.reconstitute(bb);
- check(rd1,rd2);
-
- // 1000 Perms
- for(int i=0;i<1000;++i) {
- rd1.perms(true).add("com|inst|action"+ i);
- }
+ // Normal
+ ByteBuffer bb = rd1.bytify();
+ RoleDAO.Data rd2 = new RoleDAO.Data();
+ rd2.reconstitute(bb);
+ check(rd1,rd2);
+
+ // Overshoot Buffer
+ StringBuilder sb = new StringBuilder(300);
+ sb.append("role|instance|veryLongAction...");
+ for(int i=0;i<280;++i) {
+ sb.append('a');
+ }
+ rd1.perms(true).add(sb.toString());
+ bb = rd1.bytify();
+ rd2 = new RoleDAO.Data();
+ rd2.reconstitute(bb);
+ check(rd1,rd2);
+
+ // No Perms
+ rd1.perms.clear();
+
+ bb = rd1.bytify();
+ rd2 = new RoleDAO.Data();
+ rd2.reconstitute(bb);
+ check(rd1,rd2);
+
+ // 1000 Perms
+ for(int i=0;i<1000;++i) {
+ rd1.perms(true).add("com|inst|action"+ i);
+ }
- bb = rd1.bytify();
- rd2 = new RoleDAO.Data();
- rd2.reconstitute(bb);
- check(rd1,rd2);
+ bb = rd1.bytify();
+ rd2 = new RoleDAO.Data();
+ rd2.reconstitute(bb);
+ check(rd1,rd2);
- }
-
- private void check(RoleDAO.Data a, RoleDAO.Data b) {
- assertEquals(a.ns,b.ns);
- assertEquals(a.name,b.name);
-
- assertEquals(a.perms.size(),b.perms.size());
- for(String s: a.perms) {
- assertTrue(b.perms.contains(s));
- }
- }
+ }
+
+ private void check(RoleDAO.Data a, RoleDAO.Data b) {
+ assertEquals(a.ns,b.ns);
+ assertEquals(a.name,b.name);
+
+ assertEquals(a.perms.size(),b.perms.size());
+ for(String s: a.perms) {
+ assertTrue(b.perms.contains(s));
+ }
+ }
- @Test
- public void testPerm() throws IOException {
- PermDAO.Data pd1 = new PermDAO.Data();
- pd1.ns = "org.osaaf.<pass>";
- pd1.type = "my.perm";
- pd1.instance = "instance";
- pd1.action = "read";
- pd1.roles(true).add("org.osaaf.<pass>.my.Role");
- pd1.roles(true).add("org.osaaf.<pass>.my.Role2");
+ @Test
+ public void testPerm() throws IOException {
+ PermDAO.Data pd1 = new PermDAO.Data();
+ pd1.ns = "org.osaaf.<pass>";
+ pd1.type = "my.perm";
+ pd1.instance = "instance";
+ pd1.action = "read";
+ pd1.roles(true).add("org.osaaf.<pass>.my.Role");
+ pd1.roles(true).add("org.osaaf.<pass>.my.Role2");
- // Normal
- ByteBuffer bb = pd1.bytify();
- PermDAO.Data rd2 = new PermDAO.Data();
- rd2.reconstitute(bb);
- check(pd1,rd2);
-
- // No Perms
- pd1.roles.clear();
-
- bb = pd1.bytify();
- rd2 = new PermDAO.Data();
- rd2.reconstitute(bb);
- check(pd1,rd2);
-
- // 1000 Perms
- for(int i=0;i<1000;++i) {
- pd1.roles(true).add("org.osaaf.<pass>.my.Role"+ i);
- }
+ // Normal
+ ByteBuffer bb = pd1.bytify();
+ PermDAO.Data rd2 = new PermDAO.Data();
+ rd2.reconstitute(bb);
+ check(pd1,rd2);
+
+ // No Perms
+ pd1.roles.clear();
+
+ bb = pd1.bytify();
+ rd2 = new PermDAO.Data();
+ rd2.reconstitute(bb);
+ check(pd1,rd2);
+
+ // 1000 Perms
+ for(int i=0;i<1000;++i) {
+ pd1.roles(true).add("org.osaaf.<pass>.my.Role"+ i);
+ }
- bb = pd1.bytify();
- rd2 = new PermDAO.Data();
- rd2.reconstitute(bb);
- check(pd1,rd2);
+ bb = pd1.bytify();
+ rd2 = new PermDAO.Data();
+ rd2.reconstitute(bb);
+ check(pd1,rd2);
- }
-
- private void check(PermDAO.Data a, PermDAO.Data b) {
- assertEquals(a.ns,b.ns);
- assertEquals(a.type,b.type);
- assertEquals(a.instance,b.instance);
- assertEquals(a.action,b.action);
-
- assertEquals(a.roles.size(),b.roles.size());
- for(String s: a.roles) {
- assertTrue(b.roles.contains(s));
- }
- }
+ }
+
+ private void check(PermDAO.Data a, PermDAO.Data b) {
+ assertEquals(a.ns,b.ns);
+ assertEquals(a.type,b.type);
+ assertEquals(a.instance,b.instance);
+ assertEquals(a.action,b.action);
+
+ assertEquals(a.roles.size(),b.roles.size());
+ for(String s: a.roles) {
+ assertTrue(b.roles.contains(s));
+ }
+ }
- @Test
- public void testUserRole() throws IOException {
- UserRoleDAO.Data urd1 = new UserRoleDAO.Data();
- urd1.user = "myname@abc.att.com";
- urd1.role("org.osaaf.<pass>","my.role");
- urd1.expires = new Date();
+ @Test
+ public void testUserRole() throws IOException {
+ UserRoleDAO.Data urd1 = new UserRoleDAO.Data();
+ urd1.user = "myname@abc.att.com";
+ urd1.role("org.osaaf.<pass>","my.role");
+ urd1.expires = new Date();
- // Normal
- ByteBuffer bb = urd1.bytify();
- UserRoleDAO.Data urd2 = new UserRoleDAO.Data();
- urd2.reconstitute(bb);
- check(urd1,urd2);
-
- // A null
- urd1.expires = null;
- urd1.role = null;
-
- bb = urd1.bytify();
- urd2 = new UserRoleDAO.Data();
- urd2.reconstitute(bb);
- check(urd1,urd2);
- }
+ // Normal
+ ByteBuffer bb = urd1.bytify();
+ UserRoleDAO.Data urd2 = new UserRoleDAO.Data();
+ urd2.reconstitute(bb);
+ check(urd1,urd2);
+
+ // A null
+ urd1.expires = null;
+ urd1.role = null;
+
+ bb = urd1.bytify();
+ urd2 = new UserRoleDAO.Data();
+ urd2.reconstitute(bb);
+ check(urd1,urd2);
+ }
- private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {
- assertEquals(a.user,b.user);
- assertEquals(a.role,b.role);
- assertEquals(a.expires,b.expires);
- }
+ private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) {
+ assertEquals(a.user,b.user);
+ assertEquals(a.role,b.role);
+ assertEquals(a.expires,b.expires);
+ }
-
- @Test
- public void testCred() throws IOException {
- CredDAO.Data cd = new CredDAO.Data();
- cd.id = "m55555@abc.att.com";
- cd.ns = "org.osaaf.abc";
- cd.type = 2;
- cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});
- cd.expires = new Date();
+
+ @Test
+ public void testCred() throws IOException {
+ CredDAO.Data cd = new CredDAO.Data();
+ cd.id = "m55555@abc.att.com";
+ cd.ns = "org.osaaf.abc";
+ cd.type = 2;
+ cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4});
+ cd.expires = new Date();
- // Normal
- ByteBuffer bb = cd.bytify();
- CredDAO.Data cd2 = new CredDAO.Data();
- cd2.reconstitute(bb);
- check(cd,cd2);
-
- // nulls
- cd.expires = null;
- cd.cred = null;
-
- bb = cd.bytify();
- cd2 = new CredDAO.Data();
- cd2.reconstitute(bb);
- check(cd,cd2);
+ // Normal
+ ByteBuffer bb = cd.bytify();
+ CredDAO.Data cd2 = new CredDAO.Data();
+ cd2.reconstitute(bb);
+ check(cd,cd2);
+
+ // nulls
+ cd.expires = null;
+ cd.cred = null;
+
+ bb = cd.bytify();
+ cd2 = new CredDAO.Data();
+ cd2.reconstitute(bb);
+ check(cd,cd2);
- }
+ }
- private void check(CredDAO.Data a, CredDAO.Data b) {
- assertEquals(a.id,b.id);
- assertEquals(a.ns,b.ns);
- assertEquals(a.type,b.type);
- if(a.cred==null) {
- assertEquals(a.cred,b.cred);
- } else {
- int l = a.cred.limit();
- assertEquals(l,b.cred.limit());
- for (int i=0;i<l;++i) {
- assertEquals(a.cred.get(),b.cred.get());
- }
- }
- }
+ private void check(CredDAO.Data a, CredDAO.Data b) {
+ assertEquals(a.id,b.id);
+ assertEquals(a.ns,b.ns);
+ assertEquals(a.type,b.type);
+ if(a.cred==null) {
+ assertEquals(a.cred,b.cred);
+ } else {
+ int l = a.cred.limit();
+ assertEquals(l,b.cred.limit());
+ for (int i=0;i<l;++i) {
+ assertEquals(a.cred.get(),b.cred.get());
+ }
+ }
+ }
}
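
Note on the hunk above: every test follows the same serialization round trip — populate a DAO Data object, bytify() it into a ByteBuffer, reconstitute() a fresh instance from that buffer, and compare field by field, including edge cases such as cleared collections, oversized entries and null fields. The core of the pattern for RoleDAO.Data, assuming it sits inside a JUnit test method that declares IOException like the ones above (the namespace string is illustrative):

// bytify -> reconstitute -> compare, as in testRole() above.
RoleDAO.Data in = new RoleDAO.Data();
in.ns = "org.osaaf.example";                       // illustrative namespace
in.name = "my.role";
in.perms(true).add("org.osaaf.example.my.Perm|myInstance|myAction");

ByteBuffer bb = in.bytify();                       // serialize
RoleDAO.Data out = new RoleDAO.Data();
out.reconstitute(bb);                              // rebuild from the buffer

assertEquals(in.ns, out.ns);
assertEquals(in.name, out.name);
assertEquals(in.perms.size(), out.perms.size());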
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java
index 06e5f0ed..38d0fbfd 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/dao/aaf/test/JU_NsType.java
@@ -29,30 +29,30 @@ import org.onap.aaf.auth.dao.cass.NsType;
public class JU_NsType {
- @AfterClass
- public static void tearDownAfterClass() throws Exception {
- }
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception {
+ }
- @Test
- public void test() {
- NsType nt,nt2;
- String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};
- for(String s : tests) {
- nt = NsType.valueOf(s);
- assertEquals(s,nt.name());
-
- nt2 = NsType.fromString(s);
- assertEquals(nt,nt2);
-
- int t = nt.type;
- nt2 = NsType.fromType(t);
- assertEquals(nt,nt2);
- }
-
- nt = NsType.fromType(Integer.MIN_VALUE);
- assertEquals(nt,NsType.UNKNOWN);
- nt = NsType.fromString("Garbage");
- assertEquals(nt,NsType.UNKNOWN);
- }
+ @Test
+ public void test() {
+ NsType nt,nt2;
+ String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"};
+ for(String s : tests) {
+ nt = NsType.valueOf(s);
+ assertEquals(s,nt.name());
+
+ nt2 = NsType.fromString(s);
+ assertEquals(nt,nt2);
+
+ int t = nt.type;
+ nt2 = NsType.fromType(t);
+ assertEquals(nt,nt2);
+ }
+
+ nt = NsType.fromType(Integer.MIN_VALUE);
+ assertEquals(nt,NsType.UNKNOWN);
+ nt = NsType.fromString("Garbage");
+ assertEquals(nt,NsType.UNKNOWN);
+ }
}
diff --git a/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java
index 07cd7ae9..f9ad5a9e 100644
--- a/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java
+++ b/auth/auth-cass/src/test/java/org/onap/aaf/auth/direct/test/JU_DirectCertIdentity.java
@@ -39,33 +39,33 @@ import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
public class JU_DirectCertIdentity {
-
- public DirectCertIdentity directCertIdentity;
-
- @Before
- public void setUp(){
- directCertIdentity = new DirectCertIdentity();
- }
+
+ public DirectCertIdentity directCertIdentity;
+
+ @Before
+ public void setUp(){
+ directCertIdentity = new DirectCertIdentity();
+ }
- @Mock
- HttpServletRequest req;
- X509Certificate cert;
- byte[] _certBytes;
-
- @Test
- public void testidentity(){
-
- try {
- Principal p = directCertIdentity.identity(req, cert, _certBytes);
- assertEquals(( (p) == null),true);
-
- } catch (CertificateException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- //assertTrue(true);
-
- }
+ @Mock
+ HttpServletRequest req;
+ X509Certificate cert;
+ byte[] _certBytes;
+
+ @Test
+ public void testidentity(){
+
+ try {
+ Principal p = directCertIdentity.identity(req, cert, _certBytes);
+ assertEquals(( (p) == null),true);
+
+ } catch (CertificateException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ //assertTrue(true);
+
+ }
}