From 0ed473b17619d749bbdf56ad17199e71fb04c2be Mon Sep 17 00:00:00 2001
From: Instrumental
Date: Mon, 26 Mar 2018 14:09:21 -0700
Subject: AT&T 2.0.19 Code drop, stage 5

Issue-ID: AAF-197
Change-Id: I81dd2a8fd8cd4d4771e390609909c86ac09b7dac
Signed-off-by: Instrumental
---
 authz-cass/pom.xml | 248 ---
 authz-cass/src/main/cql/ecomp.cql | 118 --
 authz-cass/src/main/cql/init.cql | 212 ---
 .../src/main/java/org/onap/aaf/dao/AbsCassDAO.java | 497 ------
 .../main/java/org/onap/aaf/dao/Bytification.java | 31 -
 .../src/main/java/org/onap/aaf/dao/CIDAO.java | 52 -
 .../src/main/java/org/onap/aaf/dao/Cacheable.java | 34 -
 .../src/main/java/org/onap/aaf/dao/Cached.java | 198 ---
 .../src/main/java/org/onap/aaf/dao/CachedDAO.java | 229 ---
 .../src/main/java/org/onap/aaf/dao/CassAccess.java | 220 ---
 .../main/java/org/onap/aaf/dao/CassDAOImpl.java | 328 ----
 authz-cass/src/main/java/org/onap/aaf/dao/DAO.java | 44 -
 .../main/java/org/onap/aaf/dao/DAOException.java | 52 -
 .../src/main/java/org/onap/aaf/dao/DAO_RO.java | 71 -
 .../src/main/java/org/onap/aaf/dao/Loader.java | 214 ---
 .../src/main/java/org/onap/aaf/dao/Streamer.java | 32 -
 .../src/main/java/org/onap/aaf/dao/Touchable.java | 27 -
 .../org/onap/aaf/dao/aaf/cached/CachedCertDAO.java | 55 -
 .../org/onap/aaf/dao/aaf/cached/CachedCredDAO.java | 67 -
 .../org/onap/aaf/dao/aaf/cached/CachedNSDAO.java | 34 -
 .../org/onap/aaf/dao/aaf/cached/CachedPermDAO.java | 125 --
 .../org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java | 107 --
 .../onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java | 117 --
 .../org/onap/aaf/dao/aaf/cass/ApprovalDAO.java | 206 ---
 .../java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java | 267 ----
 .../org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java | 464 ------
 .../org/onap/aaf/dao/aaf/cass/CacheableData.java | 36 -
 .../java/org/onap/aaf/dao/aaf/cass/CertDAO.java | 244 ---
 .../java/org/onap/aaf/dao/aaf/cass/CredDAO.java | 258 ----
 .../org/onap/aaf/dao/aaf/cass/DelegateDAO.java | 139 --
 .../java/org/onap/aaf/dao/aaf/cass/FutureDAO.java | 183 ---
 .../java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java | 237 ---
 .../java/org/onap/aaf/dao/aaf/cass/Namespace.java | 151 --
 .../main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java | 542 -------
 .../java/org/onap/aaf/dao/aaf/cass/NsSplit.java | 62 -
 .../java/org/onap/aaf/dao/aaf/cass/NsType.java | 74 -
 .../java/org/onap/aaf/dao/aaf/cass/PermDAO.java | 502 -------
 .../java/org/onap/aaf/dao/aaf/cass/RoleDAO.java | 412 -----
 .../java/org/onap/aaf/dao/aaf/cass/Status.java | 88 --
 .../org/onap/aaf/dao/aaf/cass/UserRoleDAO.java | 320 ----
 .../java/org/onap/aaf/dao/aaf/hl/CassExecutor.java | 74 -
 .../java/org/onap/aaf/dao/aaf/hl/Function.java | 1574 --------------------
 .../java/org/onap/aaf/dao/aaf/hl/PermLookup.java | 184 ---
 .../java/org/onap/aaf/dao/aaf/hl/Question.java | 1087 --------------
 .../org/onap/aaf/dao/session/SessionFilter.java | 142 --
 .../org/onap/aaf/authz/cass/hl/JU_Question.java | 500 -------
 .../src/test/java/org/onap/aaf/dao/JU_Cached.java | 127 --
 .../test/java/org/onap/aaf/dao/JU_CachedDAO.java | 66 -
 .../test/java/org/onap/aaf/dao/JU_CassAccess.java | 74 -
 .../test/java/org/onap/aaf/dao/JU_CassDAOImpl.java | 97 --
 .../java/org/onap/aaf/dao/JU_DAOException.java | 50 -
 .../java/org/onap/aaf/dao/aaf/test/AbsJUCass.java | 200 ---
 .../org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java | 147 --
 .../java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java | 137 --
 .../org/onap/aaf/dao/aaf/test/JU_Bytification.java | 266 ----
 .../org/onap/aaf/dao/aaf/test/JU_CacheInfoDAO.java | 65 -
 .../java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java | 105 --
 .../java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java | 252 ----
 .../org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java | 107 --
 .../org/onap/aaf/dao/aaf/test/JU_FastCalling.java | 91 --
 .../org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java | 154 --
 .../java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java | 187 ---
 .../java/org/onap/aaf/dao/aaf/test/JU_NsType.java | 59 -
 .../java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java | 176 ---
 .../java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java | 139 --
 .../org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java | 74 -
 authz-cass/src/test/resources/cadi.properties | 52 -
 67 files changed, 13483 deletions(-)
 delete mode 100644 authz-cass/pom.xml
 delete mode 100644 authz-cass/src/main/cql/ecomp.cql
 delete mode 100644 authz-cass/src/main/cql/init.cql
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Cached.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/CassDAOImpl.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/DAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Loader.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java
 delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java
 delete mode 100644
authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java delete mode 100644 authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CacheInfoDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java delete mode 100644 authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java delete mode 100644 authz-cass/src/test/resources/cadi.properties (limited to 'authz-cass') diff --git a/authz-cass/pom.xml b/authz-cass/pom.xml deleted file mode 100644 index 3de0606d..00000000 --- a/authz-cass/pom.xml +++ /dev/null @@ -1,248 +0,0 @@ - - - - 4.0.0 - - org.onap.aaf.authz - parent - 1.0.1-SNAPSHOT - ../pom.xml - - - authz-cass - Authz Cass - Cassandra DAOs for Authz - jar - https://github.com/att/AAF - - - - Jonathan Gathman - - ATT - - - - - UTF-8 - 1.0.0-SNAPSHOT - - - 0.7.7.201606060606 - true - 3.2 - jacoco - - target/code-coverage/jacoco-ut.exec - target/code-coverage/jacoco-it.exec - - **/gen/**,**/generated-sources/**,**/yang-gen**,**/pax/** - https://nexus.onap.org - /content/repositories/snapshots/ - /content/repositories/releases/ - /content/repositories/staging/ - /content/sites/site/org/onap/aaf/authz/${project.artifactId}/${project.version} - - - - org.onap.aaf.authz - authz-core - ${project.version} - - - - org.onap.aaf.cadi - cadi-aaf - ${project.cadiVersion} - - - - 
com.datastax.cassandra - cassandra-driver-core - 2.1.10 - - - - - org.xerial.snappy - snappy-java - 1.1.1-M1 - - - - net.jpountz.lz4 - lz4 - 1.2.0 - - - - com.googlecode.jcsv - jcsv - 1.4.0 - - - - org.slf4j - slf4j-log4j12 - test - - - - - - - - org.apache.maven.plugins - maven-jarsigner-plugin - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - false - - - - attach-javadocs - - jar - - - - - - - - org.apache.maven.plugins - maven-source-plugin - 2.2.1 - - - attach-sources - - jar-no-fork - - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.7 - true - - ${nexusproxy} - 176c31dfe190a - ecomp-staging - - - - org.jacoco - jacoco-maven-plugin - ${jacoco.version} - - - **/gen/** - **/generated-sources/** - **/yang-gen/** - **/pax/** - - - - - - pre-unit-test - - prepare-agent - - - ${project.build.directory}/code-coverage/jacoco-ut.exec - surefireArgLine - - - - - - post-unit-test - test - - report - - - ${project.build.directory}/code-coverage/jacoco-ut.exec - ${project.reporting.outputDirectory}/jacoco-ut - - - - pre-integration-test - pre-integration-test - - prepare-agent - - - ${project.build.directory}/code-coverage/jacoco-it.exec - - failsafeArgLine - - - - - - post-integration-test - post-integration-test - - report - - - ${project.build.directory}/code-coverage/jacoco-it.exec - ${project.reporting.outputDirectory}/jacoco-it - - - - - - - - - - ecomp-releases - AAF Release Repository - ${nexusproxy}${releaseNexusPath} - - - ecomp-snapshots - AAF Snapshot Repository - ${nexusproxy}${snapshotNexusPath} - - - ecomp-site - dav:${nexusproxy}${sitePath} - - - - - diff --git a/authz-cass/src/main/cql/ecomp.cql b/authz-cass/src/main/cql/ecomp.cql deleted file mode 100644 index 967d6daf..00000000 --- a/authz-cass/src/main/cql/ecomp.cql +++ /dev/null @@ -1,118 +0,0 @@ -// -// Copyright (c) 2016 AT&T Intellectual Property. All rights reserved. 
-// -USE authz; - -// Create Root pass -INSERT INTO cred (id,ns,type,cred,expires) - VALUES ('dgl@openecomp.org','org.openecomp',1,0xab3831f27b39d7a039f9a92aa2bbfe51,'2020-12-31'); - -// Create 'com' root NS -INSERT INTO ns (name,scope,description,parent,type) - VALUES('com',1,'Root Namespace',null,1); - -INSERT INTO role(ns, name, perms, description) - VALUES('com','admin',{'com.access|*|*'},'Com Admins'); - -INSERT INTO role(ns, name, perms, description) - VALUES('com','owner',{'com.access|*|read'},'Com Owners'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('com','access','*','read',{'com.owner'},'Com Read Access'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('com','access','*','*',{'com.admin'},'Com Write Access'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.owner','2020-12-31','com','owner'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.admin','2020-12-31','com','admin'); - -// Create org root NS -INSERT INTO ns (name,scope,description,parent,type) - VALUES('org',1,'Root Namespace Org',null,1); - -INSERT INTO role(ns, name, perms, description) - VALUES('org','admin',{'org.access|*|*'},'Com Admins'); - -INSERT INTO role(ns, name, perms, description) - VALUES('org','owner',{'org.access|*|read'},'Com Owners'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('org','access','*','read',{'org.owner'},'Com Read Access'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('org','access','*','*',{'org.admin'},'Com Write Access'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','org.owner','2020-12-31','org','owner'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','org.admin','2020-12-31','org','admin'); - - -// Create com.att - -INSERT INTO ns (name,scope,description,parent,type) - VALUES('com.att',2,'AT&T Namespace','com',2); - -INSERT INTO role(ns, name, perms,description) - VALUES('com.att','admin',{'com.att.access|*|*'},'AT&T Admins'); - -INSERT INTO role(ns, name, perms,description) - VALUES('com.att','owner',{'com.att.access|*|read'},'AT&T Owners'); - -INSERT INTO perm(ns, type, instance, action, roles,description) - VALUES ('com.att','access','*','read',{'com.att.owner'},'AT&T Read Access'); - -INSERT INTO perm(ns, type, instance, action, roles,description) - VALUES ('com.att','access','*','*',{'com.att.admin'},'AT&T Write Access'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.att.owner','2020-12-31','com.att','owner'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.att.admin','2020-12-31','com.att','admin'); - -// Create com.att.aaf - -INSERT INTO ns (name,scope,description,parent,type) - VALUES('com.att.aaf',3,'Application Authorization Framework','com.att',3); - -INSERT INTO role(ns, name, perms, description) - VALUES('com.att.aaf','admin',{'com.att.aaf.access|*|*'},'AAF Admins'); - -INSERT INTO role(ns, name, perms, description) - VALUES('com.att.aaf','owner',{'com.att.aaf.access|*|read'},'AAF Owners'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('com.att.aaf','access','*','read',{'com.att.aaf.owner'},'AAF Read Access'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('com.att.aaf','access','*','*',{'com.att.aaf.admin'},'AAF Write Access'); - -INSERT INTO 
user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.att.aaf.admin','2020-12-31','com.att.aaf','admin'); -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','com.att.aaf.owner','2020-12-31','com.att.aaf','owner'); - - -// Create org.openecomp -INSERT INTO ns (name,scope,description,parent,type) - VALUES('org.openecomp',2,'Open EComp NS','com.att',2); - -INSERT INTO role(ns, name, perms, description) - VALUES('org.openecomp','admin',{'org.openecomp.access|*|*'},'OpenEcomp Admins'); - -INSERT INTO role(ns, name, perms, description) - VALUES('org.openecomp','owner',{'org.openecomp.access|*|read'},'OpenEcomp Owners'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('org.openecomp','access','*','read',{'org.openecomp.owner'},'OpenEcomp Read Access'); - -INSERT INTO perm(ns, type, instance, action, roles, description) - VALUES ('org.openecomp','access','*','*',{'org.openecomp.admin'},'OpenEcomp Write Access'); - -INSERT INTO user_role(user,role,expires,ns,rname) - VALUES ('dgl@openecomp.org','org.openecomp.admin','2020-12-31','org.openecomp','admin'); diff --git a/authz-cass/src/main/cql/init.cql b/authz-cass/src/main/cql/init.cql deleted file mode 100644 index 3b2688a6..00000000 --- a/authz-cass/src/main/cql/init.cql +++ /dev/null @@ -1,212 +0,0 @@ -// -// Copyright (c) 2016 AT&T Intellectual Property. All rights reserved. -// -// For Developer Machine single instance -// - CREATE KEYSPACE authz - WITH REPLICATION = {'class' : 'SimpleStrategy','replication_factor':1}; - -USE authz; - -// -// CORE Table function -// - -// Namespace - establish hierarchical authority to modify -// Permissions and Roles -// "scope" is flag to determine Policy. Typical important scope -// is "company" (1) -CREATE TABLE ns ( - name varchar, - scope int, // deprecated 2.0.11 - description varchar, - parent varchar, - type int, - PRIMARY KEY (name) -); -CREATE INDEX ns_parent on ns(parent); - - -CREATE TABLE ns_attrib ( - ns varchar, - key varchar, - value varchar, - PRIMARY KEY (ns,key) -); -create index ns_attrib_key on ns_attrib(key); - -// Will be cached -CREATE TABLE role ( - ns varchar, - name varchar, - perms set, // Use "Key" of "name|type|action" - description varchar, - PRIMARY KEY (ns,name) -); -CREATE INDEX role_name ON role(name); - -// Will be cached -CREATE TABLE perm ( - ns varchar, - type varchar, - instance varchar, - action varchar, - roles set, // Need to find Roles given Permissions - description varchar, - PRIMARY KEY (ns,type,instance,action) -); - -// This table is user for Authorization -CREATE TABLE user_role ( - user varchar, - role varchar, // deprecated: change to ns/rname after 2.0.11 - ns varchar, - rname varchar, - expires timestamp, - PRIMARY KEY(user,role) - ); -CREATE INDEX user_role_ns ON user_role(ns); -CREATE INDEX user_role_role ON user_role(role); - -// This table is only for the case where return User Credential (MechID) Authentication -CREATE TABLE cred ( - id varchar, - type int, - expires timestamp, - ns varchar, - other int, - notes varchar, - cred blob, - prev blob, - PRIMARY KEY (id,type,expires) - ); -CREATE INDEX cred_ns ON cred(ns); - -// Certificate Cross Table -// coordinated with CRED type 2 -CREATE TABLE cert ( - fingerprint blob, - id varchar, - x500 varchar, - expires timestamp, - PRIMARY KEY (fingerprint) - ); -CREATE INDEX cert_id ON cert(id); -CREATE INDEX cert_x500 ON cert(x500); - -CREATE TABLE notify ( - user text, - type int, - last timestamp, - checksum int, - PRIMARY KEY 
(user,type) -); - -CREATE TABLE x509 ( - ca text, - serial blob, - id text, - x500 text, - x509 text, - PRIMARY KEY (ca,serial) -); - - -CREATE INDEX x509_id ON x509 (id); -CREATE INDEX x509_x500 ON x509 (x500); - -// -// Deployment Artifact (for Certman) -// -CREATE TABLE artifact ( - mechid text, - machine text, - type Set, - sponsor text, - ca text, - dir text, - appName text, - os_user text, - notify text, - expires timestamp, - renewDays int, - PRIMARY KEY (mechid,machine) -); -CREATE INDEX artifact_machine ON artifact(machine); - -// -// Non-Critical Table functions -// -// Table Info - for Caching -CREATE TABLE cache ( - name varchar, - seg int, // cache Segment - touched timestamp, - PRIMARY KEY(name,seg) -); - -CREATE TABLE history ( - id timeuuid, - yr_mon int, - user varchar, - action varchar, - target varchar, // user, user_role, - subject varchar, // field for searching main portion of target key - memo varchar, //description of the action - reconstruct blob, //serialized form of the target - // detail Map, // additional information - PRIMARY KEY (id) -); -CREATE INDEX history_yr_mon ON history(yr_mon); -CREATE INDEX history_user ON history(user); -CREATE INDEX history_subject ON history(subject); - -// -// A place to hold objects to be created at a future time. -// -CREATE TABLE future ( - id uuid, // uniquify - target varchar, // Target Table - memo varchar, // Description - start timestamp, // When it should take effect - expires timestamp, // When not longer valid - construct blob, // How to construct this object (like History) - PRIMARY KEY(id) -); -CREATE INDEX future_idx ON future(target); -CREATE INDEX future_start_idx ON future(start); - - -CREATE TABLE approval ( - id timeuuid, // unique Key - ticket uuid, // Link to Future Record - user varchar, // the user who needs to be approved - approver varchar, // user approving - type varchar, // approver types i.e. Supervisor, Owner - status varchar, // approval status. pending, approved, denied - memo varchar, // Text for Approval to know what's going on - operation varchar, // List operation to perform - PRIMARY KEY(id) - ); -CREATE INDEX appr_approver_idx ON approval(approver); -CREATE INDEX appr_user_idx ON approval(user); -CREATE INDEX appr_ticket_idx ON approval(ticket); -CREATE INDEX appr_status_idx ON approval(status); - -CREATE TABLE delegate ( - user varchar, - delegate varchar, - expires timestamp, - PRIMARY KEY (user) -); -CREATE INDEX delg_delg_idx ON delegate(delegate); - -// -// Used by authz-batch processes to ensure only 1 runs at a time -// -CREATE TABLE run_lock ( - class text, - host text, - start timestamp, - PRIMARY KEY ((class)) -); diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java deleted file mode 100644 index c76a88f9..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/AbsCassDAO.java +++ /dev/null @@ -1,497 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Deque; -import java.util.List; -import java.util.concurrent.ConcurrentLinkedDeque; - -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.Status; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.Slot; -import org.onap.aaf.inno.env.TimeTaken; -import org.onap.aaf.inno.env.TransStore; -import com.datastax.driver.core.BoundStatement; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ConsistencyLevel; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.ResultSetFuture; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.Session; -import com.datastax.driver.core.exceptions.DriverException; - -public abstract class AbsCassDAO { - protected static final char DOT = '.'; - protected static final char DOT_PLUS_ONE = '.'+1; - protected static final String FIRST_CHAR = Character.toString((char)0); - protected static final String LAST_CHAR = Character.toString((char)Character.MAX_VALUE); - protected static final int FIELD_COMMAS = 0; - protected static final int QUESTION_COMMAS = 1; - protected static final int ASSIGNMENT_COMMAS = 2; - protected static final int WHERE_ANDS = 3; - - private Cluster cluster; - private Session session; - private final String keyspace; - // If this is null, then we own session - private final AbsCassDAO owningDAO; - protected Class dataClass; - private final String name; - private static Slot sessionSlot; - //private static final ArrayList.PSInfo> psinfos = new ArrayList.PSInfo>(); - private static final ArrayList.PSInfo> psinfos = new ArrayList.PSInfo>(); - private static final List EMPTY = new ArrayList(0); - private static final Deque resetDeque = new ConcurrentLinkedDeque(); - private static boolean resetTrigger = false; - private static long nextAvailableReset = 0; - - - public AbsCassDAO(TRANS trans, String name, Cluster cluster, String keyspace, Class dataClass) { - this.name = name; - this.cluster = cluster; - this.keyspace = keyspace; - owningDAO = null; // we own session - session = null; - this.dataClass = dataClass; - - } - - public AbsCassDAO(TRANS trans, String name, AbsCassDAO aDao, Class dataClass) { - this.name = name; - cluster = aDao.cluster; - keyspace = aDao.keyspace; - session = null; - owningDAO = aDao; // We do not own session - this.dataClass = dataClass; - } - - public static void setSessionSlot(Slot slot) { - sessionSlot = slot; - } - - //Note: Lower case ON PURPOSE. 
These names used to create History Messages - public enum CRUD { - create,read,update,delete - ; - -} - - public class PSInfo { - private BoundStatement ps; - private final int size; - private final Loader loader; - private final CRUD crud; // Store CRUD, because it makes a difference in Object Order, see Loader - private final String cql; - private final ConsistencyLevel consistency; - - - /** - * Create a PSInfo and create Prepared Statement - * - * @param trans - * @param theCQL - * @param loader - */ - public PSInfo(TRANS trans, String theCQL, Loader loader, ConsistencyLevel consistency) { - this.loader = loader; - this.consistency=consistency; - psinfos.add(this); - - cql = theCQL.trim().toUpperCase(); - if(cql.startsWith("INSERT")) { - crud = CRUD.create; - } else if(cql.startsWith("UPDATE")) { - crud = CRUD.update; - } else if(cql.startsWith("DELETE")) { - crud = CRUD.delete; - } else { - crud = CRUD.read; - } - - int idx = 0, count=0; - while((idx=cql.indexOf('?',idx))>=0) { - ++idx; - ++count; - } - size=count; - } - - public synchronized void reset() { - ps = null; - } - - private BoundStatement ps(TransStore trans) throws APIException, IOException { - if(ps==null) { - synchronized(this) { - if(ps==null) { - TimeTaken tt = trans.start("Preparing PSInfo " + crud.toString().toUpperCase() + " on " + name,Env.SUB); - try { - ps = new BoundStatement(getSession(trans).prepare(cql)); - ps.setConsistencyLevel(consistency); - } catch (DriverException e) { - reportPerhapsReset(trans,e); - throw e; - } finally { - tt.done(); - } - } - } - } - return ps; - } - - /** - * Execute a Prepared Statement by extracting from DATA object - * - * @param trans - * @param text - * @param data - * @return - */ - public Result execAsync(TRANS trans, String text, DATA data) { - TimeTaken tt = trans.start(text, Env.REMOTE); - try { - return Result.ok(getSession(trans).executeAsync( - ps(trans).bind(loader.extract(data, size, crud)))); - } catch (DriverException | APIException | IOException e) { - AbsCassDAO.this.reportPerhapsReset(trans,e); - return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql); - } finally { - tt.done(); - } - } - - /** - * Execute a Prepared Statement on Object[] key - * - * @param trans - * @param text - * @param objs - * @return - */ - public Result execAsync(TRANS trans, String text, Object ... objs) { - TimeTaken tt = trans.start(text, Env.REMOTE); - try { - return Result.ok(getSession(trans).executeAsync(ps(trans).bind(objs))); - } catch (DriverException | APIException | IOException e) { - AbsCassDAO.this.reportPerhapsReset(trans,e); - return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql); - } finally { - tt.done(); - } - } - - /* - * Note: - * - */ - - /** - * Execute a Prepared Statement by extracting from DATA object - * - * @param trans - * @param text - * @param data - * @return - */ - public Result exec(TRANS trans, String text, DATA data) { - TimeTaken tt = trans.start(text, Env.REMOTE); - try { - /* - * "execute" (and executeAsync) - * Executes the provided query. - This method blocks until at least some result has been received from the database. However, - for SELECT queries, it does not guarantee that the result has been received in full. But it - does guarantee that some response has been received from the database, and in particular - guarantee that if the request is invalid, an exception will be thrown by this method. 
- - Parameters: - statement - the CQL query to execute (that can be any Statement). - Returns: - the result of the query. That result will never be null but can be empty (and will - be for any non SELECT query). - */ - return Result.ok(getSession(trans).execute( - ps(trans).bind(loader.extract(data, size, crud)))); - } catch (DriverException | APIException | IOException e) { - AbsCassDAO.this.reportPerhapsReset(trans,e); - return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql); - } finally { - tt.done(); - } - } - - /** - * Execute a Prepared Statement on Object[] key - * - * @param trans - * @param text - * @param objs - * @return - */ - public Result exec(TRANS trans, String text, Object ... objs) { - TimeTaken tt = trans.start(text, Env.REMOTE); - try { - return Result.ok(getSession(trans).execute(ps(trans).bind(objs))); - } catch (DriverException | APIException | IOException e) { - AbsCassDAO.this.reportPerhapsReset(trans,e); - return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql); - } finally { - tt.done(); - } - } - - /** - * Read the Data from Cassandra given a Prepared Statement (defined by the - * DAO Instance) - * - * This is common behavior among all DAOs. - * @throws DAOException - */ - public Result> read(TRANS trans, String text, Object[] key) { - TimeTaken tt = trans.start(text,Env.REMOTE); - - ResultSet rs; - try { - rs = getSession(trans).execute(key==null?ps(trans):ps(trans).bind(key)); -/// TEST CODE for Exception -// boolean force = true; -// if(force) { -// Map misa = new HashMap(); -// //misa.put(new InetSocketAddress(444),new Exception("no host was tried")); -// misa.put(new InetSocketAddress(444),new Exception("Connection has been closed")); -// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(misa); -//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"no host was tried"); -// } -//// END TEST CODE - } catch (DriverException | APIException | IOException e) { - AbsCassDAO.this.reportPerhapsReset(trans,e); - return Result.err(Status.ERR_Backend,"%s-%s executing %s",e.getClass().getName(),e.getMessage(), cql); - } finally { - tt.done(); - } - - return extract(loader,rs,null /*let Array be created if necessary*/,dflt); - } - - public Result> read(TRANS trans, String text, DATA data) { - return read(trans,text, loader.extract(data, size, crud)); - } - - public Object[] keyFrom(DATA data) { - return loader.extract(data, size, CRUD.delete); // Delete is key only - } - - /* - * Note: in case PSInfos are deleted, we want to remove them from list. This is not expected, - * but we don't want a data leak if it does. 
Finalize doesn't have to happen quickly - */ - @Override - protected void finalize() throws Throwable { - psinfos.remove(this); - } - } - - protected final Accept dflt = new Accept() { - @Override - public boolean ok(DATA data) { - return true; - } - }; - - - @SuppressWarnings("unchecked") - protected final Result> extract(Loader loader, ResultSet rs, List indata, Accept accept) { - List rows = rs.all(); - if(rows.isEmpty()) { - return Result.ok((List)EMPTY); // Result sets now .emptyList(true); - } else { - DATA d; - List data = indata==null?new ArrayList(rows.size()):indata; - - for(Row row : rows) { - try { - d = loader.load(dataClass.newInstance(),row); - if(accept.ok(d)) { - data.add(d); - } - } catch(Exception e) { - return Result.err(e); - } - } - return Result.ok(data); - } - } - - private static final String NEW_CASSANDRA_SESSION_CREATED = "New Cassandra Session Created"; - private static final String NEW_CASSANDRA_CLUSTER_OBJECT_CREATED = "New Cassandra Cluster Object Created"; - private static final String NEW_CASSANDRA_SESSION = "New Cassandra Session"; - - private static class ResetRequest { - //package on purpose - Session session; - long timestamp; - - public ResetRequest(Session session) { - this.session = session; - timestamp = System.currentTimeMillis(); - } - } - - - public static final void primePSIs(TransStore trans) throws APIException, IOException { - for(AbsCassDAO.PSInfo psi : psinfos) { - if(psi.ps==null) { - psi.ps(trans); - } - } - } - - public final Session getSession(TransStore trans) throws APIException, IOException { - // Try to use Trans' session, if exists - if(sessionSlot!=null) { // try to get from Trans - Session sess = trans.get(sessionSlot, null); - if(sess!=null) { - return sess; - } - } - - // If there's an owning DAO, use it's session - if(owningDAO!=null) { - return owningDAO.getSession(trans); - } - - // OK, nothing else works... get our own. - if(session==null || resetTrigger) { - Cluster tempCluster = null; - Session tempSession = null; - try { - synchronized(NEW_CASSANDRA_SESSION_CREATED) { - boolean reset = false; - for(ResetRequest r : resetDeque) { - if(r.session == session) { - if(r.timestamp>nextAvailableReset) { - reset=true; - nextAvailableReset = System.currentTimeMillis() + 60000; - tempCluster = cluster; - tempSession = session; - break; - } else { - trans.warn().log("Cassandra Connection Reset Ignored: Recent Reset"); - } - } - } - - if(reset || session == null) { - TimeTaken tt = trans.start(NEW_CASSANDRA_SESSION, Env.SUB); - try { - // Note: Maitrayee recommended not closing the cluster, just - // overwrite it. 9/30/2016 assuming same for Session - // This was a bad idea. Ran out of File Handles as I suspected.. 
- if(reset) { - for(AbsCassDAO.PSInfo psi : psinfos) { - psi.reset(); - } - } - if(reset || cluster==null) { - cluster = CassAccess.cluster(trans, keyspace); - trans.warn().log(NEW_CASSANDRA_CLUSTER_OBJECT_CREATED); - } - if(reset || session==null) { - session = cluster.connect(keyspace); - trans.warn().log(NEW_CASSANDRA_SESSION_CREATED); - } - } finally { - resetTrigger=false; - tt.done(); - } - } - } - } finally { - TimeTaken tt = trans.start("Clear Reset Deque", Env.SUB); - try { - resetDeque.clear(); - // Not clearing Session/Cluster appears to kill off FileHandles - if(tempSession!=null && !tempSession.isClosed()) { - tempSession.close(); - } - if(tempCluster!=null && !tempCluster.isClosed()) { - tempCluster.close(); - } - } finally { - tt.done(); - } - } - } - return session; - } - - public final boolean reportPerhapsReset(TransStore trans, Exception e) { - if(owningDAO!=null) { - return owningDAO.reportPerhapsReset(trans, e); - } else { - boolean rv = false; - if(CassAccess.isResetException(e)) { - trans.warn().printf("Session Reset called for %s by %s ",session==null?"":session,e==null?"Mgmt Command":e.getClass().getName()); - resetDeque.addFirst(new ResetRequest(session)); - rv = resetTrigger = true; - } - trans.error().log(e); - return rv; - } - } - - public void close(TransStore trans) { - if(owningDAO==null) { - if(session!=null) { - TimeTaken tt = trans.start("Cassandra Session Close", Env.SUB); - try { - session.close(); - } finally { - tt.done(); - } - session = null; - } else { - trans.debug().log("close called(), Session already closed"); - } - } else { - owningDAO.close(trans); - } - } - - protected void wasModified(TRANS trans, CRUD modified, DATA data, String ... override) { - } - - protected interface Accept { - public boolean ok(DATA data); - } - -} - - - diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java b/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java deleted file mode 100644 index 901339e4..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Bytification.java +++ /dev/null @@ -1,31 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.io.IOException; -import java.nio.ByteBuffer; - -public interface Bytification { - public ByteBuffer bytify() throws IOException; - public void reconstitute(ByteBuffer bb) throws IOException; -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java deleted file mode 100644 index 05bb86d0..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/CIDAO.java +++ /dev/null @@ -1,52 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.util.Date; - -import org.onap.aaf.authz.layer.Result; - -import org.onap.aaf.inno.env.Trans; - -public interface CIDAO { - - /** - * Touch the date field for given Table - * - * @param trans - * @param name - * @return - */ - public abstract Result touch(TRANS trans, String name, int ... seg); - - /** - * Read all Info entries, and set local Date objects - * - * This is to support regular data checks on the Database to speed up Caching behavior - * - */ - public abstract Result check(TRANS trans); - - public abstract Date get(TRANS trans, String table, int seg); - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java b/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java deleted file mode 100644 index 08482921..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Cacheable.java +++ /dev/null @@ -1,34 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; -/** - * Interface to obtain Segment Integer from DAO Data - * for use in Caching mechanism - * - * This should typically be obtained by getting the Hash of the key, then using modulus on the size of segment. - * - * - */ -public interface Cacheable { - public int[] invalidate(Cached cache); -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java b/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java deleted file mode 100644 index 5e5323cc..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Cached.java +++ /dev/null @@ -1,198 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Timer; -import java.util.TimerTask; - -import org.onap.aaf.authz.env.AuthzEnv; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.cache.Cache; -import org.onap.aaf.dao.aaf.cass.Status; - -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.Trans; - -public class Cached extends Cache { - // Java does not allow creation of Arrays with Generics in them... - // private Map cache[]; - protected final CIDAO info; - - private static Timer infoTimer; - private Object cache[]; - public final int segSize; - - protected final String name; - - - - // Taken from String Hash, but coded, to ensure consistent across Java versions. 
Also covers negative case; - public int cacheIdx(String key) { - int h = 0; - for (int i = 0; i < key.length(); i++) { - h = 31*h + key.charAt(i); - } - if(h<0)h*=-1; - return h%segSize; - } - - public Cached(CIDAO info, String name, int segSize) { - this.name =name; - this.segSize = segSize; - this.info = info; - cache = new Object[segSize]; - // Create a new Map for each Segment, and store locally - for(int i=0;i data) { - @SuppressWarnings("unchecked") - Map map = ((Map)cache[cacheIdx(key)]); - map.put(key, new Dated(data)); - } - - - public int invalidate(String key) { - int cacheIdx = cacheIdx(key); - @SuppressWarnings("unchecked") - Map map = ((Map)cache[cacheIdx]); -// if(map.remove(key)!=null) // Not seeming to remove all the time - if(map!=null)map.clear(); -// System.err.println("Remove " + name + " " + key); - return cacheIdx; - } - - public Result invalidate(int segment) { - if(segment<0 || segment>=cache.length) return Result.err(Status.ERR_BadData,"Cache Segment %s is out of range",Integer.toString(segment)); - @SuppressWarnings("unchecked") - Map map = ((Map)cache[segment]); - if(map!=null) { - map.clear(); - } - return Result.ok(); - } - - protected interface Getter { - public abstract Result> get(); - }; - - // TODO utilize Segmented Caches, and fold "get" into "reads" - @SuppressWarnings("unchecked") - public Result> get(TRANS trans, String key, Getter getter) { - List ld = null; - Result> rld = null; - - int cacheIdx = cacheIdx(key); - Map map = ((Map)cache[cacheIdx]); - - // Check for saved element in cache - Dated cached = map.get(key); - // Note: These Segment Timestamps are kept up to date with DB - Date dbStamp = info.get(trans, name,cacheIdx); - - // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax) - if(cached!=null && dbStamp.before(cached.timestamp)) { - ld = (List)cached.data; - rld = Result.ok(ld); - } else { - rld = getter.get(); - if(rld.isOK()) { // only store valid lists - map.put(key, new Dated(rld.value)); // successful item found gets put in cache -// } else if(rld.status == Result.ERR_Backend){ -// map.remove(key); - } - } - return rld; - } - - /** - * Each Cached object has multiple Segments that need cleaning. Derive each, and add to Cleansing Thread - * @param env - * @param dao - */ - public static void startCleansing(AuthzEnv env, CachedDAO ... 
dao) { - for(CachedDAO d : dao) { - for(int i=0;i void startRefresh(AuthzEnv env, CIDAO cidao) { - if(infoTimer==null) { - infoTimer = new Timer("CachedDAO Info Refresh Timer"); - int minRefresh = 10*1000*60; // 10 mins Integer.parseInt(env.getProperty(CACHE_MIN_REFRESH_INTERVAL,"2000")); // 2 second minimum refresh - infoTimer.schedule(new Refresh(env,cidao, minRefresh), 1000, minRefresh); // note: Refresh from DB immediately - } - } - - public static void stopTimer() { - Cache.stopTimer(); - if(infoTimer!=null) { - infoTimer.cancel(); - infoTimer = null; - } - } - - private final static class Refresh extends TimerTask { - private static final int maxRefresh = 2*60*10000; // 20 mins - private AuthzEnv env; - private CIDAO cidao; - private int minRefresh; - private long lastRun; - - public Refresh(AuthzEnv env, CIDAO cidao, int minRefresh) { - this.env = env; - this.cidao = cidao; - this.minRefresh = minRefresh; - lastRun = System.currentTimeMillis()-maxRefresh-1000; - } - - @Override - public void run() { - // Evaluate whether to refresh based on transaction rate - long now = System.currentTimeMillis(); - long interval = now-lastRun; - - if(interval < minRefresh || interval < Math.min(env.transRate(),maxRefresh)) return; - lastRun = now; - AuthzTrans trans = env.newTransNoAvg(); - Result rv = cidao.check(trans); - if(rv.status!=Result.OK) { - env.error().log("Error in CacheInfo Refresh",rv.details); - } - if(env.debug().isLoggable()) { - StringBuilder sb = new StringBuilder("Cache Info Refresh: "); - trans.auditTrail(0, sb, Env.REMOTE); - env.debug().log(sb); - } - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java deleted file mode 100644 index 4237b91e..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/CachedDAO.java +++ /dev/null @@ -1,229 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.util.ArrayList; -import java.util.List; - -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.Status; - -import org.onap.aaf.inno.env.Trans; - -/** - * CachedDAO - * - * Cache the response of "get" of any DAO. 
- * - * For simplicity's sake, at this time, we only do this for single Object keys - * - * - * @param - */ -public class CachedDAO,DATA extends Cacheable> - extends Cached implements DAO_RO{ -// private final String dirty_str; - - private final D dao; - - public CachedDAO(D dao, CIDAO info, int segsize) { - super(info, dao.table(), segsize); - - // Instantiate a new Cache per DAO name (so separate instances use the same cache) - this.dao = dao; - //read_str = "Cached READ for " + dao.table(); -// dirty_str = "Cache DIRTY on " + dao.table(); - if(dao instanceof CassDAOImpl) { - ((CassDAOImpl)dao).cache = this; - } - } - - public static, DT extends Cacheable> - CachedDAO create(DA dao, CIDAO info, int segsize) { - return new CachedDAO(dao,info, segsize); - } - - public void add(DATA data) { - String key = keyFromObjs(dao.keyFrom(data)); - List list = new ArrayList(); - list.add(data); - super.add(key,list); - } - -// public void invalidate(TRANS trans, Object ... objs) { -// TimeTaken tt = trans.start(dirty_str, Env.SUB); -// try { -// super.invalidate(keyFromObjs(objs)); -// } finally { -// tt.done(); -// } -// } - - public static String keyFromObjs(Object ... objs) { - String key; - if(objs.length==1 && objs[0] instanceof String) { - key = (String)objs[0]; - } else { - StringBuilder sb = new StringBuilder(); - boolean first = true; - for(Object o : objs) { - if(o!=null) { - if(first) { - first =false; - } else { - sb.append('|'); - } - sb.append(o.toString()); - } - } - key = sb.toString(); - } - return key; - } - - public Result create(TRANS trans, DATA data) { - Result d = dao.create(trans,data); - if(d.status==Status.OK) { - add(d.value); - } else { - trans.error().log(d.errorString()); - } - invalidate(trans,data); - return d; - } - - protected class DAOGetter implements Getter { - protected TRANS trans; - protected Object objs[]; - protected D dao; - public Result> result; - - public DAOGetter(TRANS trans, D dao, Object ... objs) { - this.trans = trans; - this.dao = dao; - this.objs = objs; - } - - /** - * Separated into single call for easy overloading - * @return - */ - public Result> call() { - return dao.read(trans, objs); - } - - @Override - public final Result> get() { - return call(); -// if(result.isOKhasData()) { // Note, given above logic, could exist, but stale -// return result.value; -// } else { -// return null; -// } - } - } - - @Override - public Result> read(final TRANS trans, final Object ... objs) { - DAOGetter getter = new DAOGetter(trans,dao,objs); - return get(trans, keyFromObjs(objs),getter); -// if(ld!=null) { -// return Result.ok(ld);//.emptyList(ld.isEmpty()); -// } -// // Result Result if exists -// if(getter.result==null) { -// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table()); -// } -// return getter.result; - } - - // Slight Improved performance available when String and Obj versions are known. - public Result> read(final String key, final TRANS trans, final Object ... 
objs) { - DAOGetter getter = new DAOGetter(trans,dao,objs); - return get(trans, key, getter); -// if(ld!=null) { -// return Result.ok(ld);//.emptyList(ld.isEmpty()); -// } -// // Result Result if exists -// if(getter.result==null) { -// return Result.err(Status.ERR_NotFound, "No Cache or Lookup found on [%s]",dao.table()); -// } -// return getter.result; - } - - @Override - public Result> read(TRANS trans, DATA data) { - return read(trans,dao.keyFrom(data)); - } - public Result update(TRANS trans, DATA data) { - Result d = dao.update(trans, data); - if(d.status==Status.OK) { - add(data); - } else { - trans.error().log(d.errorString()); - } - return d; - } - - public Result delete(TRANS trans, DATA data, boolean reread) { - if(reread) { // If reread, get from Cache, if possible, not DB exclusively - Result> rd = read(trans,data); - if(rd.notOK()) { - return Result.err(rd); - } else { - trans.error().log(rd.errorString()); - } - if(rd.isEmpty()) { - data.invalidate(this); - return Result.err(Status.ERR_NotFound,"Not Found"); - } - data = rd.value.get(0); - } - Result rv=dao.delete(trans, data, false); - data.invalidate(this); - return rv; - } - - @Override - public void close(TRANS trans) { - if(dao!=null) { - dao.close(trans); - } - } - - - @Override - public String table() { - return dao.table(); - } - - public D dao() { - return dao; - } - - public void invalidate(TRANS trans, DATA data) { - if(info.touch(trans, dao.table(),data.invalidate(this)).notOK()) { - trans.error().log("Cannot touch CacheInfo for Role"); - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java b/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java deleted file mode 100644 index 79bd6e0e..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/CassAccess.java +++ /dev/null @@ -1,220 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzEnv; - -import org.onap.aaf.cadi.routing.GreatCircle; -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.util.Split; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Cluster.Builder; -import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy; - -public class CassAccess { - public static final String KEYSPACE = "authz"; - public static final String CASSANDRA_CLUSTERS = "cassandra.clusters"; - public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port"; - public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user"; - public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password"; - public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions"; - public static final String LATITUDE = "LATITUDE"; - public static final String LONGITUDE = "LONGITUDE"; - private static final List resetExceptions = new ArrayList(); - public static final String ERR_ACCESS_MSG = "Accessing Backend"; - private static Builder cb = null; - - /** - * To create DCAwareRoundRobing Policy: - * Need Properties - * LATITUDE (or AFT_LATITUDE) - * LONGITUDE (or AFT_LONGITUDE) - * CASSANDRA CLUSTERS with additional information: - * machine:DC:lat:long,machine:DC:lat:long - * @param env - * @param prefix - * @return - * @throws APIException - * @throws IOException - */ - - @SuppressWarnings("deprecation") - public static synchronized Cluster cluster(Env env, String prefix) throws APIException, IOException { - if(cb == null) { - String pre; - if(prefix==null) { - pre=""; - } else { - env.info().log("Cassandra Connection for ",prefix); - pre = prefix+'.'; - } - cb = Cluster.builder(); - String str = env.getProperty(pre+CASSANDRA_CLUSTERS_PORT,"9042"); - if(str!=null) { - env.init().log("Cass Port = ",str ); - cb.withPort(Integer.parseInt(str)); - } - str = env.getProperty(pre+CASSANDRA_CLUSTERS_USER_NAME,null); - if(str!=null) { - env.init().log("Cass User = ",str ); - String epass = env.getProperty(pre + CASSANDRA_CLUSTERS_PASSWORD,null); - if(epass==null) { - throw new APIException("No Password configured for " + str); - } - //TODO Figure out way to ensure Decryptor setting in AuthzEnv - if(env instanceof AuthzEnv) { - cb.withCredentials(str,((AuthzEnv)env).decrypt(epass,true)); - } else { - cb.withCredentials(str, env.decryptor().decrypt(epass)); - } - } - - str = env.getProperty(pre+CASSANDRA_RESET_EXCEPTIONS,null); - if(str!=null) { - env.init().log("Cass ResetExceptions = ",str ); - for(String ex : Split.split(',', str)) { - resetExceptions.add(new Resettable(env,ex)); - } - } - - str = env.getProperty(LATITUDE,env.getProperty("AFT_LATITUDE",null)); - Double lat = str!=null?Double.parseDouble(str):null; - str = env.getProperty(LONGITUDE,env.getProperty("AFT_LONGITUDE",null)); - Double lon = str!=null?Double.parseDouble(str):null; - if(lat == null || lon == null) { - throw new APIException("LATITUDE(or AFT_LATITUDE) and/or LONGITUDE(or AFT_LATITUDE) are not set"); - } - - env.init().printf("Service Latitude,Longitude = %f,%f",lat,lon); - - str = env.getProperty(pre+CASSANDRA_CLUSTERS,"localhost"); - env.init().log("Cass Clusters = ",str ); - String[] machs = Split.split(',', str); - String[] cpoints = new 
String[machs.length]; - String bestDC = null; - int numInBestDC = 1; - double mlat, mlon,temp,distance = -1.0; - for(int i=0;i0) { - cpoints[i]=minfo[0]; - } - - // Calc closest DC with Great Circle - if(minfo.length>3) { - mlat = Double.parseDouble(minfo[2]); - mlon = Double.parseDouble(minfo[3]); - if((temp=GreatCircle.calc(lat, lon, mlat, mlon)) > distance) { - distance = temp; - if(bestDC!=null && bestDC.equals(minfo[1])) { - ++numInBestDC; - } else { - bestDC = minfo[1]; - numInBestDC = 1; - } - } else { - if(bestDC!=null && bestDC.equals(minfo[1])) { - ++numInBestDC; - } - } - } - } - - cb.addContactPoints(cpoints); - - if(bestDC!=null) { - // 8/26/2016 Management has determined that Accuracy is preferred over speed in bad situations - // Local DC Aware Load Balancing appears to have the highest normal performance, with the best - // Degraded Accuracy - cb.withLoadBalancingPolicy(new DCAwareRoundRobinPolicy( - bestDC, numInBestDC, true /*allow LocalDC to look at other DCs for LOCAL_QUORUM */)); - env.init().printf("Cassandra configured for DCAwareRoundRobinPolicy at %s with emergency remote of up to %d node(s)" - ,bestDC, numInBestDC); - } else { - env.init().printf("Cassandra is using Default Policy, which is not DC aware"); - } - } - return cb.build(); - } - - private static class Resettable { - private Class cls; - private List messages; - - @SuppressWarnings("unchecked") - public Resettable(Env env, String propData) throws APIException { - if(propData!=null && propData.length()>1) { - String[] split = Split.split(':', propData); - if(split.length>0) { - try { - cls = (Class)Class.forName(split[0]); - } catch (ClassNotFoundException e) { - throw new APIException("Declared Cassandra Reset Exception, " + propData + ", cannot be ClassLoaded"); - } - } - if(split.length>1) { - messages=new ArrayList(); - for(int i=1;i - */ -public class CassDAOImpl extends AbsCassDAO implements DAO { - public static final String USER_NAME = "__USER_NAME__"; - protected static final String CREATE_SP = "CREATE "; - protected static final String UPDATE_SP = "UPDATE "; - protected static final String DELETE_SP = "DELETE "; - protected static final String SELECT_SP = "SELECT "; - - protected final String C_TEXT = getClass().getSimpleName() + " CREATE"; - protected final String R_TEXT = getClass().getSimpleName() + " READ"; - protected final String U_TEXT = getClass().getSimpleName() + " UPDATE"; - protected final String D_TEXT = getClass().getSimpleName() + " DELETE"; - private String table; - - protected final ConsistencyLevel readConsistency,writeConsistency; - - // Setteable only by CachedDAO - protected Cached cache; - - /** - * A Constructor from the originating Cluster. This DAO will open the Session at need, - * and shutdown the session when "close()" is called. - * - * @param cluster - * @param keyspace - * @param dataClass - */ - public CassDAOImpl(TRANS trans, String name, Cluster cluster, String keyspace, Class dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) { - super(trans, name, cluster,keyspace,dataClass); - this.table = table; - readConsistency = read; - writeConsistency = write; - } - - /** - * A Constructor to share Session with other DAOs. - * - * This method get the Session and Cluster information from the calling DAO, and won't - * touch the Session on closure. 
- * - * @param aDao - * @param dataClass - */ - public CassDAOImpl(TRANS trans, String name, AbsCassDAO aDao, Class dataClass, String table, ConsistencyLevel read, ConsistencyLevel write) { - super(trans, name, aDao,dataClass); - this.table = table; - readConsistency = read; - writeConsistency = write; - } - - protected PSInfo createPS; - protected PSInfo readPS; - protected PSInfo updatePS; - protected PSInfo deletePS; - private boolean async=false; - - public void async(boolean bool) { - async = bool; - } - - public final String[] setCRUD(TRANS trans, String table, Class dc,Loader loader) { - return setCRUD(trans, table, dc, loader, -1); - } - - public final String[] setCRUD(TRANS trans, String table, Class dc,Loader loader, int max) { - Field[] fields = dc.getDeclaredFields(); - int end = max>=0 & max0) { - for(int i=0;i0) { - sbfc.append(','); - sbq.append(','); - if(i=keylimit) { - if(i>keylimit) { - sbup.append(','); - } - sbup.append(fields[i].getName()); - sbup.append("=?"); - } - if(i create(TRANS trans, DATA data) { - if(createPS==null) { - Result.err(Result.ERR_NotImplemented,"Create is disabled for %s",getClass().getSimpleName()); - } - if(async) /*ResultSetFuture */ { - Result rs = createPS.execAsync(trans, C_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } else { - Result rs = createPS.exec(trans, C_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } - wasModified(trans, CRUD.create, data); - return Result.ok(data); - } - - /** - * Read the Unique Row associated with Full Keys - */ - public Result> read(TRANS trans, DATA data) { - if(readPS==null) { - Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName()); - } - return readPS.read(trans, R_TEXT, data); - } - - public Result> read(TRANS trans, Object ... key) { - if(readPS==null) { - Result.err(Result.ERR_NotImplemented,"Read is disabled for %s",getClass().getSimpleName()); - } - return readPS.read(trans, R_TEXT, key); - } - - public Result update(TRANS trans, DATA data) { - if(updatePS==null) { - Result.err(Result.ERR_NotImplemented,"Update is disabled for %s",getClass().getSimpleName()); - } - if(async)/* ResultSet rs =*/ { - Result rs = updatePS.execAsync(trans, U_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } else { - Result rs = updatePS.exec(trans, U_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } - - wasModified(trans, CRUD.update, data); - return Result.ok(); - } - - // This method Sig for Cached... 
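For context, CassAccess.cluster() above reads contact points from a comma-separated cassandra.clusters property whose entries may carry machine:DC:lat:long, and (per its javadoc) prefers the data centre closest to the service's own LATITUDE/LONGITUDE before configuring a DC-aware load-balancing policy. A rough sketch of that parsing and nearest-DC computation is below, using a plain haversine formula rather than the CADI GreatCircle helper; the NearestDc class, its methods, and the sample hosts are illustrative only.

import java.util.ArrayList;
import java.util.List;

// Illustrative nearest-DC selection over "machine:DC:lat:long" entries.
public class NearestDc {
    public static String pick(String clustersProp, double myLat, double myLon) {
        String best = null;
        double bestDist = Double.MAX_VALUE;
        for (String entry : clustersProp.split(",")) {
            String[] p = entry.split(":");
            if (p.length > 3) {
                double d = haversineKm(myLat, myLon,
                        Double.parseDouble(p[2]), Double.parseDouble(p[3]));
                if (d < bestDist) {
                    bestDist = d;
                    best = p[1]; // the DC name
                }
            }
        }
        return best; // null when no entry carries coordinates
    }

    static double haversineKm(double lat1, double lon1, double lat2, double lon2) {
        double dLat = Math.toRadians(lat2 - lat1);
        double dLon = Math.toRadians(lon2 - lon1);
        double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                 + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                 * Math.sin(dLon / 2) * Math.sin(dLon / 2);
        return 6371.0 * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
    }

    public static void main(String[] args) {
        // Hypothetical hosts: DC1 near New York, DC2 near Atlanta; a service in
        // Washington DC resolves to DC1.
        String prop = "cass1.example.org:DC1:40.7:-74.0,cass2.example.org:DC2:33.7:-84.4";
        System.out.println(pick(prop, 39.0, -77.0)); // DC1
    }
}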
- public Result delete(TRANS trans, DATA data, boolean reread) { - if(deletePS==null) { - Result.err(Result.ERR_NotImplemented,"Delete is disabled for %s",getClass().getSimpleName()); - } - // Since Deleting will be stored off, for possible re-constitution, need the whole thing - if(reread) { - Result> rd = read(trans,data); - if(rd.notOK()) { - return Result.err(rd); - } - if(rd.isEmpty()) { - return Result.err(Status.ERR_NotFound,"Not Found"); - } - for(DATA d : rd.value) { - if(async) { - Result rs = deletePS.execAsync(trans, D_TEXT, d); - if(rs.notOK()) { - return Result.err(rs); - } - } else { - Result rs = deletePS.exec(trans, D_TEXT, d); - if(rs.notOK()) { - return Result.err(rs); - } - } - wasModified(trans, CRUD.delete, d); - } - } else { - if(async)/* ResultSet rs =*/ { - Result rs = deletePS.execAsync(trans, D_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } else { - Result rs = deletePS.exec(trans, D_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - } - wasModified(trans, CRUD.delete, data); - } - return Result.ok(); - } - - public final Object[] keyFrom(DATA data) { - return createPS.keyFrom(data); - } - - @Override - public String table() { - return table; - } - - public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency"; - public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency"; - protected static ConsistencyLevel readConsistency(AuthzTrans trans, String table) { - String prop = trans.getProperty(CASS_READ_CONSISTENCY+'.'+table); - if(prop==null) { - prop = trans.getProperty(CASS_READ_CONSISTENCY); - if(prop==null) { - return ConsistencyLevel.ONE; // this is Cassandra Default - } - } - return ConsistencyLevel.valueOf(prop); - } - - protected static ConsistencyLevel writeConsistency(AuthzTrans trans, String table) { - String prop = trans.getProperty(CASS_WRITE_CONSISTENCY+'.'+table); - if(prop==null) { - prop = trans.getProperty(CASS_WRITE_CONSISTENCY); - if(prop==null) { - return ConsistencyLevel.ONE; // this is Cassandra Default\ - } - } - return ConsistencyLevel.valueOf(prop); - } - - public static DataInputStream toDIS(ByteBuffer bb) { - byte[] b = bb.array(); - return new DataInputStream( - new ByteArrayInputStream(b,bb.position(),bb.limit()) - ); - } - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java deleted file mode 100644 index acdb36da..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/DAO.java +++ /dev/null @@ -1,44 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
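For context, the readConsistency/writeConsistency helpers shown above resolve a table-specific property (cassandra.readConsistency.<table>) first, then the global cassandra.readConsistency, and only then fall back to the driver default of ONE. A small stand-alone sketch of that lookup order follows; it returns the level name as a String and uses java.util.Properties in place of the AuthzTrans property access, purely to stay free of the Cassandra driver dependency.

import java.util.Properties;

// Illustrative property fallback for consistency levels: table-specific key,
// then global key, then a hard default (ONE, the Cassandra driver default).
public class ConsistencyConfig {
    static final String CASS_READ_CONSISTENCY = "cassandra.readConsistency";

    public static String readConsistency(Properties props, String table) {
        String prop = props.getProperty(CASS_READ_CONSISTENCY + '.' + table);
        if (prop == null) {
            prop = props.getProperty(CASS_READ_CONSISTENCY);
        }
        return prop == null ? "ONE" : prop;
    }

    public static void main(String[] args) {
        Properties p = new Properties();
        p.setProperty("cassandra.readConsistency", "LOCAL_QUORUM");
        p.setProperty("cassandra.readConsistency.history", "ONE");
        System.out.println(readConsistency(p, "perm"));    // LOCAL_QUORUM (global)
        System.out.println(readConsistency(p, "history")); // ONE (table override)
    }
}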
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import org.onap.aaf.authz.layer.Result; - -import org.onap.aaf.inno.env.Trans; - - -/** - * DataAccessObject Interface - * - * Extend the ReadOnly form (for Get), and add manipulation methods - * - * @param - */ -public interface DAO extends DAO_RO { - public Result create(TRANS trans, DATA data); - public Result update(TRANS trans, DATA data); - // In many cases, the data has been correctly read first, so we shouldn't read again - // Use reread=true if you are using DATA with only a Key - public Result delete(TRANS trans, DATA data, boolean reread); - public Object[] keyFrom(DATA data); -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java deleted file mode 100644 index 85b8c841..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/DAOException.java +++ /dev/null @@ -1,52 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -public class DAOException extends Exception { - - /** - * - */ - private static final long serialVersionUID = 1527904125585539823L; - -// // TODO - enum in result class == is our intended design, currently the DAO layer does not use Result so we still use these for now -// public final static DAOException RoleNotFoundDAOException = new DAOException("RoleNotFound"); -// public final static DAOException PermissionNotFoundDAOException = new DAOException("PermissionNotFound"); -// public final static DAOException UserNotFoundDAOException = new DAOException("UserNotFound"); - - public DAOException() { - } - - public DAOException(String message) { - super(message); - } - - public DAOException(Throwable cause) { - super(cause); - } - - public DAOException(String message, Throwable cause) { - super(message, cause); - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java b/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java deleted file mode 100644 index a853675d..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/DAO_RO.java +++ /dev/null @@ -1,71 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.util.List; - -import org.onap.aaf.authz.layer.Result; - -import org.onap.aaf.inno.env.Trans; - -/** - * DataAccessObject - ReadOnly - * - * It is useful to have a ReadOnly part of the interface for CachedDAO - * - * Normal DAOs will implement full DAO - * - * - * @param - */ -public interface DAO_RO { - /** - * Get a List of Data given Key of Object Array - * @param objs - * @return - * @throws DAOException - */ - public Result> read(TRANS trans, Object ... 
key); - - /** - * Get a List of Data given Key of DATA Object - * @param trans - * @param key - * @return - * @throws DAOException - */ - public Result> read(TRANS trans, DATA key); - - /** - * close DAO - */ - public void close(TRANS trans); - - /** - * Return name of referenced Data - * @return - */ - public String table(); - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java b/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java deleted file mode 100644 index 42a73f4b..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Loader.java +++ /dev/null @@ -1,214 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import com.datastax.driver.core.Row; - -public abstract class Loader { - private int keylimit; - public Loader(int keylimit) { - this.keylimit = keylimit; - } - - public int keylimit() { - return keylimit; - } - - protected abstract DATA load(DATA data, Row row); - protected abstract void key(DATA data, int idx, Object[] obj); - protected abstract void body(DATA data, int idx, Object[] obj); - - public final Object[] extract(DATA data, int size, CassDAOImpl.CRUD type) { - Object[] rv=null; - switch(type) { - case delete: - rv = new Object[keylimit()]; - key(data,0,rv); - break; - case update: - rv = new Object[size]; - body(data,0,rv); - int body = size-keylimit(); - if(body>0) { - key(data,body,rv); - } - break; - default: - rv = new Object[size]; - key(data,0,rv); - if(size>keylimit()) { - body(data,keylimit(),rv); - } - break; - } - return rv; - } - - public static void writeString(DataOutputStream os, String s) throws IOException { - if(s==null) { - os.writeInt(-1); - } else { - switch(s.length()) { - case 0: - os.writeInt(0); - break; - default: - byte[] bytes = s.getBytes(); - os.writeInt(bytes.length); - os.write(bytes); - } - } - } - - /** - * We use bytes here to set a Maximum - * - * @param is - * @param MAX - * @return - * @throws IOException - */ - public static String readString(DataInputStream is, byte[] _buff) throws 
IOException { - int l = is.readInt(); - byte[] buff = _buff; - switch(l) { - case -1: return null; - case 0: return ""; - default: - // Cover case where there is a large string, without always allocating a large buffer. - if(l>buff.length) { - buff = new byte[l]; - } - is.read(buff,0,l); - return new String(buff,0,l); - } - } - - /** - * Write a set with proper sizing - * - * Note: at the moment, this is just String. Probably can develop system where types - * are supported too... but not now. - * - * @param os - * @param set - * @throws IOException - */ - public static void writeStringSet(DataOutputStream os, Collection set) throws IOException { - if(set==null) { - os.writeInt(-1); - } else { - os.writeInt(set.size()); - for(String s : set) { - writeString(os, s); - } - } - - } - - public static Set readStringSet(DataInputStream is, byte[] buff) throws IOException { - int l = is.readInt(); - if(l<0) { - return null; - } - Set set = new HashSet(l); - for(int i=0;i readStringList(DataInputStream is, byte[] buff) throws IOException { - int l = is.readInt(); - if(l<0) { - return null; - } - List list = new ArrayList(l); - for(int i=0;i map) throws IOException { - if(map==null) { - os.writeInt(-1); - } else { - Set> es = map.entrySet(); - os.writeInt(es.size()); - for(Entry e : es) { - writeString(os, e.getKey()); - writeString(os, e.getValue()); - } - } - - } - - public static Map readStringMap(DataInputStream is, byte[] buff) throws IOException { - int l = is.readInt(); - if(l<0) { - return null; - } - Map map = new HashMap(l); - for(int i=0;iversion) { - throw new IOException("Unsupported Data Version: " + v); - } - return v; - } - -} - diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java b/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java deleted file mode 100644 index f645dd6d..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Streamer.java +++ /dev/null @@ -1,32 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
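For context, Loader's writeString/readString helpers above serialize strings with an int length prefix (-1 for null, 0 for empty) and reuse a caller-supplied byte buffer, allocating a larger one only when a value exceeds it. The sketch below shows the same scheme end to end; note the deleted readString used InputStream.read, which may return a short read, so readFully is used here as the safer equivalent, and UTF-8 is assumed rather than the platform default charset.

import java.io.*;

// Illustrative length-prefixed string codec in the style of Loader.writeString/readString.
public class StringCodec {
    public static void writeString(DataOutputStream os, String s) throws IOException {
        if (s == null) {
            os.writeInt(-1);                 // null marker
        } else {
            byte[] bytes = s.getBytes("UTF-8");
            os.writeInt(bytes.length);       // 0 means empty string
            os.write(bytes);
        }
    }

    public static String readString(DataInputStream is, byte[] buff) throws IOException {
        int l = is.readInt();
        if (l < 0) return null;
        if (l == 0) return "";
        byte[] b = l > buff.length ? new byte[l] : buff; // grow only when needed
        is.readFully(b, 0, l);               // readFully avoids short reads
        return new String(b, 0, l, "UTF-8");
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream os = new DataOutputStream(baos);
        writeString(os, "org.onap.aaf");
        writeString(os, null);
        DataInputStream is = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        byte[] buff = new byte[48];
        System.out.println(readString(is, buff)); // org.onap.aaf
        System.out.println(readString(is, buff)); // null
    }
}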
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -public interface Streamer { - public abstract void marshal(DATA data, DataOutputStream os) throws IOException; - public abstract void unmarshal(DATA data, DataInputStream is) throws IOException; -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java b/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java deleted file mode 100644 index dc3ab052..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/Touchable.java +++ /dev/null @@ -1,27 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -public interface Touchable { - // Or make all DAOs accept list of CIDAOs... -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java deleted file mode 100644 index 567bd062..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCertDAO.java +++ /dev/null @@ -1,55 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.CertDAO; - -public class CachedCertDAO extends CachedDAO { - public CachedCertDAO(CertDAO dao, CIDAO info) { - super(dao, info, CertDAO.CACHE_SEG); - } - - /** - * Pass through Cert ID Lookup - * - * @param trans - * @param ns - * @return - */ - - public Result> readID(AuthzTrans trans, final String id) { - return dao().readID(trans, id); - } - - public Result> readX500(AuthzTrans trans, final String x500) { - return dao().readX500(trans, x500); - } - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java deleted file mode 100644 index 14675039..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedCredDAO.java +++ /dev/null @@ -1,67 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.Status; - -public class CachedCredDAO extends CachedDAO { - public CachedCredDAO(CredDAO dao, CIDAO info) { - super(dao, info, CredDAO.CACHE_SEG); - } - - /** - * Pass through Cred Lookup - * - * Unlike Role and Perm, we don't need or want to cache these elements... Only used for NS Delete. 
- * - * @param trans - * @param ns - * @return - */ - public Result> readNS(AuthzTrans trans, final String ns) { - - return dao().readNS(trans, ns); - } - - public Result> readID(AuthzTrans trans, final String id) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao().readID(trans, id); - } - }; - - Result> lurd = get(trans, id, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_UserNotFound,"No User Cred found"); - } - return lurd; - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java deleted file mode 100644 index aae74e25..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedNSDAO.java +++ /dev/null @@ -1,34 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO; - -public class CachedNSDAO extends CachedDAO { - public CachedNSDAO(NsDAO dao, CIDAO info) { - super(dao, info, NsDAO.CACHE_SEG); - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java deleted file mode 100644 index 7d4c7fe3..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedPermDAO.java +++ /dev/null @@ -1,125 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.PermDAO.Data; - -public class CachedPermDAO extends CachedDAO { - - public CachedPermDAO(PermDAO dao, CIDAO info) { - super(dao, info, PermDAO.CACHE_SEG); - } - - public Result> readNS(AuthzTrans trans, final String ns) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao.readNS(trans, ns); - } - }; - - Result> lurd = get(trans, ns, getter); - if(lurd.isOKhasData()) { - return lurd; - } else { - - } -// if(getter.result==null) { -// if(lurd==null) { - return Result.err(Status.ERR_PermissionNotFound,"No Permission found - " + lurd.details); -// } else { -// return Result.ok(lurd); -// } -// } -// return getter.result; - } - - public Result> readChildren(AuthzTrans trans, final String ns, final String type) { - return dao().readChildren(trans,ns,type); - } - - /** - * - * @param trans - * @param ns - * @param type - * @return - */ - public Result> readByType(AuthzTrans trans, final String ns, final String type) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao.readByType(trans, ns, type); - } - }; - - // Note: Can reuse index1 here, because there is no name collision versus response - Result> lurd = get(trans, ns+'|'+type, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_PermissionNotFound,"No Permission found"); - } - return lurd; - } - - /** - * Add desciption to this permission - * - * @param trans - * @param ns - * @param type - * @param instance - * @param action - * @param description - * @return - */ - public Result addDescription(AuthzTrans trans, String ns, String type, - String instance, String action, String description) { - //TODO Invalidate? 
- return dao().addDescription(trans, ns, type, instance, action, description); - } - - public Result addRole(AuthzTrans trans, PermDAO.Data perm, RoleDAO.Data role) { - Result rv = dao().addRole(trans,perm,role.encode()); - if(trans.debug().isLoggable()) - trans.debug().log("Adding",role.encode(),"to", perm, "with CachedPermDAO.addRole"); - invalidate(trans,perm); - return rv; - } - - public Result delRole(AuthzTrans trans, Data perm, RoleDAO.Data role) { - Result rv = dao().delRole(trans,perm,role.encode()); - if(trans.debug().isLoggable()) - trans.debug().log("Removing",role.encode(),"from", perm, "with CachedPermDAO.delRole"); - invalidate(trans,perm); - return rv; - } - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java deleted file mode 100644 index 788efbe8..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedRoleDAO.java +++ /dev/null @@ -1,107 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.RoleDAO.Data; - -public class CachedRoleDAO extends CachedDAO { - public CachedRoleDAO(RoleDAO dao, CIDAO info) { - super(dao, info, RoleDAO.CACHE_SEG); - } - - public Result> readNS(AuthzTrans trans, final String ns) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao.readNS(trans, ns); - } - }; - - Result> lurd = get(trans, ns, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_RoleNotFound,"No Role found"); - } - return lurd; - } - - public Result> readName(AuthzTrans trans, final String name) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao().readName(trans, name); - } - }; - - Result> lurd = get(trans, name, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_RoleNotFound,"No Role found"); - } - return lurd; - } - - public Result> readChildren(AuthzTrans trans, final String ns, final String name) { - // At this point, I'm thinking it's better not to try to cache "*" results - // Data probably won't be accurate, and adding it makes every update invalidate most of the cache - // 2/4/2014 - return dao().readChildren(trans,ns,name); - } - - public Result addPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) { - Result rv = dao().addPerm(trans,rd,perm); - if(trans.debug().isLoggable()) - trans.debug().log("Adding",perm,"to", rd, "with CachedRoleDAO.addPerm"); - invalidate(trans, rd); - return rv; - } - - public Result delPerm(AuthzTrans trans, RoleDAO.Data rd, PermDAO.Data perm) { - Result rv = dao().delPerm(trans,rd,perm); - if(trans.debug().isLoggable()) - trans.debug().log("Removing",perm,"from", rd, "with CachedRoleDAO.addPerm"); - invalidate(trans, rd); - return rv; - } - - /** - * Add description to this role - * - * @param trans - * @param ns - * @param name - * @param description - * @return - */ - public Result addDescription(AuthzTrans trans, String ns, String name, String description) { - //TODO Invalidate? - return dao().addDescription(trans, ns, name, description); - - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java deleted file mode 100644 index 68231ea0..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cached/CachedUserRoleDAO.java +++ /dev/null @@ -1,117 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cached; - -import java.util.ArrayList; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO.Data; - -import org.onap.aaf.inno.env.Slot; - -public class CachedUserRoleDAO extends CachedDAO { - private Slot transURSlot; - - public CachedUserRoleDAO(UserRoleDAO dao, CIDAO info) { - super(dao, info, UserRoleDAO.CACHE_SEG); - transURSlot = dao.transURSlot; - } - - /** - * Special Case. - * User Roles by User are very likely to be called many times in a Transaction, to validate "May User do..." - * Pull result, and make accessible by the Trans, which is always keyed by User. - * @param trans - * @param user - * @return - */ - public Result> readByUser(AuthzTrans trans, final String user) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - // If the call is for THIS user, and it exists, get from TRANS, add to TRANS if not. 
- if(user!=null && user.equals(trans.user())) { - Result> transLD = trans.get(transURSlot,null); - if(transLD==null ) { - transLD = dao.readByUser(trans, user); - } - return transLD; - } else { - return dao.readByUser(trans, user); - } - } - }; - Result> lurd = get(trans, user, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",user); - } - return lurd; - } - - - public Result> readByRole(AuthzTrans trans, final String role) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - return dao.readByRole(trans, role); - } - }; - Result> lurd = get(trans, role, getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for [%s]",role); - } - return lurd; - } - - public Result> readUserInRole(final AuthzTrans trans, final String user, final String role) { - DAOGetter getter = new DAOGetter(trans,dao()) { - public Result> call() { - if(user.equals(trans.user())) { - Result> rrbu = readByUser(trans, user); - if(rrbu.isOK()) { - List ld = new ArrayList(1); - for(Data d : rrbu.value) { - if(d.role.equals(role)) { - ld.add(d); - break; - } - } - return Result.ok(ld).emptyList(ld.isEmpty()); - } else { - return rrbu; - } - } - return dao.readByUserRole(trans, user, role); - } - }; - Result> lurd = get(trans, keyFromObjs(user,role), getter); - if(lurd.isOK() && lurd.isEmpty()) { - return Result.err(Status.ERR_UserRoleNotFound,"UserRole not found for role [%s] and user [%s]",role,user); - } - return lurd; - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java deleted file mode 100644 index dec1c9ae..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ApprovalDAO.java +++ /dev/null @@ -1,206 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
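For context, CachedUserRoleDAO.readByUser above adds a second, very short-lived layer of caching: when the lookup is for the user who owns the current transaction, the result is parked in a Slot on the AuthzTrans so repeated "may this user do X" checks within one request hit neither Cassandra nor the segment cache. A rough stand-in for that idea follows, using a plain per-request context map instead of the inno-env Slot API; RequestContext, rolesFor, and loadRoles are hypothetical names.

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Illustrative per-request memoization: cache the current user's roles on the
// request context so repeated authorization checks reuse one lookup.
public class PerRequestRoleCache {
    // Hypothetical request context; AAF keeps this on AuthzTrans via a Slot.
    public static class RequestContext {
        public final String user;
        private final Map<String, Object> slots = new HashMap<>();
        public RequestContext(String user) { this.user = user; }
    }

    @SuppressWarnings("unchecked")
    public static List<String> rolesFor(RequestContext ctx, String user,
                                        Function<String, List<String>> loadRoles) {
        if (user.equals(ctx.user)) {
            List<String> cached = (List<String>) ctx.slots.get("userRoles");
            if (cached == null) {
                cached = loadRoles.apply(user);   // one DAO read per request
                ctx.slots.put("userRoles", cached);
            }
            return cached;
        }
        return loadRoles.apply(user);             // other users go through normally
    }
}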
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - - -public class ApprovalDAO extends CassDAOImpl { - public static final String PENDING = "pending"; - public static final String DENIED = "denied"; - public static final String APPROVED = "approved"; - - private static final String TABLE = "approval"; - private HistoryDAO historyDAO; - private PSInfo psByUser, psByApprover, psByTicket, psByStatus; - - - public ApprovalDAO(AuthzTrans trans, Cluster cluster, String keyspace) { - super(trans, ApprovalDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = new HistoryDAO(trans, this); - init(trans); - } - - - public ApprovalDAO(AuthzTrans trans, HistoryDAO hDAO) { - super(trans, ApprovalDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO=hDAO; - init(trans); - } - - private static final int KEYLIMIT = 1; - public static class Data { - public UUID id; - public UUID ticket; - public String user; - public String approver; - public String type; - public String status; - public String memo; - public String operation; - public Date updated; - } - - private static class ApprovalLoader extends Loader { - public static final ApprovalLoader deflt = new ApprovalLoader(KEYLIMIT); - - public ApprovalLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.id = row.getUUID(0); - data.ticket = row.getUUID(1); - data.user = row.getString(2); - data.approver = row.getString(3); - data.type = row.getString(4); - data.status = row.getString(5); - data.memo = row.getString(6); - data.operation = row.getString(7); - if(row.getColumnDefinitions().size()>8) { - // Rows reported in MicroSeconds - data.updated = new Date(row.getLong(8)/1000); - } - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.id; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.ticket; - obj[++idx]=data.user; - obj[++idx]=data.approver; - obj[++idx]=data.type; - obj[++idx]=data.status; - obj[++idx]=data.memo; - obj[++idx]=data.operation; - } - } - - private void init(AuthzTrans trans) { - String[] helpers = setCRUD(trans, TABLE, Data.class, ApprovalLoader.deflt,8); - // Need a specialty Creator to handle the "now()" - replace(CRUD.create, new PSInfo(trans, "INSERT INTO " + TABLE + " (" + helpers[FIELD_COMMAS] + - ") VALUES(now(),?,?,?,?,?,?,?)",new ApprovalLoader(0) { - @Override - protected void key(Data data, int idx, Object[] obj) { - // Overridden because key is the "now()" - } - },writeConsistency) - ); - - psByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + - " WHERE user = ?", new ApprovalLoader(1) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.user; - } - }, readConsistency); - - psByApprover = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + - " WHERE approver = ?", new ApprovalLoader(1) { - @Override - 
protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.approver; - } - }, readConsistency); - - psByTicket = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + - " WHERE ticket = ?", new ApprovalLoader(1) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.ticket; - } - }, readConsistency); - - psByStatus = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + ", WRITETIME(status) FROM " + TABLE + - " WHERE status = ?", new ApprovalLoader(1) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.status; - } - }, readConsistency); - - - } - - public Result> readByUser(AuthzTrans trans, String user) { - return psByUser.read(trans, R_TEXT, new Object[]{user}); - } - - public Result> readByApprover(AuthzTrans trans, String approver) { - return psByApprover.read(trans, R_TEXT, new Object[]{approver}); - } - - public Result> readByTicket(AuthzTrans trans, UUID ticket) { - return psByTicket.read(trans, R_TEXT, new Object[]{ticket}); - } - - public Result> readByStatus(AuthzTrans trans, String status) { - return psByStatus.read(trans, R_TEXT, new Object[]{status}); - } - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject?override[1]:data.user + "|" + data.approver; - hd.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : (modified.name() + "d approval for " + data.user); - // Detail? - // Reconstruct? - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java deleted file mode 100644 index bc5532e0..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/ArtiDAO.java +++ /dev/null @@ -1,267 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
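For context, each DAO's wasModified hook, like the ApprovalDAO version just shown, turns every create/update/delete into a history row recording who acted, the target table, a subject key, and a memo, where the caller may override the memo and subject through trailing varargs. A compact sketch of composing such an entry is below; HistoryEntry and its fields are illustrative and do not reflect the HistoryDAO schema.

// Illustrative audit-entry composition in the spirit of wasModified(...).
public class HistoryEntry {
    public String user, action, target, subject, memo;

    // override[0] replaces the memo, override[1] replaces the subject, when present.
    public static HistoryEntry of(String user, String action, String target,
                                  String defaultSubject, String defaultMemo,
                                  String... override) {
        HistoryEntry h = new HistoryEntry();
        h.user = user;
        h.action = action;
        h.target = target;
        h.subject = (override.length > 1 && override[1] != null) ? override[1] : defaultSubject;
        h.memo = (override.length > 0 && override[0] != null)
                ? String.format("%s by %s", override[0], user)
                : defaultMemo;
        return h;
    }

    public static void main(String[] args) {
        HistoryEntry h = of("demo@people.example.org", "delete", "approval",
                "user|approver", "deleted approval for user");
        System.out.println(h.action + " " + h.target + ": " + h.memo);
    }
}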
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; - -import org.onap.aaf.inno.env.util.Chrono; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - -/** - * CredDAO manages credentials. - * Date: 7/19/13 - */ -public class ArtiDAO extends CassDAOImpl { - public static final String TABLE = "artifact"; - - private HistoryDAO historyDAO; - private PSInfo psByMechID,psByMachine; - - public ArtiDAO(AuthzTrans trans, Cluster cluster, String keyspace) { - super(trans, ArtiDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - public ArtiDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) { - super(trans, ArtiDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = hDao; - init(trans); - } - - public static final int KEYLIMIT = 2; - public static class Data implements Bytification { - public String mechid; - public String machine; - private Set type; - public String sponsor; - public String ca; - public String dir; - public String appName; - public String os_user; - public String notify; - public Date expires; - public int renewDays; - -// // Getters - public Set type(boolean mutable) { - if (type == null) { - type = new HashSet(); - } else if (mutable && !(type instanceof HashSet)) { - type = new HashSet(type); - } - return type; - } - - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ArtifactLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - ArtifactLoader.deflt.unmarshal(this, toDIS(bb)); - } - - public String toString() { - return mechid + ' ' + machine + ' ' + Chrono.dateTime(expires); - } - } - - private static class ArtifactLoader extends Loader implements Streamer{ - public static final int MAGIC=95829343; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; // Note: - - public static final ArtifactLoader deflt = new ArtifactLoader(KEYLIMIT); - public ArtifactLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.mechid = row.getString(0); - data.machine = row.getString(1); - data.type = row.getSet(2, String.class); - data.sponsor = row.getString(3); - data.ca = row.getString(4); - data.dir = row.getString(5); - data.appName = row.getString(6); - data.os_user = row.getString(7); - data.notify = row.getString(8); - data.expires = row.getDate(9); - data.renewDays = row.getInt(10); - return data; - } - - @Override - protected void key(final Data data, final int idx, Object[] obj) { - int i; - 
obj[i=idx] = data.mechid; - obj[++i] = data.machine; - } - - @Override - protected void body(final Data data, final int idx, Object[] obj) { - int i; - obj[i=idx] = data.type; - obj[++i] = data.sponsor; - obj[++i] = data.ca; - obj[++i] = data.dir; - obj[++i] = data.appName; - obj[++i] = data.os_user; - obj[++i] = data.notify; - obj[++i] = data.expires; - obj[++i] = data.renewDays; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.mechid); - writeString(os, data.machine); - os.writeInt(data.type.size()); - for(String s : data.type) { - writeString(os, s); - } - writeString(os, data.sponsor); - writeString(os, data.ca); - writeString(os, data.dir); - writeString(os, data.appName); - writeString(os, data.os_user); - writeString(os, data.notify); - os.writeLong(data.expires==null?-1:data.expires.getTime()); - os.writeInt(data.renewDays); - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.mechid = readString(is,buff); - data.machine = readString(is,buff); - int size = is.readInt(); - data.type = new HashSet(size); - for(int i=0;i> readByMechID(AuthzTrans trans, String mechid) { - return psByMechID.read(trans, R_TEXT, new Object[]{mechid}); - } - - public Result> readByMachine(AuthzTrans trans, String machine) { - return psByMachine.read(trans, R_TEXT, new Object[]{machine}); - } - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject?override[1]: data.mechid; - hd.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : String.format("%sd %s for %s",modified.name(),data.mechid,data.machine); - // Detail? - if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize CredDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java deleted file mode 100644 index e7cab3ef..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheInfoDAO.java +++ /dev/null @@ -1,464 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.URI; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; - -import org.onap.aaf.authz.env.AuthzEnv; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.AbsCassDAO; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CassAccess; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; - -import org.onap.aaf.cadi.CadiException; -import org.onap.aaf.cadi.SecuritySetter; -import org.onap.aaf.cadi.client.Future; -import org.onap.aaf.cadi.client.Rcli; -import org.onap.aaf.cadi.client.Retryable; -import org.onap.aaf.cadi.http.HMangr; -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.TimeTaken; -import org.onap.aaf.inno.env.Trans; -import com.datastax.driver.core.BoundStatement; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.exceptions.DriverException; - -public class CacheInfoDAO extends CassDAOImpl implements CIDAO { - - private static final String TABLE = "cache"; - public static final Map info = new ConcurrentHashMap(); - - private static CacheUpdate cacheUpdate; - - - private BoundStatement check; - // Hold current time stamps from Tables - private final Date startTime; - - public CacheInfoDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, CacheInfoDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - startTime = new Date(); - init(trans); - } - - public CacheInfoDAO(AuthzTrans trans, AbsCassDAO aDao) throws APIException, IOException { - super(trans, CacheInfoDAO.class.getSimpleName(),aDao,Data.class,TABLE,readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - startTime = new Date(); - init(trans); - } - - - ////////////////////////////////////////// - // Data Definition, matches Cassandra DM - ////////////////////////////////////////// - private static final int KEYLIMIT = 2; - /** - */ - public static class Data { - public Data() { - name = null; - touched = null; - } - public Data(String name, int seg) { - this.name = name; - this.seg = seg; - 
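        // touched is left unset here; InfoLoader.load() fills it from the
        // table's timestamp column when existing rows are read back.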
touched = null; - } - - public String name; - public int seg; - public Date touched; - } - - private static class InfoLoader extends Loader { - public static final InfoLoader dflt = new InfoLoader(KEYLIMIT); - - public InfoLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - // Int more efficient - data.name = row.getString(0); - data.seg = row.getInt(1); - data.touched = row.getDate(2); - return data; - } - - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx]=data.name; - obj[++idx]=data.seg; - } - - @Override - protected void body(Data data, int idx, Object[] obj) { - obj[idx]=data.touched; - } - } - - public static void startUpdate(AuthzEnv env, HMangr hman, SecuritySetter ss, String ip, int port) { - if(cacheUpdate==null) { - Thread t= new Thread(cacheUpdate = new CacheUpdate(env,hman,ss, ip,port),"CacheInfo Update Thread"); - t.setDaemon(true); - t.start(); - } - } - - public static void stopUpdate() { - if(cacheUpdate!=null) { - cacheUpdate.go=false; - } - } - - private final static class CacheUpdate extends Thread { - public static BlockingQueue notifyDQ = new LinkedBlockingQueue(2000); - - private static final String VOID_CT="application/Void+json;q=1.0;charset=utf-8;version=2.0,application/json;q=1.0;version=2.0,*/*;q=1.0"; - private AuthzEnv env; - private HMangr hman; - private SecuritySetter ss; - private final String authority; - public boolean go = true; - - public CacheUpdate(AuthzEnv env, HMangr hman, SecuritySetter ss, String ip, int port) { - this.env = env; - this.hman = hman; - this.ss = ss; - - this.authority = ip+':'+port; - } - - private static class Transfer { - public String table; - public int segs[]; - public Transfer(String table, int[] segs) { - this.table = table; - this.segs = segs; - } - } - private class CacheClear extends Retryable { - public int total=0; - private AuthzTrans trans; - private String type; - private String segs; - - public CacheClear(AuthzTrans trans) { - this.trans = trans; - } - - public void set(Entry es) { - type = es.getKey(); - segs = es.getValue().toString(); - } - - @Override - public Integer code(Rcli client) throws APIException, CadiException { - URI to = client.getURI(); - if(!to.getAuthority().equals(authority)) { - Future f = client.delete("/mgmt/cache/"+type+'/'+segs,VOID_CT); - if(f.get(hman.readTimeout())) { - ++total; - } else { - trans.error().log("Error During AAF Peer Notify",f.code(),f.body()); - } - } - return total; - } - } - - private class IntHolder { - private int[] raw; - HashSet set; - - public IntHolder(int ints[]) { - raw = ints; - set = null; - } - public void add(int[] ints) { - if(set==null) { - set = new HashSet(); - - for(int i=0;i gather = null; - AuthzTrans trans = null; - long start=0; - // Do a block poll first - do { - if(gather==null) { - start = System.nanoTime(); - trans = env.newTransNoAvg(); - cc = new CacheClear(trans); - gather = new HashMap(); - } - IntHolder prev = gather.get(data.table); - if(prev==null) { - gather.put(data.table,new IntHolder(data.segs)); - } else { - prev.add(data.segs); - } - // continue while there is data - } while((data = notifyDQ.poll())!=null); - if(gather!=null) { - for(Entry es : gather.entrySet()) { - cc.set(es); - try { - if(hman.all(ss, cc, false)!=null) { - ++count; - } - } catch (Exception e) { - trans.error().log(e, "Error on Cache Update"); - } - } - if(env.debug().isLoggable()) { - float millis = (System.nanoTime()-start)/1000000f; - StringBuilder sb = new 
StringBuilder("Direct Cache Refresh: "); - sb.append("Updated "); - sb.append(count); - if(count==1) { - sb.append(" entry for "); - } else { - sb.append(" entries for "); - } - int peers = count<=0?0:cc.total/count; - sb.append(peers); - sb.append(" client"); - if(peers!=1) { - sb.append('s'); - } - sb.append(" in "); - sb.append(millis); - sb.append("ms"); - trans.auditTrail(0, sb, Env.REMOTE); - env.debug().log(sb); - } - } - } catch (InterruptedException e1) { - go = false; - } - } while(go); - } - } - - private void init(AuthzTrans trans) throws APIException, IOException { - - String[] helpers = setCRUD(trans, TABLE, Data.class, InfoLoader.dflt); - check = getSession(trans).prepare(SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE).bind(); - - disable(CRUD.create); - disable(CRUD.delete); - } - - /* (non-Javadoc) - * @see org.onap.aaf.dao.aaf.cass.CIDAO#touch(org.onap.aaf.authz.env.AuthzTrans, java.lang.String, int) - */ - - @Override - public Result touch(AuthzTrans trans, String name, int ... seg) { - ///////////// - // Direct Service Cache Invalidation - ///////////// - // ConcurrentQueues are open-ended. We don't want any Memory leaks - // Note: we keep a separate counter, because "size()" on a Linked Queue is expensive - if(cacheUpdate!=null) { - try { - if(!CacheUpdate.notifyDQ.offer(new CacheUpdate.Transfer(name, seg),2,TimeUnit.SECONDS)) { - trans.error().log("Cache Notify Queue is not accepting messages, bouncing may be appropriate" ); - } - } catch (InterruptedException e) { - trans.error().log("Cache Notify Queue posting was interrupted" ); - } - } - - ///////////// - // Table Based Cache Invalidation (original) - ///////////// - // Note: Save time with multiple Sequence Touches, but PreparedStmt doesn't support IN - StringBuilder start = new StringBuilder("CacheInfoDAO Touch segments "); - start.append(name); - start.append(": "); - StringBuilder sb = new StringBuilder("BEGIN BATCH\n"); - boolean first = true; - for(int s : seg) { - sb.append(UPDATE_SP); - sb.append(TABLE); - sb.append(" SET touched=dateof(now()) WHERE name = '"); - sb.append(name); - sb.append("' AND seg = "); - sb.append(s); - sb.append(";\n"); - if(first) { - first =false; - } else { - start.append(','); - } - start.append(s); - } - sb.append("APPLY BATCH;"); - TimeTaken tt = trans.start(start.toString(),Env.REMOTE); - try { - getSession(trans).executeAsync(sb.toString()); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } finally { - tt.done(); - } - return Result.ok(); - } - - /* (non-Javadoc) - * @see org.onap.aaf.dao.aaf.cass.CIDAO#check(org.onap.aaf.authz.env.AuthzTrans) - */ - @Override - public Result check(AuthzTrans trans) { - ResultSet rs; - TimeTaken tt = trans.start("Check Table Timestamps",Env.REMOTE); - try { - rs = getSession(trans).execute(check); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } finally { - tt.done(); - } - - String lastName = null; - Date[] dates = null; - for(Row row : rs.all()) { - String name = row.getString(0); - int seg = row.getInt(1); - if(!name.equals(lastName)) { - dates = info.get(name); - lastName=name; - } - if(dates==null) { - dates=new Date[seg+1]; - info.put(name,dates); - } else if(dates.length<=seg) { - Date[] temp = new Date[seg+1]; - System.arraycopy(dates, 0, temp, 0, dates.length); - dates = temp; - info.put(name, dates); 
- } - Date temp = row.getDate(2); - if(dates[seg]==null || dates[seg].before(temp)) { - dates[seg]=temp; - } - } - return Result.ok(); - } - - /* (non-Javadoc) - * @see org.onap.aaf.dao.aaf.cass.CIDAO#get(java.lang.String, int) - */ - @Override - public Date get(AuthzTrans trans, String table, int seg) { - Date[] dates = info.get(table); - if(dates==null) { - dates = new Date[seg+1]; - touch(trans,table, seg); - } else if(dates.length<=seg) { - Date[] temp = new Date[seg+1]; - System.arraycopy(dates, 0, temp, 0, dates.length); - dates = temp; - } - Date rv = dates[seg]; - if(rv==null) { - rv=dates[seg]=startTime; - } - return rv; - } - - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - // Do nothing - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java deleted file mode 100644 index 75648130..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CacheableData.java +++ /dev/null @@ -1,36 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import org.onap.aaf.dao.Cacheable; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CachedDAO; - -public abstract class CacheableData implements Cacheable { - // WARNING: DON'T attempt to add any members here, as it will - // be treated by system as fields expected in Tables - protected int seg(Cached cache, Object ... fields) { - return cache==null?0:cache.invalidate(CachedDAO.keyFromObjs(fields)); - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java deleted file mode 100644 index 4ed6a3e2..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CertDAO.java +++ /dev/null @@ -1,244 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.math.BigInteger; -import java.nio.ByteBuffer; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; - -import org.onap.aaf.inno.env.APIException; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - -/** - * CredDAO manages credentials. - * Date: 7/19/13 - */ -public class CertDAO extends CassDAOImpl { - public static final String TABLE = "x509"; - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - - private HistoryDAO historyDAO; - private CIDAO infoDAO; - private PSInfo psX500,psID; - - public CertDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, CertDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - public CertDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException { - super(trans, CertDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = hDao; - infoDAO = ciDao; - init(trans); - } - - public static final int KEYLIMIT = 2; - public static class Data extends CacheableData implements Bytification { - - public String ca; - public BigInteger serial; - public String id; - public String x500; - public String x509; - - @Override - public int[] invalidate(Cached cache) { - return new int[] { - seg(cache,ca,serial) - }; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CertLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - CertLoader.deflt.unmarshal(this, toDIS(bb)); - } - } - - private static class CertLoader extends Loader implements Streamer{ - public static final int MAGIC=85102934; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; // Note: - - public static final CertLoader deflt = new CertLoader(KEYLIMIT); - public CertLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, 
Row row) { - data.ca = row.getString(0); - ByteBuffer bb = row.getBytesUnsafe(1); - byte[] bytes = new byte[bb.remaining()]; - bb.get(bytes); - data.serial = new BigInteger(bytes); - data.id = row.getString(2); - data.x500 = row.getString(3); - data.x509 = row.getString(4); - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx] = data.ca; - obj[++idx] = ByteBuffer.wrap(data.serial.toByteArray()); - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx] = data.id; - obj[++idx] = data.x500; - obj[++idx] = data.x509; - - - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.id); - writeString(os, data.x500); - writeString(os, data.x509); - writeString(os, data.ca); - if(data.serial==null) { - os.writeInt(-1); - } else { - byte[] dsba = data.serial.toByteArray(); - int l = dsba.length; - os.writeInt(l); - os.write(dsba,0,l); - } - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.id = readString(is,buff); - data.x500 = readString(is,buff); - data.x509 = readString(is,buff); - data.ca = readString(is,buff); - int i = is.readInt(); - if(i<0) { - data.serial=null; - } else { - byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads - is.read(bytes); - data.serial = new BigInteger(bytes); - } - } - } - - public Result> read(AuthzTrans trans, Object ... key) { - // Translate BigInteger to Byte array for lookup - return super.read(trans, key[0],ByteBuffer.wrap(((BigInteger)key[1]).toByteArray())); - } - - private void init(AuthzTrans trans) throws APIException, IOException { - // Set up sub-DAOs - if(historyDAO==null) { - historyDAO = new HistoryDAO(trans,this); - } - if(infoDAO==null) { - infoDAO = new CacheInfoDAO(trans,this); - } - - String[] helpers = setCRUD(trans, TABLE, Data.class, CertLoader.deflt); - - psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE id = ?", CertLoader.deflt,readConsistency); - - psX500 = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE x500 = ?", CertLoader.deflt,readConsistency); - - } - - public Result> readX500(AuthzTrans trans, String x500) { - return psX500.read(trans, R_TEXT, new Object[]{x500}); - } - - public Result> readID(AuthzTrans trans, String id) { - return psID.read(trans, R_TEXT, new Object[]{id}); - } - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject?override[1]: data.id; - hd.memo = memo - ? 
String.format("%s by %s", override[0], hd.user) - : (modified.name() + "d certificate info for " + data.id); - // Detail? - if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize CertDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) { - trans.error().log("Cannot touch Cert"); - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java deleted file mode 100644 index dad5fdb3..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/CredDAO.java +++ /dev/null @@ -1,258 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.util.Chrono; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - -/** - * CredDAO manages credentials. 
- * Date: 7/19/13 - */ -public class CredDAO extends CassDAOImpl { - public static final String TABLE = "cred"; - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - public static final int RAW = -1; - public static final int BASIC_AUTH = 1; - public static final int BASIC_AUTH_SHA256 = 2; - public static final int CERT_SHA256_RSA =200; - - private HistoryDAO historyDAO; - private CIDAO infoDAO; - private PSInfo psNS; - private PSInfo psID; - - public CredDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, CredDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - public CredDAO(AuthzTrans trans, HistoryDAO hDao, CacheInfoDAO ciDao) throws APIException, IOException { - super(trans, CredDAO.class.getSimpleName(),hDao, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = hDao; - infoDAO = ciDao; - init(trans); - } - - public static final int KEYLIMIT = 3; - public static class Data extends CacheableData implements Bytification { - - public String id; - public Integer type; - public Date expires; - public Integer other; - public String ns; - public String notes; - public ByteBuffer cred; // this is a blob in cassandra - - - @Override - public int[] invalidate(Cached cache) { - return new int[] { - seg(cache,id) // cache is for all entities - }; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - CredLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - CredLoader.deflt.unmarshal(this, toDIS(bb)); - } - - public String toString() { - return id + ' ' + type + ' ' + Chrono.dateTime(expires); - } - } - - private static class CredLoader extends Loader implements Streamer{ - public static final int MAGIC=153323443; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; // Note: - - public static final CredLoader deflt = new CredLoader(KEYLIMIT); - public CredLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.id = row.getString(0); - data.type = row.getInt(1); // NOTE: in datastax driver, If the int value is NULL, 0 is returned! 
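        // if the null-vs-0 distinction ever mattered here, row.isNull(1) could be
        // checked first (available in the 2.x DataStax driver); as written, a
        // missing type simply loads as 0.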
- data.expires = row.getDate(2); - data.other = row.getInt(3); - data.ns = row.getString(4); - data.notes = row.getString(5); - data.cred = row.getBytesUnsafe(6); - return data; - } - - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx] = data.id; - obj[++idx] = data.type; - obj[++idx] = data.expires; - } - - @Override - protected void body(Data data, int idx, Object[] obj) { - int i; - obj[i=idx] = data.other; - obj[++i] = data.ns; - obj[++i] = data.notes; - obj[++i] = data.cred; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.id); - os.writeInt(data.type); - os.writeLong(data.expires==null?-1:data.expires.getTime()); - os.writeInt(data.other==null?0:data.other); - writeString(os, data.ns); - writeString(os, data.notes); - if(data.cred==null) { - os.writeInt(-1); - } else { - int l = data.cred.limit()-data.cred.position(); - os.writeInt(l); - os.write(data.cred.array(),data.cred.position(),l); - } - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.id = readString(is,buff); - data.type = is.readInt(); - - long l = is.readLong(); - data.expires = l<0?null:new Date(l); - data.other = is.readInt(); - data.ns = readString(is,buff); - data.notes = readString(is,buff); - - int i = is.readInt(); - if(i<0) { - data.cred=null; - } else { - byte[] bytes = new byte[i]; // a bit dangerous, but lessened because of all the previous sized data reads - is.read(bytes); - data.cred = ByteBuffer.wrap(bytes); - } - } - } - - private void init(AuthzTrans trans) throws APIException, IOException { - // Set up sub-DAOs - if(historyDAO==null) { - historyDAO = new HistoryDAO(trans,this); - } - if(infoDAO==null) { - infoDAO = new CacheInfoDAO(trans,this); - } - - - String[] helpers = setCRUD(trans, TABLE, Data.class, CredLoader.deflt); - - psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns = ?", CredLoader.deflt,readConsistency); - - psID = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE id = ?", CredLoader.deflt,readConsistency); - } - - public Result> readNS(AuthzTrans trans, String ns) { - return psNS.read(trans, R_TEXT, new Object[]{ns}); - } - - public Result> readID(AuthzTrans trans, String id) { - return psID.read(trans, R_TEXT, new Object[]{id}); - } - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject?override[1]: data.id; - hd.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : (modified.name() + "d credential for " + data.id); - // Detail? 
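        // On delete, the whole record is serialized into hd.reconstruct via bytify(),
        // so the History row keeps enough detail to rebuild the credential later
        // (reconstitute() is the matching inverse).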
- if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize CredDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).status!=Status.OK) { - trans.error().log("Cannot touch Cred"); - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java deleted file mode 100644 index 6ff71208..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/DelegateDAO.java +++ /dev/null @@ -1,139 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.AbsCassDAO; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - -public class DelegateDAO extends CassDAOImpl { - - public static final String TABLE = "delegate"; - private PSInfo psByDelegate; - - public DelegateDAO(AuthzTrans trans, Cluster cluster, String keyspace) { - super(trans, DelegateDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - public DelegateDAO(AuthzTrans trans, AbsCassDAO aDao) { - super(trans, DelegateDAO.class.getSimpleName(),aDao,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - private static final int KEYLIMIT = 1; - public static class Data implements Bytification { - public String user; - public String delegate; - public Date expires; - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - DelegateLoader.dflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - DelegateLoader.dflt.unmarshal(this, toDIS(bb)); - } - } - - private static class DelegateLoader extends Loader implements Streamer{ - public static final int MAGIC=0xD823ACF2; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; - - public static final DelegateLoader dflt = new DelegateLoader(KEYLIMIT); - - public DelegateLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.user = row.getString(0); - data.delegate = row.getString(1); - data.expires = row.getDate(2); - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.user; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx]=data.delegate; - obj[++idx]=data.expires; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.user); - writeString(os, data.delegate); - os.writeLong(data.expires.getTime()); - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.user = readString(is, buff); - data.delegate = readString(is,buff); - data.expires = new Date(is.readLong()); - } - } - - private void init(AuthzTrans trans) { - String[] helpers = setCRUD(trans, TABLE, Data.class, DelegateLoader.dflt); - psByDelegate = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE delegate = ?", new DelegateLoader(1),readConsistency); - - } - - public Result> readByDelegate(AuthzTrans 
trans, String delegate) { - return psByDelegate.read(trans, R_TEXT, new Object[]{delegate}); - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java deleted file mode 100644 index 4fda97a1..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/FutureDAO.java +++ /dev/null @@ -1,183 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.DAOException; -import org.onap.aaf.dao.Loader; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; - -/** - * FutureDAO stores Construction information to create - * elements at another time. 
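 * Rows are keyed by a UUID; readByStartAndTarget finds entries whose start
 * date has been reached for a given target, e.g. a hypothetical usage:
 *   Result<List<Data>> rv = futureDAO.readByStartAndTarget(trans, new Date(), "user_role");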
- * - * 8/20/2013 - */ -public class FutureDAO extends CassDAOImpl { - private static final String TABLE = "future"; - private final HistoryDAO historyDAO; -// private static String createString; - private PSInfo psByStartAndTarget; - - public FutureDAO(AuthzTrans trans, Cluster cluster, String keyspace) { - super(trans, FutureDAO.class.getSimpleName(),cluster, keyspace, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = new HistoryDAO(trans, this); - init(trans); - } - - public FutureDAO(AuthzTrans trans, HistoryDAO hDAO) { - super(trans, FutureDAO.class.getSimpleName(),hDAO, Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO=hDAO; - init(trans); - } - - public static final int KEYLIMIT = 1; - public static class Data { - public UUID id; - public String target; - public String memo; - public Date start; - public Date expires; - public ByteBuffer construct; // this is a blob in cassandra - } - - private static class FLoader extends Loader { - public FLoader() { - super(KEYLIMIT); - } - - public FLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.id = row.getUUID(0); - data.target = row.getString(1); - data.memo = row.getString(2); - data.start = row.getDate(3); - data.expires = row.getDate(4); - data.construct = row.getBytes(5); - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx] = data.id; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx] = data.target; - obj[++idx] = data.memo; - obj[++idx] = data.start; - obj[++idx] = data.expires; - obj[++idx] = data.construct; - } - } - - private void init(AuthzTrans trans) { - // Set up sub-DAOs - String[] helpers = setCRUD(trans, TABLE, Data.class, new FLoader(KEYLIMIT)); - - // Uh, oh. Can't use "now()" in Prepared Statements (at least at this level) -// createString = "INSERT INTO " + TABLE + " ("+helpers[FIELD_COMMAS] +") VALUES (now(),"; -// -// // Need a specialty Creator to handle the "now()" -// replace(CRUD.Create, new PSInfo(trans, "INSERT INTO future (" + helpers[FIELD_COMMAS] + -// ") VALUES(now(),?,?,?,?,?)",new FLoader(0))); - - // Other SELECT style statements... match with a local Method - psByStartAndTarget = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + - " FROM future WHERE start <= ? and target = ? ALLOW FILTERING", new FLoader(2) { - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx]=data.start; - obj[++idx]=data.target; - } - },readConsistency); - - - } - - public Result> readByStartAndTarget(AuthzTrans trans, Date start, String target) throws DAOException { - return psByStartAndTarget.read(trans, R_TEXT, new Object[]{start, target}); - } - - /** - * Override create to add secondary ID to Subject in History, and create Data.ID, if it is null - */ - public Result create(AuthzTrans trans, FutureDAO.Data data, String id) { - // If ID is not set (typical), create one. 
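        // The generated id is a name-based (type 3) UUID built from user + target +
        // current millis, so it is derived from those inputs rather than random.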
- if(data.id==null) { - StringBuilder sb = new StringBuilder(trans.user()); - sb.append(data.target); - sb.append(System.currentTimeMillis()); - data.id = UUID.nameUUIDFromBytes(sb.toString().getBytes()); - } - Result rs = createPS.exec(trans, C_TEXT, data); - if(rs.notOK()) { - return Result.err(rs); - } - wasModified(trans, CRUD.create, data, null, id); - return Result.ok(data); - } - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject?override[1]:""; - hd.memo = memo?String.format("%s by %s", override[0], hd.user):data.memo; - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java deleted file mode 100644 index e72c774a..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/HistoryDAO.java +++ /dev/null @@ -1,237 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.nio.ByteBuffer; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.AbsCassDAO; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; - -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ConsistencyLevel; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; - -/** - * History - * - * - * History is a special case, because we don't want Updates or Deletes... Too likely to mess up history. - * - * 9-9-2013 - Found a problem with using "Prepare". 
You cannot prepare anything with a "now()" in it, as - * it is evaluated once during the prepare, and kept. That renders any use of "now()" pointless. Therefore - * the Create function needs to be run fresh everytime. - * - * Fixed in Cassandra 1.2.6 https://issues.apache.org/jira/browse/CASSANDRA-5616 - * - */ -public class HistoryDAO extends CassDAOImpl { - private static final String TABLE = "history"; - - public static final SimpleDateFormat monthFormat = new SimpleDateFormat("yyyyMM"); -// private static final SimpleDateFormat dayTimeFormat = new SimpleDateFormat("ddHHmmss"); - - private String[] helpers; - - private HistLoader defLoader; - - private AbsCassDAO.PSInfo readByUser, readBySubject, readByYRMN; - - public HistoryDAO(AuthzTrans trans, Cluster cluster, String keyspace) { - super(trans, HistoryDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY); - init(trans); - } - - public HistoryDAO(AuthzTrans trans, AbsCassDAO aDao) { - super(trans, HistoryDAO.class.getSimpleName(),aDao,Data.class,TABLE,ConsistencyLevel.LOCAL_ONE,ConsistencyLevel.ANY); - init(trans); - } - - - private static final int KEYLIMIT = 1; - public static class Data { - public UUID id; - public int yr_mon; - public String user; - public String action; - public String target; - public String subject; - public String memo; -// Map detail = null; -// public Map detail() { -// if(detail == null) { -// detail = new HashMap(); -// } -// return detail; -// } - public ByteBuffer reconstruct; - } - - private static class HistLoader extends Loader { - public HistLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.id = row.getUUID(0); - data.yr_mon = row.getInt(1); - data.user = row.getString(2); - data.action = row.getString(3); - data.target = row.getString(4); - data.subject = row.getString(5); - data.memo = row.getString(6); -// data.detail = row.getMap(6, String.class, String.class); - data.reconstruct = row.getBytes(7); - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.id; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.yr_mon; - obj[++idx]=data.user; - obj[++idx]=data.action; - obj[++idx]=data.target; - obj[++idx]=data.subject; - obj[++idx]=data.memo; -// obj[++idx]=data.detail; - obj[++idx]=data.reconstruct; - } - }; - - private void init(AuthzTrans trans) { - // Loader must match fields order - defLoader = new HistLoader(KEYLIMIT); - helpers = setCRUD(trans, TABLE, Data.class, defLoader); - - // Need a specialty Creator to handle the "now()" - // 9/9/2013 - jg - Just great... now() is evaluated once on Client side, invalidating usage (what point is a now() from a long time in the past? - // Unless this is fixed, we're putting in non-prepared statement - // Solved in Cassandra. Make sure you are running 1.2.6 Cassandra or later. 
https://issues.apache.org/jira/browse/CASSANDRA-5616 - replace(CRUD.create, new PSInfo(trans, "INSERT INTO history (" + helpers[FIELD_COMMAS] + - ") VALUES(now(),?,?,?,?,?,?,?)", - new HistLoader(0) { - @Override - protected void key(Data data, int idx, Object[] obj) { - } - },writeConsistency) - ); -// disable(CRUD.Create); - - replace(CRUD.read, new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + - " FROM history WHERE id = ?", defLoader,readConsistency) -// new HistLoader(2) { -// @Override -// protected void key(Data data, int idx, Object[] obj) { -// obj[idx]=data.yr_mon; -// obj[++idx]=data.id; -// } -// }) - ); - disable(CRUD.update); - disable(CRUD.delete); - - readByUser = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + - " FROM history WHERE user = ?", defLoader,readConsistency); - readBySubject = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + - " FROM history WHERE subject = ? and target = ? ALLOW FILTERING", defLoader,readConsistency); - readByYRMN = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + - " FROM history WHERE yr_mon = ?", defLoader,readConsistency); - async(true); //TODO dropping messages with Async - } - - public static Data newInitedData() { - Data data = new Data(); - Date now = new Date(); - data.yr_mon = Integer.parseInt(monthFormat.format(now)); - // data.day_time = Integer.parseInt(dayTimeFormat.format(now)); - return data; - } - - public Result> readByYYYYMM(AuthzTrans trans, int yyyymm) { - Result rs = readByYRMN.exec(trans, "yr_mon", yyyymm); - if(rs.notOK()) { - return Result.err(rs); - } - return extract(defLoader,rs.value,null,dflt); - } - - /** - * Gets the history for a user in the specified year and month - * year - the year in yyyy format - * month - the month in a year ...values 1 - 12 - **/ - public Result> readByUser(AuthzTrans trans, String user, int ... yyyymm) { - if(yyyymm.length==0) { - return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified"); - } - Result rs = readByUser.exec(trans, "user", user); - if(rs.notOK()) { - return Result.err(rs); - } - return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt); - } - - public Result> readBySubject(AuthzTrans trans, String subject, String target, int ... yyyymm) { - if(yyyymm.length==0) { - return Result.err(Status.ERR_BadData, "No or invalid yyyymm specified"); - } - Result rs = readBySubject.exec(trans, "subject", subject, target); - if(rs.notOK()) { - return Result.err(rs); - } - return extract(defLoader,rs.value,null,yyyymm.length>0?new YYYYMM(yyyymm):dflt); - } - - private class YYYYMM implements Accept { - private int[] yyyymm; - public YYYYMM(int yyyymm[]) { - this.yyyymm = yyyymm; - } - @Override - public boolean ok(Data data) { - int dym = data.yr_mon; - for(int ym:yyyymm) { - if(dym==ym) { - return true; - } - } - return false; - } - - }; - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java deleted file mode 100644 index 98c46165..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Namespace.java +++ /dev/null @@ -1,151 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; -import java.util.Map.Entry; - -import org.onap.aaf.cssa.rserv.Pair; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; - - -public class Namespace implements Bytification { - public static final int MAGIC=250935515; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; - - public String name; - public List owner; - public List admin; - public List> attrib; - public String description; - public Integer type; - public String parent; - public Namespace() {} - - public Namespace(NsDAO.Data ndd) { - name = ndd.name; - description = ndd.description; - type = ndd.type; - parent = ndd.parent; - if(ndd.attrib!=null && !ndd.attrib.isEmpty()) { - attrib = new ArrayList>(); - for( Entry entry : ndd.attrib.entrySet()) { - attrib.add(new Pair(entry.getKey(),entry.getValue())); - } - } - } - - public Namespace(NsDAO.Data ndd,List owner, List admin) { - name = ndd.name; - this.owner = owner; - this.admin = admin; - description = ndd.description; - type = ndd.type; - parent = ndd.parent; - if(ndd.attrib!=null && !ndd.attrib.isEmpty()) { - attrib = new ArrayList>(); - for( Entry entry : ndd.attrib.entrySet()) { - attrib.add(new Pair(entry.getKey(),entry.getValue())); - } - } - } - - public NsDAO.Data data() { - NsDAO.Data ndd = new NsDAO.Data(); - ndd.name = name; - ndd.description = description; - ndd.parent = parent; - ndd.type = type; - return ndd; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - DataOutputStream os = new DataOutputStream(baos); - - Loader.writeHeader(os,MAGIC,VERSION); - Loader.writeString(os, name); - os.writeInt(type); - Loader.writeStringSet(os,admin); - Loader.writeStringSet(os,owner); - Loader.writeString(os,description); - Loader.writeString(os,parent); - - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - DataInputStream is = CassDAOImpl.toDIS(bb); - /*int version = */Loader.readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - - byte[] buff = new byte[BUFF_SIZE]; - name = Loader.readString(is, buff); - type = is.readInt(); - admin = Loader.readStringList(is,buff); - owner = 
Loader.readStringList(is,buff); - description = Loader.readString(is,buff); - parent = Loader.readString(is,buff); - - } - - /* (non-Javadoc) - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return name.hashCode(); - } - - - /* (non-Javadoc) - * @see java.lang.Object#toString() - */ - @Override - public String toString() { - return name.toString(); - } - - /* (non-Javadoc) - * @see java.lang.Object#equals(java.lang.Object) - */ - @Override - public boolean equals(Object arg0) { - if(arg0==null || !(arg0 instanceof Namespace)) { - return false; - } - return name.equals(((Namespace)arg0).name); - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java deleted file mode 100644 index 9e181955..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsDAO.java +++ /dev/null @@ -1,542 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
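Namespace.bytify writes a MAGIC/VERSION header followed by name, type, admin, owner, description and parent, and reconstitute must read the fields back in exactly that order; equality and hashing are by name alone. A minimal round-trip sketch, with hypothetical field values, assumed java.util.Arrays/java.nio.ByteBuffer imports, and IOException handling omitted:

    // Sketch only: serialize a Namespace and rebuild it from the ByteBuffer.
    Namespace ns = new Namespace();
    ns.name = "com.example.app";               // hypothetical values
    ns.type = NsType.APP.type;
    ns.admin = Arrays.asList("ad1234");
    ns.owner = Arrays.asList("ow1234");
    ns.description = "example namespace";
    ns.parent = "com.example";

    ByteBuffer bb = ns.bytify();               // header + fields
    Namespace copy = new Namespace();
    copy.reconstitute(bb);                     // reads fields in the same order
    boolean same = ns.equals(copy);            // true: equals() compares name only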
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassAccess; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; - -import java.util.Set; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.TimeTaken; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ResultSet; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.exceptions.DriverException; - -/** - * NsDAO - * - * Data Access Object for Namespace Data - * - */ -public class NsDAO extends CassDAOImpl { - public static final String TABLE = "ns"; - public static final String TABLE_ATTRIB = "ns_attrib"; - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - public static final int ROOT = 1; - public static final int COMPANY=2; - public static final int APP = 3; - - private static final String BEGIN_BATCH = "BEGIN BATCH\n"; - private static final String APPLY_BATCH = "APPLY BATCH;\n"; - private static final String SQSCCR = "';\n"; - private static final String SQCSQ = "','"; - - private HistoryDAO historyDAO; - private CacheInfoDAO infoDAO; - private PSInfo psNS; - - public NsDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, NsDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - } - - public NsDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO iDAO) throws APIException, IOException { - super(trans, NsDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO=hDAO; - infoDAO = iDAO; - init(trans); - } - - - ////////////////////////////////////////// - // Data Definition, matches Cassandra DM - ////////////////////////////////////////// - private static final int KEYLIMIT = 1; - /** - * Data class that matches the Cassandra Table "role" - * - */ - public static class Data extends CacheableData implements Bytification { - public String name; - public int type; - public String description; - public String parent; - public Map attrib; - -// //////////////////////////////////////// -// // Getters - public Map attrib(boolean mutable) { - if (attrib == null) { - attrib = new HashMap(); - } else if (mutable && !(attrib instanceof HashMap)) { - attrib = new HashMap(attrib); - } - return attrib; - } - - @Override - public int[] invalidate(Cached cache) { - return new int[] { - seg(cache,name) - }; - } - - public NsSplit split(String name) { - return new NsSplit(this,name); - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - NSLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - 
NSLoader.deflt.unmarshal(this,toDIS(bb)); - } - - @Override - public String toString() { - return name; - } - - } - - private void init(AuthzTrans trans) throws APIException, IOException { - // Set up sub-DAOs - if(historyDAO==null) { - historyDAO = new HistoryDAO(trans, this); - } - if(infoDAO==null) { - infoDAO = new CacheInfoDAO(trans,this); - } - - String[] helpers = setCRUD(trans, TABLE, Data.class, NSLoader.deflt,4/*need to skip attrib */); - - psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE parent = ?", new NSLoader(1),readConsistency); - - } - - private static final class NSLoader extends Loader implements Streamer { - public static final int MAGIC=250935515; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; - - public static final NSLoader deflt = new NSLoader(KEYLIMIT); - - public NSLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - // Int more efficient - data.name = row.getString(0); - data.type = row.getInt(1); - data.description = row.getString(2); - data.parent = row.getString(3); - return data; - } - - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.name; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - - obj[idx]=data.type; - obj[++idx]=data.description; - obj[++idx]=data.parent; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.name); - os.writeInt(data.type); - writeString(os,data.description); - writeString(os,data.parent); - if(data.attrib==null) { - os.writeInt(-1); - } else { - os.writeInt(data.attrib.size()); - for(Entry es : data.attrib(false).entrySet()) { - writeString(os,es.getKey()); - writeString(os,es.getValue()); - } - } - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - - byte[] buff = new byte[BUFF_SIZE]; - data.name = readString(is, buff); - data.type = is.readInt(); - data.description = readString(is,buff); - data.parent = readString(is,buff); - int count = is.readInt(); - if(count>0) { - Map da = data.attrib(true); - for(int i=0;i create(AuthzTrans trans, Data data) { - String ns = data.name; - // Ensure Parent is set - int ldot = ns.lastIndexOf('.'); - data.parent=ldot<0?".":ns.substring(0,ldot); - - // insert Attributes - StringBuilder stmt = new StringBuilder(); - stmt.append(BEGIN_BATCH); - attribInsertStmts(stmt, data); - stmt.append(APPLY_BATCH); - try { - getSession(trans).execute(stmt.toString()); -//// TEST CODE for Exception -// boolean force = true; -// if(force) { -// throw new com.datastax.driver.core.exceptions.NoHostAvailableException(new HashMap()); -//// throw new com.datastax.driver.core.exceptions.AuthenticationException(new InetSocketAddress(9999),"Sample Message"); -// } -////END TEST CODE - - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - trans.info().log(stmt); - return Result.err(Result.ERR_Backend, "Backend Access"); - } - return super.create(trans, data); - } - - @Override - public Result update(AuthzTrans trans, Data data) { - String ns = data.name; - // Ensure Parent is set - int ldot = ns.lastIndexOf('.'); - data.parent=ldot<0?".":ns.substring(0,ldot); - - StringBuilder stmt 
= new StringBuilder(); - stmt.append(BEGIN_BATCH); - try { - Map localAttr = data.attrib; - Result> rremoteAttr = readAttribByNS(trans,ns); - if(rremoteAttr.notOK()) { - return Result.err(rremoteAttr); - } - // update Attributes - String str; - for(Entry es : localAttr.entrySet()) { - str = rremoteAttr.value.get(es.getKey()); - if(str==null || !str.equals(es.getValue())) { - attribInsertStmt(stmt, ns, es.getKey(),es.getValue()); - } - } - - // No point in deleting... insert overwrites... -// for(Entry es : remoteAttr.entrySet()) { -// str = localAttr.get(es.getKey()); -// if(str==null || !str.equals(es.getValue())) { -// attribDeleteStmt(stmt, ns, es.getKey()); -// } -// } - if(stmt.length()>BEGIN_BATCH.length()) { - stmt.append(APPLY_BATCH); - getSession(trans).execute(stmt.toString()); - } - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - trans.info().log(stmt); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - return super.update(trans,data); - } - - /* (non-Javadoc) - * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object) - */ - @Override - public Result> read(AuthzTrans trans, Data data) { - Result> rld = super.read(trans, data); - - if(rld.isOKhasData()) { - for(Data d : rld.value) { - // Note: Map is null at this point, save time/mem by assignment - Result> rabn = readAttribByNS(trans,d.name); - if(rabn.isOK()) { - d.attrib = rabn.value; - } else { - return Result.err(rabn); - } - } - } - return rld; - } - - /* (non-Javadoc) - * @see org.onap.aaf.dao.CassDAOImpl#read(org.onap.aaf.inno.env.TransStore, java.lang.Object[]) - */ - @Override - public Result> read(AuthzTrans trans, Object... key) { - Result> rld = super.read(trans, key); - - if(rld.isOKhasData()) { - for(Data d : rld.value) { - // Note: Map is null at this point, save time/mem by assignment - Result> rabn = readAttribByNS(trans,d.name); - if(rabn.isOK()) { - d.attrib = rabn.value; - } else { - return Result.err(rabn); - } - } - } - return rld; - } - - @Override - public Result delete(AuthzTrans trans, Data data, boolean reread) { - TimeTaken tt = trans.start("Delete NS Attributes " + data.name, Env.REMOTE); - try { - StringBuilder stmt = new StringBuilder(); - attribDeleteAllStmt(stmt, data); - try { - getSession(trans).execute(stmt.toString()); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - trans.info().log(stmt); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - } finally { - tt.done(); - } - return super.delete(trans, data, reread); - - } - - public Result> readAttribByNS(AuthzTrans trans, String ns) { - Map map = new HashMap(); - TimeTaken tt = trans.start("readAttribByNS " + ns, Env.REMOTE); - try { - ResultSet rs = getSession(trans).execute("SELECT key,value FROM " - + TABLE_ATTRIB - + " WHERE ns='" - + ns - + "';"); - - for(Iterator iter = rs.iterator();iter.hasNext(); ) { - Row r = iter.next(); - map.put(r.getString(0), r.getString(1)); - } - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } finally { - tt.done(); - } - return Result.ok(map); - } - - public Result> readNsByAttrib(AuthzTrans trans, String key) { - Set set = new HashSet(); - TimeTaken tt = trans.start("readNsBykey " + key, Env.REMOTE); - try { - ResultSet rs = getSession(trans).execute("SELECT ns FROM " - + TABLE_ATTRIB - + " WHERE key='" - + key - + "';"); - - 
for(Iterator iter = rs.iterator();iter.hasNext(); ) { - Row r = iter.next(); - set.add(r.getString(0)); - } - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } finally { - tt.done(); - } - return Result.ok(set); - } - - public Result attribAdd(AuthzTrans trans, String ns, String key, String value) { - try { - getSession(trans).execute(attribInsertStmt(new StringBuilder(),ns,key,value).toString()); - return Result.ok(); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - } - - private StringBuilder attribInsertStmt(StringBuilder sb, String ns, String key, String value) { - sb.append("INSERT INTO "); - sb.append(TABLE_ATTRIB); - sb.append(" (ns,key,value) VALUES ('"); - sb.append(ns); - sb.append(SQCSQ); - sb.append(key); - sb.append(SQCSQ); - sb.append(value); - sb.append("');"); - return sb; - } - - public Result attribRemove(AuthzTrans trans, String ns, String key) { - try { - getSession(trans).execute(attribDeleteStmt(new StringBuilder(),ns,key).toString()); - return Result.ok(); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - } - - private StringBuilder attribDeleteStmt(StringBuilder stmt, String ns, String key) { - stmt.append("DELETE FROM "); - stmt.append(TABLE_ATTRIB); - stmt.append(" WHERE ns='"); - stmt.append(ns); - stmt.append("' AND key='"); - stmt.append(key); - stmt.append("';"); - return stmt; - } - - private void attribDeleteAllStmt(StringBuilder stmt, Data data) { - stmt.append(" DELETE FROM "); - stmt.append(TABLE_ATTRIB); - stmt.append(" WHERE ns='"); - stmt.append(data.name); - stmt.append(SQSCCR); - } - - private void attribInsertStmts(StringBuilder stmt, Data data) { - // INSERT new Attrib - for(Entry es : data.attrib(false).entrySet() ) { - stmt.append(" "); - attribInsertStmt(stmt,data.name,es.getKey(),es.getValue()); - } - } - - /** - * Add description to Namespace - * @param trans - * @param ns - * @param description - * @return - */ - public Result addDescription(AuthzTrans trans, String ns, String description) { - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" - + description + "' WHERE name = '" + ns + "';"); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - Data data = new Data(); - data.name=ns; - wasModified(trans, CRUD.update, data, "Added description " + description + " to namespace " + ns, null ); - return Result.ok(); - } - - public Result> getChildren(AuthzTrans trans, String parent) { - return psNS.read(trans, R_TEXT, new Object[]{parent}); - } - - - /** - * Log Modification statements to History - * - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... 
override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - //TODO Must log history - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject ? override[1] : data.name; - hd.memo = memo ? override[0] : (data.name + " was " + modified.name() + 'd' ); - if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize NsDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) { - trans.error().log("Cannot touch CacheInfo"); - } - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java deleted file mode 100644 index 21e57287..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsSplit.java +++ /dev/null @@ -1,62 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -public class NsSplit { - public final String ns; - public final String name; - public final NsDAO.Data nsd; - - public NsSplit(NsDAO.Data nsd, String child) { - this.nsd = nsd; - if(child.startsWith(nsd.name)) { - ns = nsd.name; - int dot = ns.length(); - if(dot=0) { - nsd.parent = ns.substring(0, dot); - } else { - nsd.parent = "."; - } - } - - public boolean isOK() { - return ns!=null && name !=null; - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java deleted file mode 100644 index c098acb1..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/NsType.java +++ /dev/null @@ -1,74 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
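NsDAO.create and NsDAO.update both derive a namespace's parent from the last '.' in its name, and NsSplit's second constructor applies the same rule when only a namespace string is available. The convention, as a standalone sketch (the method name is illustrative only):

    // Sketch only: "com.example.app" -> "com.example"; a name with no dot -> ".".
    static String parentOf(String ns) {
        int ldot = ns.lastIndexOf('.');
        return ldot < 0 ? "." : ns.substring(0, ldot);
    }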
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -/** - * Defines the Type Codes in the NS Table. - * - */ -public enum NsType { - UNKNOWN (-1), - DOT (0), - ROOT (1), - COMPANY (2), - APP (3), - STACKED_APP (10), - STACK (11); - - public final int type; - private NsType(int t) { - type = t; - } - /** - * This is not the Ordinal, but the Type that is stored in NS Tables - * - * @param t - * @return - */ - public static NsType fromType(int t) { - for(NsType nst : values()) { - if(t==nst.type) { - return nst; - } - } - return UNKNOWN; - } - - /** - * Use this one rather than "valueOf" to avoid Exception - * @param s - * @return - */ - public static NsType fromString(String s) { - if(s!=null) { - for(NsType nst : values()) { - if(nst.name().equals(s)) { - return nst; - } - } - } - return UNKNOWN; - } - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java deleted file mode 100644 index e0b368f5..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/PermDAO.java +++ /dev/null @@ -1,502 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
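NsType carries its own type code (the value stored in the ns table, not the enum ordinal) and offers fromType/fromString lookups that fall back to UNKNOWN rather than throwing, unlike Enum.valueOf. A short usage sketch:

    // Sketch only: defensive lookups of NS type codes and names.
    NsType app     = NsType.fromType(3);           // APP (stored code 3)
    NsType company = NsType.fromString("COMPANY"); // COMPANY
    NsType unknown = NsType.fromString("bogus");   // UNKNOWN, no IllegalArgumentException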
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassAccess; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.DAOException; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; -import org.onap.aaf.dao.aaf.hl.Question; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.util.Split; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.exceptions.DriverException; - -public class PermDAO extends CassDAOImpl { - - public static final String TABLE = "perm"; - - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - private static final String STAR = "*"; - - private final HistoryDAO historyDAO; - private final CacheInfoDAO infoDAO; - - private PSInfo psNS, psChildren, psByType; - - public PermDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, PermDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - init(trans); - historyDAO = new HistoryDAO(trans, this); - infoDAO = new CacheInfoDAO(trans,this); - } - - public PermDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) { - super(trans, PermDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = hDAO; - infoDAO=ciDAO; - init(trans); - } - - - private static final int KEYLIMIT = 4; - public static class Data extends CacheableData implements Bytification { - public String ns; - public String type; - public String instance; - public String action; - public Set roles; - public String description; - - public Data() {} - - public Data(NsSplit nss, String instance, String action) { - ns = nss.ns; - type = nss.name; - this.instance = instance; - this.action = action; - } - - public String fullType() { - return ns + '.' + type; - } - - public String fullPerm() { - return ns + '.' 
+ type + '|' + instance + '|' + action; - } - - public String encode() { - return ns + '|' + type + '|' + instance + '|' + action; - } - - /** - * Decode Perm String, including breaking into appropriate Namespace - * - * @param trans - * @param q - * @param p - * @return - */ - public static Result decode(AuthzTrans trans, Question q, String p) { - String[] ss = Split.splitTrim('|', p,4); - if(ss[2]==null) { - return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'"); - } - Data data = new Data(); - if(ss[3]==null) { // older 3 part encoding must be evaluated for NS - Result nss = q.deriveNsSplit(trans, ss[0]); - if(nss.notOK()) { - return Result.err(nss); - } - data.ns=nss.value.ns; - data.type=nss.value.name; - data.instance=ss[1]; - data.action=ss[2]; - } else { // new 4 part encoding - data.ns=ss[0]; - data.type=ss[1]; - data.instance=ss[2]; - data.action=ss[3]; - } - return Result.ok(data); - } - - /** - * Decode Perm String, including breaking into appropriate Namespace - * - * @param trans - * @param q - * @param p - * @return - */ - public static Result decodeToArray(AuthzTrans trans, Question q, String p) { - String[] ss = Split.splitTrim('|', p,4); - if(ss[2]==null) { - return Result.err(Status.ERR_BadData,"Perm Encodings must be separated by '|'"); - } - - if(ss[3]==null) { // older 3 part encoding must be evaluated for NS - ss[3] = ss[2]; - ss[2] = ss[1]; - Result nss = q.deriveNsSplit(trans, ss[0]); - if(nss.notOK()) { - return Result.err(nss); - } - ss[1] = nss.value.name; - ss[0] = nss.value.ns; - } - return Result.ok(ss); - } - - public static Data create(NsDAO.Data ns, String name) { - NsSplit nss = new NsSplit(ns,name); - Data rv = new Data(); - rv.ns = nss.ns; - String[] s = nss.name.split("\\|"); - switch(s.length) { - case 3: - rv.type=s[0]; - rv.instance=s[1]; - rv.action=s[2]; - break; - case 2: - rv.type=s[0]; - rv.instance=s[1]; - rv.action=STAR; - break; - default: - rv.type=s[0]; - rv.instance = STAR; - rv.action = STAR; - } - return rv; - } - - public static Data create(AuthzTrans trans, Question q, String name) { - String[] s = name.split("\\|"); - Result rdns = q.deriveNsSplit(trans, s[0]); - Data rv = new PermDAO.Data(); - if(rdns.isOKhasData()) { - switch(s.length) { - case 3: - rv.type=s[1]; - rv.instance=s[2]; - rv.action=s[3]; - break; - case 2: - rv.type=s[1]; - rv.instance=s[2]; - rv.action=STAR; - break; - default: - rv.type=s[1]; - rv.instance = STAR; - rv.action = STAR; - } - } - return rv; - } - - //////////////////////////////////////// - // Getters - public Set roles(boolean mutable) { - if (roles == null) { - roles = new HashSet(); - } else if (mutable && !(roles instanceof HashSet)) { - roles = new HashSet(roles); - } - return roles; - } - - @Override - public int[] invalidate(Cached cache) { - return new int[] { - seg(cache,ns), - seg(cache,ns,type), - seg(cache,ns,type,STAR), - seg(cache,ns,type,instance,action) - }; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - PermLoader.deflt.marshal(this, new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - PermLoader.deflt.unmarshal(this, toDIS(bb)); - } - - @Override - public String toString() { - return encode(); - } - } - - private static class PermLoader extends Loader implements Streamer { - public static final int MAGIC=283939453; - public static final int VERSION=1; - public static final int 
BUFF_SIZE=96; - - public static final PermLoader deflt = new PermLoader(KEYLIMIT); - - public PermLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - // Int more efficient Match "fields" string - data.ns = row.getString(0); - data.type = row.getString(1); - data.instance = row.getString(2); - data.action = row.getString(3); - data.roles = row.getSet(4,String.class); - data.description = row.getString(5); - return data; - } - - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.ns; - obj[++idx]=data.type; - obj[++idx]=data.instance; - obj[++idx]=data.action; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.roles; - obj[++idx]=data.description; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.ns); - writeString(os, data.type); - writeString(os, data.instance); - writeString(os, data.action); - writeStringSet(os, data.roles); - writeString(os, data.description); - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.ns = readString(is, buff); - data.type = readString(is,buff); - data.instance = readString(is,buff); - data.action = readString(is,buff); - data.roles = readStringSet(is,buff); - data.description = readString(is,buff); - } - } - - private void init(AuthzTrans trans) { - // the 3 is the number of key fields - String[] helpers = setCRUD(trans, TABLE, Data.class, PermLoader.deflt); - - // Other SELECT style statements... match with a local Method - psByType = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns = ? AND type = ?", new PermLoader(2) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.type; - } - },readConsistency); - - psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns = ?", new PermLoader(1),readConsistency); - - psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns=? AND type > ? AND type < ?", - new PermLoader(3) { - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx] = data.ns; - obj[++idx]=data.type + DOT; - obj[++idx]=data.type + DOT_PLUS_ONE; - } - },readConsistency); - - } - - - /** - * Add a single Permission to the Role's Permission Collection - * - * @param trans - * @param roleFullName - * @param perm - * @param type - * @param action - * @return - */ - public Result addRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) { - // Note: Prepared Statements for Collection updates aren't supported - //ResultSet rv = - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles + {'" + roleFullName + "'} " + - "WHERE " + - "ns = '" + perm.ns + "' AND " + - "type = '" + perm.type + "' AND " + - "instance = '" + perm.instance + "' AND " + - "action = '" + perm.action + "';" - ); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - wasModified(trans, CRUD.update, perm, "Added role " + roleFullName + " to perm " + - perm.ns + '.' 
+ perm.type + '|' + perm.instance + '|' + perm.action); - return Result.ok(); - } - - /** - * Remove a single Permission from the Role's Permission Collection - * @param trans - * @param roleFullName - * @param perm - * @param type - * @param action - * @return - */ - public Result delRole(AuthzTrans trans, PermDAO.Data perm, String roleFullName) { - // Note: Prepared Statements for Collection updates aren't supported - //ResultSet rv = - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET roles = roles - {'" + roleFullName + "'} " + - "WHERE " + - "ns = '" + perm.ns + "' AND " + - "type = '" + perm.type + "' AND " + - "instance = '" + perm.instance + "' AND " + - "action = '" + perm.action + "';" - ); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - //TODO how can we tell when it doesn't? - wasModified(trans, CRUD.update, perm, "Removed role " + roleFullName + " from perm " + - perm.ns + '.' + perm.type + '|' + perm.instance + '|' + perm.action); - return Result.ok(); - } - - - - /** - * Additional method: - * Select all Permissions by Name - * - * @param name - * @return - * @throws DAOException - */ - public Result> readByType(AuthzTrans trans, String ns, String type) { - return psByType.read(trans, R_TEXT, new Object[]{ns, type}); - } - - public Result> readChildren(AuthzTrans trans, String ns, String type) { - return psChildren.read(trans, R_TEXT, new Object[]{ns, type+DOT, type + DOT_PLUS_ONE}); - } - - public Result> readNS(AuthzTrans trans, String ns) { - return psNS.read(trans, R_TEXT, new Object[]{ns}); - } - - /** - * Add description to this permission - * - * @param trans - * @param ns - * @param type - * @param instance - * @param action - * @param description - * @return - */ - public Result addDescription(AuthzTrans trans, String ns, String type, - String instance, String action, String description) { - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" - + description + "' WHERE ns = '" + ns + "' AND type = '" + type + "'" - + "AND instance = '" + instance + "' AND action = '" + action + "';"); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - Data data = new Data(); - data.ns=ns; - data.type=type; - data.instance=instance; - data.action=action; - wasModified(trans, CRUD.update, data, "Added description " + description + " to permission " - + data.encode(), null ); - return Result.ok(); - } - - /** - * Log Modification statements to History - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - // Need to update history - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject ? 
override[1] : data.fullType(); - if (memo) { - hd.memo = String.format("%s", override[0]); - } else { - hd.memo = String.format("%sd %s|%s|%s", modified.name(),data.fullType(),data.instance,data.action); - } - - if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize PermDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) { - trans.error().log("Cannot touch CacheInfo"); - } - } -} - diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java deleted file mode 100644 index 5b0190e9..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/RoleDAO.java +++ /dev/null @@ -1,412 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
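PermDAO.addRole and delRole above mutate the roles set of a single perm row in place (prepared statements are not used for collection updates) and record the change through wasModified; RoleDAO.addPerm and delPerm make the mirror-image change on the role row. A minimal usage sketch, with a hypothetical transaction, DAO instance and permission key, and inferred generic parameters:

    // Sketch only: grant "com.example.app.admin" the read permission below.
    PermDAO.Data perm = new PermDAO.Data();
    perm.ns       = "com.example.app";
    perm.type     = "access";
    perm.instance = "*";
    perm.action   = "read";

    Result<Void> rv = permDAO.addRole(trans, perm, "com.example.app.admin");
    if (rv.notOK()) {
        trans.error().log("Could not add role:", Status.name(rv.status));
    }
    // The role side is kept in sync via RoleDAO.addPerm, whose perms set
    // stores perm.encode(), i.e. "ns|type|instance|action".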
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassAccess; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; -import org.onap.aaf.dao.aaf.hl.Question; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.util.Split; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; -import com.datastax.driver.core.exceptions.DriverException; - -public class RoleDAO extends CassDAOImpl { - - public static final String TABLE = "role"; - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - - private final HistoryDAO historyDAO; - private final CacheInfoDAO infoDAO; - - private PSInfo psChildren, psNS, psName; - - public RoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, RoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - // Set up sub-DAOs - historyDAO = new HistoryDAO(trans, this); - infoDAO = new CacheInfoDAO(trans,this); - init(trans); - } - - public RoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) { - super(trans, RoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - historyDAO = hDAO; - infoDAO = ciDAO; - init(trans); - } - - - ////////////////////////////////////////// - // Data Definition, matches Cassandra DM - ////////////////////////////////////////// - private static final int KEYLIMIT = 2; - /** - * Data class that matches the Cassandra Table "role" - */ - public static class Data extends CacheableData implements Bytification { - public String ns; - public String name; - public Set perms; - public String description; - - //////////////////////////////////////// - // Getters - public Set perms(boolean mutable) { - if (perms == null) { - perms = new HashSet(); - } else if (mutable && !(perms instanceof HashSet)) { - perms = new HashSet(perms); - } - return perms; - } - - public static Data create(NsDAO.Data ns, String name) { - NsSplit nss = new NsSplit(ns,name); - RoleDAO.Data rv = new Data(); - rv.ns = nss.ns; - rv.name=nss.name; - return rv; - } - - public String fullName() { - return ns + '.' 
+ name; - } - - public String encode() { - return ns + '|' + name; - } - - /** - * Decode Perm String, including breaking into appropriate Namespace - * - * @param trans - * @param q - * @param r - * @return - */ - public static Result decode(AuthzTrans trans, Question q, String r) { - String[] ss = Split.splitTrim('|', r,2); - Data data = new Data(); - if(ss[1]==null) { // older 1 part encoding must be evaluated for NS - Result nss = q.deriveNsSplit(trans, ss[0]); - if(nss.notOK()) { - return Result.err(nss); - } - data.ns=nss.value.ns; - data.name=nss.value.name; - } else { // new 4 part encoding - data.ns=ss[0]; - data.name=ss[1]; - } - return Result.ok(data); - } - - /** - * Decode from UserRole Data - * @param urdd - * @return - */ - public static RoleDAO.Data decode(UserRoleDAO.Data urdd) { - RoleDAO.Data rd = new RoleDAO.Data(); - rd.ns = urdd.ns; - rd.name = urdd.rname; - return rd; - } - - - /** - * Decode Perm String, including breaking into appropriate Namespace - * - * @param trans - * @param q - * @param p - * @return - */ - public static Result decodeToArray(AuthzTrans trans, Question q, String p) { - String[] ss = Split.splitTrim('|', p,2); - if(ss[1]==null) { // older 1 part encoding must be evaluated for NS - Result nss = q.deriveNsSplit(trans, ss[0]); - if(nss.notOK()) { - return Result.err(nss); - } - ss[0] = nss.value.ns; - ss[1] = nss.value.name; - } - return Result.ok(ss); - } - - @Override - public int[] invalidate(Cached cache) { - return new int[] { - seg(cache,ns,name), - seg(cache,ns), - seg(cache,name), - }; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - RoleLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - RoleLoader.deflt.unmarshal(this, toDIS(bb)); - } - - @Override - public String toString() { - return ns + '.' 
+ name; - } - } - - private static class RoleLoader extends Loader implements Streamer { - public static final int MAGIC=923577343; - public static final int VERSION=1; - public static final int BUFF_SIZE=96; - - public static final RoleLoader deflt = new RoleLoader(KEYLIMIT); - - public RoleLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - // Int more efficient - data.ns = row.getString(0); - data.name = row.getString(1); - data.perms = row.getSet(2,String.class); - data.description = row.getString(3); - return data; - } - - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.ns; - obj[++idx]=data.name; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.perms; - obj[++idx]=data.description; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - writeString(os, data.ns); - writeString(os, data.name); - writeStringSet(os,data.perms); - writeString(os, data.description); - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - byte[] buff = new byte[BUFF_SIZE]; - data.ns = readString(is, buff); - data.name = readString(is,buff); - data.perms = readStringSet(is,buff); - data.description = readString(is,buff); - } - }; - - private void init(AuthzTrans trans) { - String[] helpers = setCRUD(trans, TABLE, Data.class, RoleLoader.deflt); - - psNS = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns = ?", new RoleLoader(1),readConsistency); - - psName = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE name = ?", new RoleLoader(1),readConsistency); - - psChildren = new PSInfo(trans, SELECT_SP + helpers[FIELD_COMMAS] + " FROM " + TABLE + - " WHERE ns=? AND name > ? 
AND name < ?", - new RoleLoader(3) { - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx] = data.ns; - obj[++idx]=data.name + DOT; - obj[++idx]=data.name + DOT_PLUS_ONE; - } - },readConsistency); - - } - - public Result> readNS(AuthzTrans trans, String ns) { - return psNS.read(trans, R_TEXT + " NS " + ns, new Object[]{ns}); - } - - public Result> readName(AuthzTrans trans, String name) { - return psName.read(trans, R_TEXT + name, new Object[]{name}); - } - - public Result> readChildren(AuthzTrans trans, String ns, String role) { - if(role.length()==0 || "*".equals(role)) { - return psChildren.read(trans, R_TEXT, new Object[]{ns, FIRST_CHAR, LAST_CHAR}); - } else { - return psChildren.read(trans, R_TEXT, new Object[]{ns, role+DOT, role+DOT_PLUS_ONE}); - } - } - - /** - * Add a single Permission to the Role's Permission Collection - * - * @param trans - * @param role - * @param perm - * @param type - * @param action - * @return - */ - public Result addPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) { - // Note: Prepared Statements for Collection updates aren't supported - String pencode = perm.encode(); - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms + {'" + - pencode + "'} WHERE " + - "ns = '" + role.ns + "' AND name = '" + role.name + "';"); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - wasModified(trans, CRUD.update, role, "Added permission " + pencode + " to role " + role.fullName()); - return Result.ok(); - } - - /** - * Remove a single Permission from the Role's Permission Collection - * @param trans - * @param role - * @param perm - * @param type - * @param action - * @return - */ - public Result delPerm(AuthzTrans trans, RoleDAO.Data role, PermDAO.Data perm) { - // Note: Prepared Statements for Collection updates aren't supported - - String pencode = perm.encode(); - - //ResultSet rv = - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET perms = perms - {'" + - pencode + "'} WHERE " + - "ns = '" + role.ns + "' AND name = '" + role.name + "';"); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - //TODO how can we tell when it doesn't? 
- wasModified(trans, CRUD.update, role, "Removed permission " + pencode + " from role " + role.fullName() ); - return Result.ok(); - } - - /** - * Add description to role - * - * @param trans - * @param ns - * @param name - * @param description - * @return - */ - public Result addDescription(AuthzTrans trans, String ns, String name, String description) { - try { - getSession(trans).execute(UPDATE_SP + TABLE + " SET description = '" - + description + "' WHERE ns = '" + ns + "' AND name = '" + name + "';"); - } catch (DriverException | APIException | IOException e) { - reportPerhapsReset(trans,e); - return Result.err(Result.ERR_Backend, CassAccess.ERR_ACCESS_MSG); - } - - Data data = new Data(); - data.ns=ns; - data.name=name; - wasModified(trans, CRUD.update, data, "Added description " + description + " to role " + data.fullName(), null ); - return Result.ok(); - } - - - /** - * Log Modification statements to History - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - hd.user = trans.user(); - hd.action = modified.name(); - hd.target = TABLE; - hd.subject = subject ? override[1] : data.fullName(); - hd.memo = memo ? override[0] : (data.fullName() + " was " + modified.name() + 'd' ); - if(modified==CRUD.delete) { - try { - hd.reconstruct = data.bytify(); - } catch (IOException e) { - trans.error().log(e,"Could not serialize RoleDAO.Data"); - } - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) { - trans.error().log("Cannot touch CacheInfo for Role"); - } - } - - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java deleted file mode 100644 index 246df6ae..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/Status.java +++ /dev/null @@ -1,88 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import org.onap.aaf.authz.layer.Result; - - - - -/** - * Add additional Behavior for Specific Applications for Results - * - * In this case, we add additional BitField information accessible by - * method ( - * - * @param - */ -public class Status extends Result { - - // 10/1/2013: Initially, I used enum, but it's not extensible. - public final static int ERR_NsNotFound = Result.ERR_General+1, - ERR_RoleNotFound = Result.ERR_General+2, - ERR_PermissionNotFound = Result.ERR_General+3, - ERR_UserNotFound = Result.ERR_General+4, - ERR_UserRoleNotFound = Result.ERR_General+5, - ERR_DelegateNotFound = Result.ERR_General+6, - ERR_InvalidDelegate = Result.ERR_General+7, - ERR_DependencyExists = Result.ERR_General+8, - ERR_NoApprovals = Result.ERR_General+9, - ACC_Now = Result.ERR_General+10, - ACC_Future = Result.ERR_General+11, - ERR_ChoiceNeeded = Result.ERR_General+12, - ERR_FutureNotRequested = Result.ERR_General+13; - - /** - * Constructor for Result set. - * @param data - * @param status - */ - private Status(RV value, int status, String details, String[] variables ) { - super(value,status,details,variables); - } - - public static String name(int status) { - switch(status) { - case OK: return "OK"; - case ERR_NsNotFound: return "ERR_NsNotFound"; - case ERR_RoleNotFound: return "ERR_RoleNotFound"; - case ERR_PermissionNotFound: return "ERR_PermissionNotFound"; - case ERR_UserNotFound: return "ERR_UserNotFound"; - case ERR_UserRoleNotFound: return "ERR_UserRoleNotFound"; - case ERR_DelegateNotFound: return "ERR_DelegateNotFound"; - case ERR_InvalidDelegate: return "ERR_InvalidDelegate"; - case ERR_ConflictAlreadyExists: return "ERR_ConflictAlreadyExists"; - case ERR_DependencyExists: return "ERR_DependencyExists"; - case ERR_ActionNotCompleted: return "ERR_ActionNotCompleted"; - case ERR_Denied: return "ERR_Denied"; - case ERR_Policy: return "ERR_Policy"; - case ERR_BadData: return "ERR_BadData"; - case ERR_NotImplemented: return "ERR_NotImplemented"; - case ERR_NotFound: return "ERR_NotFound"; - case ERR_ChoiceNeeded: return "ERR_ChoiceNeeded"; - } - //case ERR_General: or unknown... - return "ERR_General"; - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java deleted file mode 100644 index 29681603..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/cass/UserRoleDAO.java +++ /dev/null @@ -1,320 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
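Status extends Result with AAF-specific codes and a name(int) helper that maps a numeric status back to a readable label (unrecognized codes fall through to "ERR_General"), which keeps log lines meaningful when a DAO call fails. A short sketch with a hypothetical NsDAO instance and inferred generics:

    // Sketch only: log a readable label when a namespace read fails.
    Result<List<NsDAO.Data>> r = nsDAO.read(trans, "com.example.app");
    if (r.notOK()) {
        trans.error().log("NS read failed:", Status.name(r.status));
    }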
- * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.cass; - -import java.io.ByteArrayOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.List; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.Bytification; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.DAOException; -import org.onap.aaf.dao.Loader; -import org.onap.aaf.dao.Streamer; -import org.onap.aaf.dao.aaf.hl.Question; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Slot; -import org.onap.aaf.inno.env.util.Chrono; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Row; - -public class UserRoleDAO extends CassDAOImpl { - public static final String TABLE = "user_role"; - - public static final int CACHE_SEG = 0x40; // yields segment 0x0-0x3F - - private static final String TRANS_UR_SLOT = "_TRANS_UR_SLOT_"; - public Slot transURSlot; - - private final HistoryDAO historyDAO; - private final CacheInfoDAO infoDAO; - - private PSInfo psByUser, psByRole, psUserInRole; - - - - public UserRoleDAO(AuthzTrans trans, Cluster cluster, String keyspace) throws APIException, IOException { - super(trans, UserRoleDAO.class.getSimpleName(),cluster,keyspace,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - transURSlot = trans.slot(TRANS_UR_SLOT); - init(trans); - - // Set up sub-DAOs - historyDAO = new HistoryDAO(trans, this); - infoDAO = new CacheInfoDAO(trans,this); - } - - public UserRoleDAO(AuthzTrans trans, HistoryDAO hDAO, CacheInfoDAO ciDAO) { - super(trans, UserRoleDAO.class.getSimpleName(),hDAO,Data.class,TABLE, readConsistency(trans,TABLE), writeConsistency(trans,TABLE)); - transURSlot = trans.slot(TRANS_UR_SLOT); - historyDAO = hDAO; - infoDAO = ciDAO; - init(trans); - } - - private static final int KEYLIMIT = 2; - public static class Data extends CacheableData implements Bytification { - public String user; - public String role; - public String ns; - public String rname; - public Date expires; - - @Override - public int[] invalidate(Cached cache) { - // Note: I'm not worried about Name collisions, because the formats are different: - // myName ... etc versus - // com. ... - // The "dot" makes the difference. - return new int[] { - seg(cache,user,role), - seg(cache,user), - seg(cache,role) - }; - } - - @Override - public ByteBuffer bytify() throws IOException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - URLoader.deflt.marshal(this,new DataOutputStream(baos)); - return ByteBuffer.wrap(baos.toByteArray()); - } - - @Override - public void reconstitute(ByteBuffer bb) throws IOException { - URLoader.deflt.unmarshal(this, toDIS(bb)); - } - - public void role(String ns, String rname) { - this.ns = ns; - this.rname = rname; - this.role = ns + '.' 
+ rname; - } - - public void role(RoleDAO.Data rdd) { - ns = rdd.ns; - rname = rdd.name; - role = rdd.fullName(); - } - - - public boolean role(AuthzTrans trans, Question ques, String role) { - this.role = role; - Result rnss = ques.deriveNsSplit(trans, role); - if(rnss.isOKhasData()) { - ns = rnss.value.ns; - rname = rnss.value.name; - return true; - } else { - return false; - } - } - - @Override - public String toString() { - return user + '|' + ns + '|' + rname + '|' + Chrono.dateStamp(expires); - } - - - } - - private static class URLoader extends Loader implements Streamer { - public static final int MAGIC=738469903; - public static final int VERSION=1; - public static final int BUFF_SIZE=48; - - public static final URLoader deflt = new URLoader(KEYLIMIT); - - public URLoader(int keylimit) { - super(keylimit); - } - - @Override - public Data load(Data data, Row row) { - data.user = row.getString(0); - data.role = row.getString(1); - data.ns = row.getString(2); - data.rname = row.getString(3); - data.expires = row.getDate(4); - return data; - } - - @Override - protected void key(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.user; - obj[++idx]=data.role; - } - - @Override - protected void body(Data data, int _idx, Object[] obj) { - int idx = _idx; - obj[idx]=data.ns; - obj[++idx]=data.rname; - obj[++idx]=data.expires; - } - - @Override - public void marshal(Data data, DataOutputStream os) throws IOException { - writeHeader(os,MAGIC,VERSION); - - writeString(os, data.user); - writeString(os, data.role); - writeString(os, data.ns); - writeString(os, data.rname); - os.writeLong(data.expires==null?-1:data.expires.getTime()); - } - - @Override - public void unmarshal(Data data, DataInputStream is) throws IOException { - /*int version = */readHeader(is,MAGIC,VERSION); - // If Version Changes between Production runs, you'll need to do a switch Statement, and adequately read in fields - - byte[] buff = new byte[BUFF_SIZE]; - data.user = readString(is,buff); - data.role = readString(is,buff); - data.ns = readString(is,buff); - data.rname = readString(is,buff); - long l = is.readLong(); - data.expires = l<0?null:new Date(l); - } - - }; - - private void init(AuthzTrans trans) { - String[] helper = setCRUD(trans, TABLE, Data.class, URLoader.deflt); - - psByUser = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ?", - new URLoader(1) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.user; - } - },readConsistency); - - // Note: We understand this call may have poor performance, so only should be used in Management (Delete) func - psByRole = new PSInfo(trans, SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE role = ? ALLOW FILTERING", - new URLoader(1) { - @Override - protected void key(Data data, int idx, Object[] obj) { - obj[idx]=data.role; - } - },readConsistency); - - psUserInRole = new PSInfo(trans,SELECT_SP + helper[FIELD_COMMAS] + " FROM user_role WHERE user = ? AND role = ?", - URLoader.deflt,readConsistency); - } - - public Result> readByUser(AuthzTrans trans, String user) { - return psByUser.read(trans, R_TEXT + " by User " + user, new Object[]{user}); - } - - /** - * Note: Use Sparingly. 
Cassandra's forced key structure means this will perform fairly poorly - * @param trans - * @param role - * @return - * @throws DAOException - */ - public Result> readByRole(AuthzTrans trans, String role) { - return psByRole.read(trans, R_TEXT + " by Role " + role, new Object[]{role}); - } - - /** - * Direct Lookup of User Role - * Don't forget to check for Expiration - */ - public Result> readByUserRole(AuthzTrans trans, String user, String role) { - return psUserInRole.read(trans, R_TEXT + " by User " + user + " and Role " + role, new Object[]{user,role}); - } - - - /** - * Log Modification statements to History - * @param modified which CRUD action was done - * @param data entity data that needs a log entry - * @param overrideMessage if this is specified, we use it rather than crafting a history message based on data - */ - @Override - protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) { - boolean memo = override.length>0 && override[0]!=null; - boolean subject = override.length>1 && override[1]!=null; - - HistoryDAO.Data hd = HistoryDAO.newInitedData(); - HistoryDAO.Data hdRole = HistoryDAO.newInitedData(); - - hd.user = hdRole.user = trans.user(); - hd.action = modified.name(); - // Modifying User/Role is an Update to Role, not a Create. JG, 07-14-2015 - hdRole.action = CRUD.update.name(); - hd.target = TABLE; - hdRole.target = RoleDAO.TABLE; - hd.subject = subject?override[1] : (data.user + '|'+data.role); - hdRole.subject = data.role; - switch(modified) { - case create: - hd.memo = hdRole.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : String.format("%s added to %s",data.user,data.role); - break; - case update: - hd.memo = hdRole.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : String.format("%s - %s was updated",data.user,data.role); - break; - case delete: - hd.memo = hdRole.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : String.format("%s removed from %s",data.user,data.role); - try { - hd.reconstruct = hdRole.reconstruct = data.bytify(); - } catch (IOException e) { - trans.warn().log(e,"Deleted UserRole could not be serialized"); - } - break; - default: - hd.memo = hdRole.memo = memo - ? String.format("%s by %s", override[0], hd.user) - : "n/a"; - } - - if(historyDAO.create(trans, hd).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - - if(historyDAO.create(trans, hdRole).status!=Status.OK) { - trans.error().log("Cannot log to History"); - } - // uses User as Segment - if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) { - trans.error().log("Cannot touch CacheInfo"); - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java deleted file mode 100644 index f05a9172..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/CassExecutor.java +++ /dev/null @@ -1,74 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.hl; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.authz.org.Executor; -import org.onap.aaf.dao.aaf.cass.NsSplit; -import org.onap.aaf.dao.aaf.cass.NsDAO.Data; - -public class CassExecutor implements Executor { - - private Question q; - private Function f; - private AuthzTrans trans; - - public CassExecutor(AuthzTrans trans, Function f) { - this.trans = trans; - this.f = f; - this.q = this.f.q; - } - - @Override - public boolean hasPermission(String user, String ns, String type, String instance, String action) { - return isGranted(user, ns, type, instance, action); - } - - @Override - public boolean inRole(String name) { - Result nss = q.deriveNsSplit(trans, name); - if(nss.notOK())return false; - return q.roleDAO.read(trans, nss.value.ns,nss.value.name).isOKhasData(); - } - - public boolean isGranted(String user, String ns, String type, String instance, String action) { - return q.isGranted(trans, user, ns, type, instance,action); - } - - @Override - public String namespace() throws Exception { - Result res = q.validNSOfDomain(trans,trans.user()); - if(res.isOK()) { - String user[] = trans.user().split("\\."); - return user[user.length-1] + '.' + user[user.length-2]; - } - throw new Exception(res.status + ' ' + res.details); - } - - @Override - public String id() { - return trans.user(); - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java deleted file mode 100644 index 0404fee6..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Function.java +++ /dev/null @@ -1,1574 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.hl; - -import static org.onap.aaf.authz.layer.Result.OK; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.UUID; - -import org.onap.aaf.authz.common.Define; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.authz.org.Executor; -import org.onap.aaf.authz.org.Organization; -import org.onap.aaf.authz.org.Organization.Expiration; -import org.onap.aaf.authz.org.Organization.Identity; -import org.onap.aaf.authz.org.Organization.Policy; -import org.onap.aaf.dao.DAOException; -import org.onap.aaf.dao.aaf.cass.ApprovalDAO; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.DelegateDAO; -import org.onap.aaf.dao.aaf.cass.FutureDAO; -import org.onap.aaf.dao.aaf.cass.Namespace; -import org.onap.aaf.dao.aaf.cass.NsDAO; -import org.onap.aaf.dao.aaf.cass.NsSplit; -import org.onap.aaf.dao.aaf.cass.NsType; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO.Data; -import org.onap.aaf.dao.aaf.hl.Question.Access; - -public class Function { - - public static final String FOP_CRED = "cred"; - public static final String FOP_DELEGATE = "delegate"; - public static final String FOP_NS = "ns"; - public static final String FOP_PERM = "perm"; - public static final String FOP_ROLE = "role"; - public static final String FOP_USER_ROLE = "user_role"; - // First Action should ALWAYS be "write", see "CreateRole" - public final Question q; - - public Function(AuthzTrans trans, Question question) { - q = question; - } - - private class ErrBuilder { - private StringBuilder sb; - private List ao; - - public void log(Result result) { - if (result.notOK()) { - if (sb == null) { - sb = new StringBuilder(); - ao = new ArrayList(); - } - sb.append(result.details); - sb.append('\n'); - for (String s : result.variables) { - ao.add(s); - } - } - } - - public String[] vars() { - String[] rv = new String[ao.size()]; - ao.toArray(rv); - return rv; - } - - public boolean hasErr() { - return sb != null; - } - - @Override - public String toString() { - return sb == null ? "" : String.format(sb.toString(), ao); - } - } - - /** - * createNS - * - * Create Namespace - * - * @param trans - * @param org - * @param ns - * @param user - * @return - * @throws DAOException - * - * To create an NS, you need to: 1) validate permission to - * modify parent NS 2) Does NS exist already? 3) Create NS with - * a) "user" as owner. 
NOTE: Per 10-15 request for AAF 1.0 4) - * Loop through Roles with Parent NS, and map any that start - * with this NS into this one 5) Loop through Perms with Parent - * NS, and map any that start with this NS into this one - */ - public Result createNS(AuthzTrans trans, Namespace namespace, boolean fromApproval) { - Result rq; - - if (namespace.name.endsWith(Question.DOT_ADMIN) - || namespace.name.endsWith(Question.DOT_OWNER)) { - return Result.err(Status.ERR_BadData, - "'admin' and 'owner' are reserved names in AAF"); - } - - try { - for (String u : namespace.owner) { - Organization org = trans.org(); - Identity orgUser = org.getIdentity(trans, u); - if (orgUser == null || !orgUser.isResponsible()) { - // check if user has explicit permission - String reason; - if (org.isTestEnv() && (reason=org.validate(trans, Policy.AS_EMPLOYEE, - new CassExecutor(trans, this), u))!=null) { - return Result.err(Status.ERR_Policy,reason); - } - } - } - } catch (Exception e) { - trans.error().log(e, - "Could not contact Organization for User Validation"); - } - - String user = trans.user(); - // 1) May Change Parent? - int idx = namespace.name.lastIndexOf('.'); - String parent; - if (idx < 0) { - if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "create")) { - return Result.err(Result.ERR_Security, - "%s may not create Root Namespaces", user); - } - parent = null; - fromApproval = true; - } else { - parent = namespace.name.substring(0, idx); - } - - if (!fromApproval) { - Result rparent = q.deriveNs(trans, parent); - if (rparent.notOK()) { - return Result.err(rparent); - } - rparent = q.mayUser(trans, user, rparent.value, Access.write); - if (rparent.notOK()) { - return Result.err(rparent); - } - } - - // 2) Does requested NS exist - if (q.nsDAO.read(trans, namespace.name).isOKhasData()) { - return Result.err(Status.ERR_ConflictAlreadyExists, - "Target Namespace already exists"); - } - - // Someone must be responsible. - if (namespace.owner == null || namespace.owner.isEmpty()) { - return Result - .err(Status.ERR_Policy, - "Namespaces must be assigned at least one responsible party"); - } - - // 3) Create NS - Date now = new Date(); - - Result r; - // 3a) Admin - - try { - // Originally, added the enterer as Admin, but that's not necessary, - // or helpful for Operations folks.. - // Admins can be empty, because they can be changed by lower level - // NSs - // if(ns.admin(false).isEmpty()) { - // ns.admin(true).add(user); - // } - if (namespace.admin != null) { - for (String u : namespace.admin) { - if ((r = checkValidID(trans, now, u)).notOK()) { - return r; - } - } - } - - // 3b) Responsible - Organization org = trans.org(); - for (String u : namespace.owner) { - Identity orgUser = org.getIdentity(trans, u); - if (orgUser == null) { - return Result - .err(Status.ERR_BadData, - "NS must be created with an %s approved Responsible Party", - org.getName()); - } - } - } catch (Exception e) { - return Result.err(Status.ERR_UserNotFound, e.getMessage()); - } - - // VALIDATIONS done... 
Add NS - if ((rq = q.nsDAO.create(trans, namespace.data())).notOK()) { - return Result.err(rq); - } - - // Since Namespace is now created, we need to grab all subsequent errors - ErrBuilder eb = new ErrBuilder(); - - // Add UserRole(s) - UserRoleDAO.Data urdd = new UserRoleDAO.Data(); - urdd.expires = trans.org().expiration(null, Expiration.UserInRole).getTime(); - urdd.role(namespace.name, Question.ADMIN); - for (String admin : namespace.admin) { - urdd.user = admin; - eb.log(q.userRoleDAO.create(trans, urdd)); - } - urdd.role(namespace.name,Question.OWNER); - for (String owner : namespace.owner) { - urdd.user = owner; - eb.log(q.userRoleDAO.create(trans, urdd)); - } - - addNSAdminRolesPerms(trans, eb, namespace.name); - - addNSOwnerRolesPerms(trans, eb, namespace.name); - - if (parent != null) { - // Build up with any errors - - Result parentNS = q.deriveNs(trans, parent); - String targetNs = parentNS.value.name; // Get the Parent Namespace, - // not target - String targetName = namespace.name.substring(parentNS.value.name.length() + 1); // Remove the Parent Namespace from the - // Target + a dot, and you'll get the name - int targetNameDot = targetName.length() + 1; - - // 4) Change any roles with children matching this NS, and - Result> rrdc = q.roleDAO.readChildren(trans, targetNs, targetName); - if (rrdc.isOKhasData()) { - for (RoleDAO.Data rdd : rrdc.value) { - // Remove old Role from Perms, save them off - List lpdd = new ArrayList(); - for(String p : rdd.perms(false)) { - Result rpdd = PermDAO.Data.decode(trans,q,p); - if(rpdd.isOKhasData()) { - PermDAO.Data pdd = rpdd.value; - lpdd.add(pdd); - q.permDAO.delRole(trans, pdd, rdd); - } else{ - trans.error().log(rpdd.errorString()); - } - } - - // Save off Old keys - String delP1 = rdd.ns; - String delP2 = rdd.name; - - // Write in new key - rdd.ns = namespace.name; - rdd.name = (delP2.length() > targetNameDot) ? delP2 - .substring(targetNameDot) : ""; - - // Need to use non-cached, because switching namespaces, not - // "create" per se - if ((rq = q.roleDAO.create(trans, rdd)).isOK()) { - // Put Role back into Perm, with correct info - for(PermDAO.Data pdd : lpdd) { - q.permDAO.addRole(trans, pdd, rdd); - } - // Change data for User Roles - Result> rurd = q.userRoleDAO.readByRole(trans, rdd.fullName()); - if(rurd.isOKhasData()) { - for(UserRoleDAO.Data urd : rurd.value) { - urd.ns = rdd.ns; - urd.rname = rdd.name; - q.userRoleDAO.update(trans, urd); - } - } - // Now delete old one - rdd.ns = delP1; - rdd.name = delP2; - if ((rq = q.roleDAO.delete(trans, rdd, false)).notOK()) { - eb.log(rq); - } - } else { - eb.log(rq); - } - } - } - - // 4) Change any Permissions with children matching this NS, and - Result> rpdc = q.permDAO.readChildren(trans,targetNs, targetName); - if (rpdc.isOKhasData()) { - for (PermDAO.Data pdd : rpdc.value) { - // Remove old Perm from Roles, save them off - List lrdd = new ArrayList(); - - for(String rl : pdd.roles(false)) { - Result rrdd = RoleDAO.Data.decode(trans,q,rl); - if(rrdd.isOKhasData()) { - RoleDAO.Data rdd = rrdd.value; - lrdd.add(rdd); - q.roleDAO.delPerm(trans, rdd, pdd); - } else{ - trans.error().log(rrdd.errorString()); - } - } - - // Save off Old keys - String delP1 = pdd.ns; - String delP2 = pdd.type; - pdd.ns = namespace.name; - pdd.type = (delP2.length() > targetNameDot) ? 
delP2 - .substring(targetNameDot) : ""; - if ((rq = q.permDAO.create(trans, pdd)).isOK()) { - // Put Role back into Perm, with correct info - for(RoleDAO.Data rdd : lrdd) { - q.roleDAO.addPerm(trans, rdd, pdd); - } - - pdd.ns = delP1; - pdd.type = delP2; - if ((rq = q.permDAO.delete(trans, pdd, false)).notOK()) { - eb.log(rq); - // } else { - // Need to invalidate directly, because we're - // switching places in NS, not normal cache behavior - // q.permDAO.invalidate(trans,pdd); - } - } else { - eb.log(rq); - } - } - } - if (eb.hasErr()) { - return Result.err(Status.ERR_ActionNotCompleted,eb.sb.toString(), eb.vars()); - } - } - return Result.ok(); - } - - private void addNSAdminRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) { - // Admin Role/Perm - RoleDAO.Data rd = new RoleDAO.Data(); - rd.ns = ns; - rd.name = "admin"; - rd.description = "AAF Namespace Administrators"; - - PermDAO.Data pd = new PermDAO.Data(); - pd.ns = ns; - pd.type = "access"; - pd.instance = Question.ASTERIX; - pd.action = Question.ASTERIX; - pd.description = "AAF Namespace Write Access"; - - rd.perms = new HashSet(); - rd.perms.add(pd.encode()); - eb.log(q.roleDAO.create(trans, rd)); - - pd.roles = new HashSet(); - pd.roles.add(rd.encode()); - eb.log(q.permDAO.create(trans, pd)); - } - - private void addNSOwnerRolesPerms(AuthzTrans trans, ErrBuilder eb, String ns) { - RoleDAO.Data rd = new RoleDAO.Data(); - rd.ns = ns; - rd.name = "owner"; - rd.description = "AAF Namespace Owners"; - - PermDAO.Data pd = new PermDAO.Data(); - pd.ns = ns; - pd.type = "access"; - pd.instance = Question.ASTERIX; - pd.action = Question.READ; - pd.description = "AAF Namespace Read Access"; - - rd.perms = new HashSet(); - rd.perms.add(pd.encode()); - eb.log(q.roleDAO.create(trans, rd)); - - pd.roles = new HashSet(); - pd.roles.add(rd.encode()); - eb.log(q.permDAO.create(trans, pd)); - } - - /** - * deleteNS - * - * Delete Namespace - * - * @param trans - * @param org - * @param ns - * @param force - * @param user - * @return - * @throws DAOException - * - * - * To delete an NS, you need to: 1) validate permission to - * modify this NS 2) Find all Roles with this NS, and 2a) if - * Force, delete them, else modify to Parent NS 3) Find all - * Perms with this NS, and modify to Parent NS 3a) if Force, - * delete them, else modify to Parent NS 4) Find all IDs - * associated to this NS, and deny if exists. 
5) Remove NS - */ - public Result deleteNS(AuthzTrans trans, String ns) { - boolean force = trans.forceRequested(); - boolean move = trans.moveRequested(); - // 1) Validate - Result> nsl; - if ((nsl = q.nsDAO.read(trans, ns)).notOKorIsEmpty()) { - return Result.err(Status.ERR_NsNotFound, "%s does not exist", ns); - } - NsDAO.Data nsd = nsl.value.get(0); - NsType nt; - if (move && !q.canMove(nt = NsType.fromType(nsd.type))) { - return Result.err(Status.ERR_Denied, "Namespace Force=move not permitted for Type %s",nt.name()); - } - - Result dnr = q.mayUser(trans, trans.user(), nsd, Access.write); - if (dnr.status != Status.OK) { - return Result.err(dnr); - } - - // 2) Find Parent - String user = trans.user(); - int idx = ns.lastIndexOf('.'); - NsDAO.Data parent; - if (idx < 0) { - if (!q.isGranted(trans, user, Define.ROOT_NS,Question.NS, ".", "delete")) { - return Result.err(Result.ERR_Security, - "%s may not delete Root Namespaces", user); - } - parent = null; - } else { - Result rlparent = q.deriveNs(trans, ns.substring(0, idx)); - if (rlparent.notOKorIsEmpty()) { - return Result.err(rlparent); - } - parent = rlparent.value; - } - - // Build up with any errors - // If sb != null below is an indication of error - StringBuilder sb = null; - ErrBuilder er = new ErrBuilder(); - - // 2a) Deny if any IDs on Namespace - Result> creds = q.credDAO.readNS(trans, ns); - if (creds.isOKhasData()) { - if (force || move) { - for (CredDAO.Data cd : creds.value) { - er.log(q.credDAO.delete(trans, cd, false)); - // Since we're deleting all the creds, we should delete all - // the user Roles for that Cred - Result> rlurd = q.userRoleDAO - .readByUser(trans, cd.id); - if (rlurd.isOK()) { - for (UserRoleDAO.Data data : rlurd.value) { - q.userRoleDAO.delete(trans, data, false); - } - } - - } - } else { - // first possible StringBuilder Create. - sb = new StringBuilder(); - sb.append('['); - sb.append(ns); - sb.append("] contains users"); - } - } - - // 2b) Find (or delete if forced flag is set) dependencies - // First, find if NS Perms are the only ones - Result> rpdc = q.permDAO.readNS(trans, ns); - if (rpdc.isOKhasData()) { - // Since there are now NS perms, we have to count NON-NS perms. - // FYI, if we delete them now, and the NS is not deleted, it is in - // an inconsistent state. - boolean nonaccess = false; - for (PermDAO.Data pdd : rpdc.value) { - if (!"access".equals(pdd.type)) { - nonaccess = true; - break; - } - } - if (nonaccess && !force && !move) { - if (sb == null) { - sb = new StringBuilder(); - sb.append('['); - sb.append(ns); - sb.append("] contains "); - } else { - sb.append(", "); - } - sb.append("permissions"); - } - } - - Result> rrdc = q.roleDAO.readNS(trans, ns); - if (rrdc.isOKhasData()) { - // Since there are now NS roles, we have to count NON-NS roles. - // FYI, if we delete th)em now, and the NS is not deleted, it is in - // an inconsistent state. - int count = rrdc.value.size(); - for (RoleDAO.Data rdd : rrdc.value) { - if ("admin".equals(rdd.name) || "owner".equals(rdd.name)) { - --count; - } - } - if (count > 0 && !force && !move) { - if (sb == null) { - sb = new StringBuilder(); - sb.append('['); - sb.append(ns); - sb.append("] contains "); - } else { - sb.append(", "); - } - sb.append("roles"); - } - } - - // 2c) Deny if dependencies exist that would be moved to root level - // parent is root level parent here. Need to find closest parent ns that - // exists - if (sb != null) { - if (!force && !move) { - sb.append(".\n Delete dependencies and try again. 
Note: using \"force=true\" will delete all. \"force=move\" will delete Creds, but move Roles and Perms to parent."); - return Result.err(Status.ERR_DependencyExists, sb.toString()); - } - - if (move && (parent == null || parent.type == NsType.COMPANY.type)) { - return Result - .err(Status.ERR_DependencyExists, - "Cannot move users, roles or permissions to [%s].\nDelete dependencies and try again", - parent.name); - } - } else if (move && parent != null) { - sb = new StringBuilder(); - // 3) Change any roles with children matching this NS, and - moveRoles(trans, parent, sb, rrdc); - // 4) Change any Perms with children matching this NS, and - movePerms(trans, parent, sb, rpdc); - } - - if (sb != null && sb.length() > 0) { - return Result.err(Status.ERR_DependencyExists, sb.toString()); - } - - if (er.hasErr()) { - if (trans.debug().isLoggable()) { - trans.debug().log(er.toString()); - } - return Result.err(Status.ERR_DependencyExists, - "Namespace members cannot be deleted for %s", ns); - } - - // 5) OK... good to go for NS Deletion... - if (!rpdc.isEmpty()) { - for (PermDAO.Data perm : rpdc.value) { - deletePerm(trans, perm, true, true); - } - } - if (!rrdc.isEmpty()) { - for (RoleDAO.Data role : rrdc.value) { - deleteRole(trans, role, true, true); - } - } - - return q.nsDAO.delete(trans, nsd, false); - } - - public Result> getOwners(AuthzTrans trans, String ns, - boolean includeExpired) { - return getUsersByRole(trans, ns + Question.DOT_OWNER, includeExpired); - } - - private Result mayAddOwner(AuthzTrans trans, String ns, String id) { - Result rq = q.deriveNs(trans, ns); - if (rq.notOK()) { - return Result.err(rq); - } - - rq = q.mayUser(trans, trans.user(), rq.value, Access.write); - if (rq.notOK()) { - return Result.err(rq); - } - - Identity user; - Organization org = trans.org(); - try { - if ((user = org.getIdentity(trans, id)) == null) { - return Result.err(Status.ERR_Policy, - "%s reports that this is not a valid credential", - org.getName()); - } - if (user.isResponsible()) { - return Result.ok(); - } else { - String reason="This is not a Test Environment"; - if (org.isTestEnv() && (reason = org.validate(trans, Policy.AS_EMPLOYEE, - new CassExecutor(trans, this), id))==null) { - return Result.ok(); - } - return Result.err(Status.ERR_Policy,reason); - } - } catch (Exception e) { - return Result.err(e); - } - } - - private Result mayAddAdmin(AuthzTrans trans, String ns, String id) { - // Does NS Exist? 
- Result r = checkValidID(trans, new Date(), id); - if (r.notOK()) { - return r; - } - // Is id able to be an Admin - Result rq = q.deriveNs(trans, ns); - if (rq.notOK()) { - return Result.err(rq); - } - - rq = q.mayUser(trans, trans.user(), rq.value, Access.write); - if (rq.notOK()) { - return Result.err(rq); - } - return r; - } - - private Result checkValidID(AuthzTrans trans, Date now, String user) { - Organization org = trans.org(); - if (user.endsWith(org.getRealm())) { - try { - if (org.getIdentity(trans, user) == null) { - return Result.err(Status.ERR_Denied, - "%s reports that %s is a faulty ID", org.getName(), - user); - } - return Result.ok(); - } catch (Exception e) { - return Result.err(Result.ERR_Security, - "%s is not a valid %s Credential", user, org.getName()); - } - } else { - Result> cdr = q.credDAO.readID(trans, user); - if (cdr.notOKorIsEmpty()) { - return Result.err(Status.ERR_Security, - "%s is not a valid AAF Credential", user); - } - - for (CredDAO.Data cd : cdr.value) { - if (cd.expires.after(now)) { - return Result.ok(); - } - } - } - return Result.err(Result.ERR_Security, "%s has expired", user); - } - - public Result delOwner(AuthzTrans trans, String ns, String id) { - Result rq = q.deriveNs(trans, ns); - if (rq.notOK()) { - return Result.err(rq); - } - - rq = q.mayUser(trans, trans.user(), rq.value, Access.write); - if (rq.notOK()) { - return Result.err(rq); - } - - return delUserRole(trans, id, ns,Question.OWNER); - } - - public Result> getAdmins(AuthzTrans trans, String ns, boolean includeExpired) { - return getUsersByRole(trans, ns + Question.DOT_ADMIN, includeExpired); - } - - public Result delAdmin(AuthzTrans trans, String ns, String id) { - Result rq = q.deriveNs(trans, ns); - if (rq.notOK()) { - return Result.err(rq); - } - - rq = q.mayUser(trans, trans.user(), rq.value, Access.write); - if (rq.notOK()) { - return Result.err(rq); - } - - return delUserRole(trans, id, ns, Question.ADMIN); - } - - /** - * Helper function that moves permissions from a namespace being deleted to - * its parent namespace - * - * @param trans - * @param parent - * @param sb - * @param rpdc - * - list of permissions in namespace being deleted - */ - private void movePerms(AuthzTrans trans, NsDAO.Data parent, - StringBuilder sb, Result> rpdc) { - - Result rv; - Result pd; - - if (rpdc.isOKhasData()) { - for (PermDAO.Data pdd : rpdc.value) { - String delP2 = pdd.type; - if ("access".equals(delP2)) { - continue; - } - // Remove old Perm from Roles, save them off - List lrdd = new ArrayList(); - - for(String rl : pdd.roles(false)) { - Result rrdd = RoleDAO.Data.decode(trans,q,rl); - if(rrdd.isOKhasData()) { - RoleDAO.Data rdd = rrdd.value; - lrdd.add(rdd); - q.roleDAO.delPerm(trans, rdd, pdd); - } else{ - trans.error().log(rrdd.errorString()); - } - } - - // Save off Old keys - String delP1 = pdd.ns; - NsSplit nss = new NsSplit(parent, pdd.fullType()); - pdd.ns = nss.ns; - pdd.type = nss.name; - // Use direct Create/Delete, because switching namespaces - if ((pd = q.permDAO.create(trans, pdd)).isOK()) { - // Put Role back into Perm, with correct info - for(RoleDAO.Data rdd : lrdd) { - q.roleDAO.addPerm(trans, rdd, pdd); - } - - pdd.ns = delP1; - pdd.type = delP2; - if ((rv = q.permDAO.delete(trans, pdd, false)).notOK()) { - sb.append(rv.details); - sb.append('\n'); - // } else { - // Need to invalidate directly, because we're switching - // places in NS, not normal cache behavior - // q.permDAO.invalidate(trans,pdd); - } - } else { - sb.append(pd.details); - sb.append('\n'); - } - 
} - } - } - - /** - * Helper function that moves roles from a namespace being deleted to its - * parent namespace - * - * @param trans - * @param parent - * @param sb - * @param rrdc - * - list of roles in namespace being deleted - */ - private void moveRoles(AuthzTrans trans, NsDAO.Data parent, - StringBuilder sb, Result> rrdc) { - - Result rv; - Result rd; - - if (rrdc.isOKhasData()) { - for (RoleDAO.Data rdd : rrdc.value) { - String delP2 = rdd.name; - if ("admin".equals(delP2) || "owner".equals(delP2)) { - continue; - } - // Remove old Role from Perms, save them off - List lpdd = new ArrayList(); - for(String p : rdd.perms(false)) { - Result rpdd = PermDAO.Data.decode(trans,q,p); - if(rpdd.isOKhasData()) { - PermDAO.Data pdd = rpdd.value; - lpdd.add(pdd); - q.permDAO.delRole(trans, pdd, rdd); - } else{ - trans.error().log(rpdd.errorString()); - } - } - - // Save off Old keys - String delP1 = rdd.ns; - - NsSplit nss = new NsSplit(parent, rdd.fullName()); - rdd.ns = nss.ns; - rdd.name = nss.name; - // Use direct Create/Delete, because switching namespaces - if ((rd = q.roleDAO.create(trans, rdd)).isOK()) { - // Put Role back into Perm, with correct info - for(PermDAO.Data pdd : lpdd) { - q.permDAO.addRole(trans, pdd, rdd); - } - - rdd.ns = delP1; - rdd.name = delP2; - if ((rv = q.roleDAO.delete(trans, rdd, true)).notOK()) { - sb.append(rv.details); - sb.append('\n'); - // } else { - // Need to invalidate directly, because we're switching - // places in NS, not normal cache behavior - // q.roleDAO.invalidate(trans,rdd); - } - } else { - sb.append(rd.details); - sb.append('\n'); - } - } - } - } - - /** - * Create Permission (and any missing Permission between this and Parent) if - * we have permission - * - * Pass in the desired Management Permission for this Permission - * - * If Force is set, then Roles listed will be created, if allowed, - * pre-granted. - */ - public Result createPerm(AuthzTrans trans, PermDAO.Data perm, boolean fromApproval) { - String user = trans.user(); - // Next, see if User is allowed to Manage Parent Permission - - Result rnsd; - if (!fromApproval) { - rnsd = q.mayUser(trans, user, perm, Access.write); - if (rnsd.notOK()) { - return Result.err(rnsd); - } - } else { - rnsd = q.deriveNs(trans, perm.ns); - } - - // Does Child exist? - if (!trans.forceRequested()) { - if (q.permDAO.read(trans, perm).isOKhasData()) { - return Result.err(Status.ERR_ConflictAlreadyExists, - "Permission [%s.%s|%s|%s] already exists.", perm.ns, - perm.type, perm.instance, perm.action); - } - } - - // Attempt to add perms to roles, creating as possible - Set roles; - String pstring = perm.encode(); - - // For each Role - for (String role : roles = perm.roles(true)) { - Result rdd = RoleDAO.Data.decode(trans,q,role); - if(rdd.isOKhasData()) { - RoleDAO.Data rd = rdd.value; - if (!fromApproval) { - // May User write to the Role in question. 
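// If not, the role is simply dropped from the grant set (roles.remove below)
// and a warning is logged; createPerm still proceeds for the remaining roles
// rather than failing the whole request.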
- Result rns = q.mayUser(trans, user, rd, - Access.write); - if (rns.notOK()) { - // Remove the role from Add, because - roles.remove(role); // Don't allow adding - trans.warn() - .log("User [%s] does not have permission to relate Permissions to Role [%s]", - user, role); - } - } - - Result> rlrd; - if ((rlrd = q.roleDAO.read(trans, rd)).notOKorIsEmpty()) { - rd.perms(true).add(pstring); - if (q.roleDAO.create(trans, rd).notOK()) { - roles.remove(role); // Role doesn't exist, and can't be - // created - } - } else { - rd = rlrd.value.get(0); - if (!rd.perms.contains(pstring)) { - q.roleDAO.addPerm(trans, rd, perm); - } - } - } - } - - Result pdr = q.permDAO.create(trans, perm); - if (pdr.isOK()) { - return Result.ok(); - } else { - return Result.err(pdr); - } - } - - public Result deletePerm(final AuthzTrans trans, final PermDAO.Data perm, boolean force, boolean fromApproval) { - String user = trans.user(); - - // Next, see if User is allowed to Manage Permission - Result rnsd; - if (!fromApproval) { - rnsd = q.mayUser(trans, user, perm, Access.write); - if (rnsd.notOK()) { - return Result.err(rnsd); - } - } - // Does Perm exist? - Result> pdr = q.permDAO.read(trans, perm); - if (pdr.notOKorIsEmpty()) { - return Result.err(Status.ERR_PermissionNotFound,"Permission [%s.%s|%s|%s] does not exist.", - perm.ns,perm.type, perm.instance, perm.action); - } - // Get perm, but with rest of data. - PermDAO.Data fullperm = pdr.value.get(0); - - // Attached to any Roles? - if (fullperm.roles != null) { - if (force) { - for (String role : fullperm.roles) { - Result rv = null; - Result rrdd = RoleDAO.Data.decode(trans, q, role); - if(rrdd.isOKhasData()) { - trans.debug().log("Removing", role, "from", fullperm, "on Perm Delete"); - if ((rv = q.roleDAO.delPerm(trans, rrdd.value, fullperm)).notOK()) { - if (rv.notOK()) { - trans.error().log("Error removing Role during delFromPermRole: ", - trans.getUserPrincipal(), - rv.errorString()); - } - } - } else { - return Result.err(rrdd); - } - } - } else if (!fullperm.roles.isEmpty()) { - return Result - .err(Status.ERR_DependencyExists, - "Permission [%s.%s|%s|%s] cannot be deleted as it is attached to 1 or more roles.", - fullperm.ns, fullperm.type, fullperm.instance, fullperm.action); - } - } - - return q.permDAO.delete(trans, fullperm, false); - } - - public Result deleteRole(final AuthzTrans trans, final RoleDAO.Data role, boolean force, boolean fromApproval) { - String user = trans.user(); - - // Next, see if User is allowed to Manage Role - Result rnsd; - if (!fromApproval) { - rnsd = q.mayUser(trans, user, role, Access.write); - if (rnsd.notOK()) { - return Result.err(rnsd); - } - } - - // Are there any Users Attached to Role? - Result> urdr = q.userRoleDAO.readByRole(trans,role.fullName()); - if (force) { - if (urdr.isOKhasData()) { - for (UserRoleDAO.Data urd : urdr.value) { - q.userRoleDAO.delete(trans, urd, false); - } - } - } else if (urdr.isOKhasData()) { - return Result.err(Status.ERR_DependencyExists, - "Role [%s.%s] cannot be deleted as it is used by 1 or more Users.", - role.ns, role.name); - } - - // Does Role exist? - Result> rdr = q.roleDAO.read(trans, role); - if (rdr.notOKorIsEmpty()) { - return Result.err(Status.ERR_RoleNotFound, - "Role [%s.%s] does not exist", role.ns, role.name); - } - RoleDAO.Data fullrole = rdr.value.get(0); // full key search - - // Remove Self from Permissions... always, force or not. 
Force only applies to Dependencies (Users) - if (fullrole.perms != null) { - for (String perm : fullrole.perms(false)) { - Result rpd = PermDAO.Data.decode(trans,q,perm); - if (rpd.isOK()) { - trans.debug().log("Removing", perm, "from", fullrole,"on Role Delete"); - - Result r = q.permDAO.delRole(trans, rpd.value, fullrole); - if (r.notOK()) { - trans.error().log("ERR_FDR1 unable to remove",fullrole,"from",perm,':',r.status,'-',r.details); - } - } else { - trans.error().log("ERR_FDR2 Could not remove",perm,"from",fullrole); - } - } - } - return q.roleDAO.delete(trans, fullrole, false); - } - - /** - * Only owner of Permission may add to Role - * - * If force set, however, Role will be created before Grant, if User is - * allowed to create. - * - * @param trans - * @param role - * @param pd - * @return - */ - public Result addPermToRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) { - String user = trans.user(); - - if (!fromApproval) { - Result rRoleCo = q.deriveFirstNsForType(trans, role.ns, NsType.COMPANY); - if(rRoleCo.notOK()) { - return Result.err(rRoleCo); - } - Result rPermCo = q.deriveFirstNsForType(trans, pd.ns, NsType.COMPANY); - if(rPermCo.notOK()) { - return Result.err(rPermCo); - } - - // Not from same company - if(!rRoleCo.value.name.equals(rPermCo.value.name)) { - Result r; - // Only grant if User ALSO has Write ability in Other Company - if((r = q.mayUser(trans, user, role, Access.write)).notOK()) { - return Result.err(r); - } - } - - - // Must be Perm Admin, or Granted Special Permission - Result ucp = q.mayUser(trans, user, pd, Access.write); - if (ucp.notOK()) { - // Don't allow CLI potential Grantees to change their own AAF - // Perms, - if ((Define.ROOT_NS.equals(pd.ns) && Question.NS.equals(pd.type)) - || !q.isGranted(trans, trans.user(),Define.ROOT_NS,Question.PERM, rPermCo.value.name, "grant")) { - // Not otherwise granted - // TODO Needed? - return Result.err(ucp); - } - // Final Check... Don't allow Grantees to add to Roles they are - // part of - Result> rlurd = q.userRoleDAO - .readByUser(trans, trans.user()); - if (rlurd.isOK()) { - for (UserRoleDAO.Data ur : rlurd.value) { - if (role.ns.equals(ur.ns) && role.name.equals(ur.rname)) { - return Result.err(ucp); - } - } - } - } - } - - Result> rlpd = q.permDAO.read(trans, pd); - if (rlpd.notOKorIsEmpty()) { - return Result.err(Status.ERR_PermissionNotFound, - "Permission must exist to add to Role"); - } - - Result> rlrd = q.roleDAO.read(trans, role); // Already - // Checked - // for - // can - // change - // Role - Result rv; - - if (rlrd.notOKorIsEmpty()) { - if (trans.forceRequested()) { - Result ucr = q.mayUser(trans, user, role, - Access.write); - if (ucr.notOK()) { - return Result - .err(Status.ERR_Denied, - "Role [%s.%s] does not exist. User [%s] cannot create.", - role.ns, role.name, user); - } - - role.perms(true).add(pd.encode()); - Result rdd = q.roleDAO.create(trans, role); - if (rdd.isOK()) { - rv = Result.ok(); - } else { - rv = Result.err(rdd); - } - } else { - return Result.err(Status.ERR_RoleNotFound, - "Role [%s.%s] does not exist.", role.ns, role.name); - } - } else { - role = rlrd.value.get(0); - if (role.perms(false).contains(pd.encode())) { - return Result.err(Status.ERR_ConflictAlreadyExists, - "Permission [%s.%s] is already a member of role [%s,%s]", - pd.ns, pd.type, role.ns, role.name); - } - role.perms(true).add(pd.encode()); // this is added for Caching - // access purposes... 
doesn't - // affect addPerm - rv = q.roleDAO.addPerm(trans, role, pd); - } - if (rv.status == Status.OK) { - return q.permDAO.addRole(trans, pd, role); - // exploring how to add information message to successful http - // request - } - return rv; - } - - /** - * Either Owner of Role or Permission may delete from Role - * - * @param trans - * @param role - * @param pd - * @return - */ - public Result delPermFromRole(AuthzTrans trans, RoleDAO.Data role,PermDAO.Data pd, boolean fromApproval) { - String user = trans.user(); - if (!fromApproval) { - Result ucr = q.mayUser(trans, user, role, Access.write); - Result ucp = q.mayUser(trans, user, pd, Access.write); - - // If Can't change either Role or Perm, then deny - if (ucr.notOK() && ucp.notOK()) { - return Result.err(Status.ERR_Denied, - "User [" + trans.user() - + "] does not have permission to delete [" - + pd.encode() + "] from Role [" - + role.fullName() + ']'); - } - } - - Result> rlr = q.roleDAO.read(trans, role); - if (rlr.notOKorIsEmpty()) { - // If Bad Data, clean out - Result> rlp = q.permDAO.read(trans, pd); - if (rlp.isOKhasData()) { - for (PermDAO.Data pv : rlp.value) { - q.permDAO.delRole(trans, pv, role); - } - } - return Result.err(rlr); - } - String perm1 = pd.encode(); - boolean notFound; - if (trans.forceRequested()) { - notFound = false; - } else { // only check if force not set. - notFound = true; - for (RoleDAO.Data r : rlr.value) { - if (r.perms != null) { - for (String perm : r.perms) { - if (perm1.equals(perm)) { - notFound = false; - break; - } - } - if(!notFound) { - break; - } - } - } - } - if (notFound) { // Need to check both, in case of corruption - return Result.err(Status.ERR_PermissionNotFound, - "Permission [%s.%s|%s|%s] not associated with any Role", - pd.ns,pd.type,pd.instance,pd.action); - } - - // Read Perm for full data - Result> rlp = q.permDAO.read(trans, pd); - Result rv = null; - if (rlp.isOKhasData()) { - for (PermDAO.Data pv : rlp.value) { - if ((rv = q.permDAO.delRole(trans, pv, role)).isOK()) { - if ((rv = q.roleDAO.delPerm(trans, role, pv)).notOK()) { - trans.error().log( - "Error removing Perm during delFromPermRole:", - trans.getUserPrincipal(), rv.errorString()); - } - } else { - trans.error().log( - "Error removing Role during delFromPermRole:", - trans.getUserPrincipal(), rv.errorString()); - } - } - } else { - rv = q.roleDAO.delPerm(trans, role, pd); - if (rv.notOK()) { - trans.error().log("Error removing Role during delFromPermRole", - rv.errorString()); - } - } - return rv == null ? Result.ok() : rv; - } - - public Result delPermFromRole(AuthzTrans trans, String role,PermDAO.Data pd) { - Result nss = q.deriveNsSplit(trans, role); - if (nss.notOK()) { - return Result.err(nss); - } - RoleDAO.Data rd = new RoleDAO.Data(); - rd.ns = nss.value.ns; - rd.name = nss.value.name; - return delPermFromRole(trans, rd, pd, false); - } - - /** - * Add a User to Role - * - * 1) Role must exist 2) User must be a known Credential (i.e. 
mechID ok if - * Credential) or known Organizational User - * - * @param trans - * @param org - * @param urData - * @return - * @throws DAOException - */ - public Result addUserRole(AuthzTrans trans,UserRoleDAO.Data urData) { - Result rv; - if(Question.ADMIN.equals(urData.rname)) { - rv = mayAddAdmin(trans, urData.ns, urData.user); - } else if(Question.OWNER.equals(urData.rname)) { - rv = mayAddOwner(trans, urData.ns, urData.user); - } else { - rv = checkValidID(trans, new Date(), urData.user); - } - if(rv.notOK()) { - return rv; - } - - // Check if record exists - if (q.userRoleDAO.read(trans, urData).isOKhasData()) { - return Result.err(Status.ERR_ConflictAlreadyExists, - "User Role exists"); - } - if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) { - return Result.err(Status.ERR_RoleNotFound, - "Role [%s.%s] does not exist", urData.ns, urData.rname); - } - - urData.expires = trans.org().expiration(null, Expiration.UserInRole, urData.user).getTime(); - - - Result udr = q.userRoleDAO.create(trans, urData); - switch (udr.status) { - case OK: - return Result.ok(); - default: - return Result.err(udr); - } - } - - public Result addUserRole(AuthzTrans trans, String user, String ns, String rname) { - UserRoleDAO.Data urdd = new UserRoleDAO.Data(); - urdd.ns = ns; - urdd.role(ns, rname); - urdd.user = user; - return addUserRole(trans,urdd); - } - - /** - * Extend User Role. - * - * extend the Expiration data, according to Organization rules. - * - * @param trans - * @param org - * @param urData - * @return - */ - public Result extendUserRole(AuthzTrans trans, UserRoleDAO.Data urData, boolean checkForExist) { - // Check if record still exists - if (checkForExist && q.userRoleDAO.read(trans, urData).notOKorIsEmpty()) { - return Result.err(Status.ERR_UserRoleNotFound, - "User Role does not exist"); - } - if (q.roleDAO.read(trans, urData.ns, urData.rname).notOKorIsEmpty()) { - return Result.err(Status.ERR_RoleNotFound, - "Role [%s.%s] does not exist", urData.ns,urData.rname); - } - // Special case for "Admin" roles. 
Issue brought forward with Prod - // problem 9/26 - - urData.expires = trans.org().expiration(null, Expiration.UserInRole).getTime(); // get - // Full - // time - // starting - // today - return q.userRoleDAO.update(trans, urData); - } - - // //////////////////////////////////////////////////// - // Special User Role Functions - // These exist, because User Roles have Expiration dates, which must be - // accounted for - // Also, as of July, 2015, Namespace Owners and Admins are now regular User - // Roles - // //////////////////////////////////////////////////// - public Result> getUsersByRole(AuthzTrans trans, String role, boolean includeExpired) { - Result> rurdd = q.userRoleDAO.readByRole(trans,role); - if (rurdd.notOK()) { - return Result.err(rurdd); - } - Date now = new Date(); - List list = rurdd.value; - List rv = new ArrayList(list.size()); // presize - for (UserRoleDAO.Data urdd : rurdd.value) { - if (includeExpired || urdd.expires.after(now)) { - rv.add(urdd.user); - } - } - return Result.ok(rv); - } - - public Result delUserRole(AuthzTrans trans, String user, String ns, String rname) { - UserRoleDAO.Data urdd = new UserRoleDAO.Data(); - urdd.user = user; - urdd.role(ns,rname); - Result> r = q.userRoleDAO.read(trans, urdd); - if (r.status == 404 || r.isEmpty()) { - return Result.err(Status.ERR_UserRoleNotFound, - "UserRole [%s] [%s.%s]", user, ns, rname); - } - if (r.notOK()) { - return Result.err(r); - } - - return q.userRoleDAO.delete(trans, urdd, false); - } - - public Result> createFuture(AuthzTrans trans, FutureDAO.Data data, String id, String user, - NsDAO.Data nsd, String op) { - // Create Future Object - List approvers=null; - Result fr = q.futureDAO.create(trans, data, id); - if (fr.isOK()) { - // User Future ID as ticket for Approvals - final UUID ticket = fr.value.id; - ApprovalDAO.Data ad; - try { - Organization org = trans.org(); - approvers = org.getApprovers(trans, user); - for (Identity u : approvers) { - ad = new ApprovalDAO.Data(); - // Note ad.id is set by ApprovalDAO Create - ad.ticket = ticket; - ad.user = user; - ad.approver = u.id(); - ad.status = ApprovalDAO.PENDING; - ad.memo = data.memo; - ad.type = org.getApproverType(); - ad.operation = op; - // Note ad.updated is created in System - Result ar = q.approvalDAO.create(trans,ad); - if (ar.notOK()) { - return Result.err(Status.ERR_ActionNotCompleted, - "Approval for %s, %s could not be created: %s", - ad.user, ad.approver, ar.details); - } - } - if (nsd != null) { - Result> rrbr = q.userRoleDAO - .readByRole(trans, nsd.name + Question.DOT_OWNER); - if (rrbr.isOK()) { - for (UserRoleDAO.Data urd : rrbr.value) { - ad = new ApprovalDAO.Data(); - // Note ad.id is set by ApprovalDAO Create - ad.ticket = ticket; - ad.user = user; - ad.approver = urd.user; - ad.status = ApprovalDAO.PENDING; - ad.memo = data.memo; - ad.type = "owner"; - ad.operation = op; - // Note ad.updated is created in System - Result ar = q.approvalDAO.create(trans, ad); - if (ar.notOK()) { - return Result.err(Status.ERR_ActionNotCompleted, - "Approval for %s, %s could not be created: %s", - ad.user, ad.approver, - ar.details); - } - } - } - } - } catch (Exception e) { - return Result.err(e); - } - } - - return Result.ok(approvers); - } - - public Result performFutureOp(AuthzTrans trans, ApprovalDAO.Data cd) { - Result> fd = q.futureDAO.read(trans, cd.ticket); - Result> allApprovalsForTicket = q.approvalDAO - .readByTicket(trans, cd.ticket); - Result rv = Result.ok(); - for (FutureDAO.Data curr : fd.value) { - if 
("approved".equalsIgnoreCase(cd.status)) { - if (allApprovalsForTicket.value.size() <= 1) { - // should check if any other pendings before performing - // actions - try { - if (FOP_ROLE.equalsIgnoreCase(curr.target)) { - RoleDAO.Data data = new RoleDAO.Data(); - data.reconstitute(curr.construct); - if ("C".equalsIgnoreCase(cd.operation)) { - Result rd; - if ((rd = q.roleDAO.dao().create(trans, data)).notOK()) { - rv = Result.err(rd); - } - } else if ("D".equalsIgnoreCase(cd.operation)) { - rv = deleteRole(trans, data, true, true); - } - - } else if (FOP_PERM.equalsIgnoreCase(curr.target)) { - PermDAO.Data pdd = new PermDAO.Data(); - pdd.reconstitute(curr.construct); - if ("C".equalsIgnoreCase(cd.operation)) { - rv = createPerm(trans, pdd, true); - } else if ("D".equalsIgnoreCase(cd.operation)) { - rv = deletePerm(trans, pdd, true, true); - } else if ("G".equalsIgnoreCase(cd.operation)) { - Set roles = pdd.roles(true); - Result rrdd = null; - for (String roleStr : roles) { - rrdd = RoleDAO.Data.decode(trans, q, roleStr); - if (rrdd.isOKhasData()) { - rv = addPermToRole(trans, rrdd.value, pdd, true); - } else { - trans.error().log(rrdd.errorString()); - } - } - } else if ("UG".equalsIgnoreCase(cd.operation)) { - Set roles = pdd.roles(true); - Result rrdd; - for (String roleStr : roles) { - rrdd = RoleDAO.Data.decode(trans, q, roleStr); - if (rrdd.isOKhasData()) { - rv = delPermFromRole(trans, rrdd.value, pdd, true); - } else { - trans.error().log(rrdd.errorString()); - } - } - } - - } else if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) { - UserRoleDAO.Data data = new UserRoleDAO.Data(); - data.reconstitute(curr.construct); - // if I am the last to approve, create user role - if ("C".equalsIgnoreCase(cd.operation)) { - rv = addUserRole(trans, data); - } else if ("U".equals(cd.operation)) { - rv = extendUserRole(trans, data, true); - } - - } else if (FOP_NS.equalsIgnoreCase(curr.target)) { - Namespace namespace = new Namespace(); - namespace.reconstitute(curr.construct); - - if ("C".equalsIgnoreCase(cd.operation)) { - rv = createNS(trans, namespace, true); - } - - } else if (FOP_DELEGATE.equalsIgnoreCase(curr.target)) { - DelegateDAO.Data data = new DelegateDAO.Data(); - data.reconstitute(curr.construct); - if ("C".equalsIgnoreCase(cd.operation)) { - Result dd; - if ((dd = q.delegateDAO.create(trans, data)).notOK()) { - rv = Result.err(dd); - } - } else if ("U".equalsIgnoreCase(cd.operation)) { - rv = q.delegateDAO.update(trans, data); - } - } else if (FOP_CRED.equalsIgnoreCase(curr.target)) { - CredDAO.Data data = new CredDAO.Data(); - data.reconstitute(curr.construct); - if ("C".equalsIgnoreCase(cd.operation)) { - Result rd; - if ((rd = q.credDAO.dao().create(trans, data)).notOK()) { - rv = Result.err(rd); - } - } - } - } catch (IOException e) { - trans.error().log("IOException: ", e.getMessage(), - " \n occurred while performing", cd.memo, - " from approval ", cd.id.toString()); - } - } - } else if ("denied".equalsIgnoreCase(cd.status)) { - for (ApprovalDAO.Data ad : allApprovalsForTicket.value) { - q.approvalDAO.delete(trans, ad, false); - } - q.futureDAO.delete(trans, curr, false); - if (FOP_USER_ROLE.equalsIgnoreCase(curr.target)) { - // if I am the last to approve, create user role - if ("U".equals(cd.operation)) { - UserRoleDAO.Data data = new UserRoleDAO.Data(); - try { - data.reconstitute(curr.construct); - } catch (IOException e) { - trans.error().log("Cannot reconstitue",curr.memo); - } - rv = delUserRole(trans, data.user, data.ns, data.rname); - } - } - - } - - // if I am the 
last to approve, delete the future object - if (rv.isOK() && allApprovalsForTicket.value.size() <= 1) { - q.futureDAO.delete(trans, curr, false); - } - - } // end for each - return rv; - - } - - public Executor newExecutor(AuthzTrans trans) { - return new CassExecutor(trans, this); - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java deleted file mode 100644 index 40f59178..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/PermLookup.java +++ /dev/null @@ -1,184 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.hl; - -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; - -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; - -/** - * PermLookup is a Storage class for the various pieces of looking up Permission - * during Transactions to avoid duplicate processing - * - * - */ -// Package on purpose -class PermLookup { - private AuthzTrans trans; - private String user; - private Question q; - private Result> userRoles = null; - private Result> roles = null; - private Result> permNames = null; - private Result> perms = null; - - private PermLookup() {} - - static PermLookup get(AuthzTrans trans, Question q, String user) { - PermLookup lp=null; - Map permMap = trans.get(Question.PERMS, null); - if (permMap == null) { - trans.put(Question.PERMS, permMap = new HashMap()); - } else { - lp = permMap.get(user); - } - - if (lp == null) { - lp = new PermLookup(); - lp.trans = trans; - lp.user = user; - lp.q = q; - permMap.put(user, lp); - } - return lp; - } - - public Result> getUserRoles() { - if(userRoles==null) { - userRoles = q.userRoleDAO.readByUser(trans,user); - if(userRoles.isOKhasData()) { - List lurdd = new ArrayList(); - Date now = new Date(); - for(UserRoleDAO.Data urdd : userRoles.value) { - if(urdd.expires.after(now)) { // Remove Expired - lurdd.add(urdd); - } - } - if(lurdd.size()==0) { - return userRoles = 
Result.err(Status.ERR_UserNotFound, - "%s not found or not associated with any Roles: ", - user); - } else { - return userRoles = Result.ok(lurdd); - } - } else { - return userRoles; - } - } else { - return userRoles; - } - } - - public Result> getRoles() { - if(roles==null) { - Result> rur = getUserRoles(); - if(rur.isOK()) { - List lrdd = new ArrayList(); - for (UserRoleDAO.Data urdata : rur.value) { - // Gather all permissions from all Roles - if(urdata.ns==null || urdata.rname==null) { - trans.error().printf("DB Content Error: nulls in User Role %s %s", urdata.user,urdata.role); - } else { - Result> rlrd = q.roleDAO.read( - trans, urdata.ns, urdata.rname); - if(rlrd.isOK()) { - lrdd.addAll(rlrd.value); - } - } - } - return roles = Result.ok(lrdd); - } else { - return roles = Result.err(rur); - } - } else { - return roles; - } - } - - public Result> getPermNames() { - if(permNames==null) { - Result> rlrd = getRoles(); - if (rlrd.isOK()) { - Set pns = new TreeSet(); - for (RoleDAO.Data rdata : rlrd.value) { - pns.addAll(rdata.perms(false)); - } - return permNames = Result.ok(pns); - } else { - return permNames = Result.err(rlrd); - } - } else { - return permNames; - } - } - - public Result> getPerms(boolean lookup) { - if(perms==null) { - // Note: It should be ok for a Valid user to have no permissions - - // 8/12/2013 - Result> rss = getPermNames(); - if(rss.isOK()) { - List lpdd = new ArrayList(); - for (String perm : rss.value) { - if(lookup) { - Result ap = PermDAO.Data.decodeToArray(trans, q, perm); - if(ap.isOK()) { - Result> rlpd = q.permDAO.read(perm,trans,ap); - if (rlpd.isOKhasData()) { - for (PermDAO.Data pData : rlpd.value) { - lpdd.add(pData); - } - } - } else { - trans.error().log("In getPermsByUser, for", user, perm); - } - } else { - Result pr = PermDAO.Data.decode(trans, q, perm); - if (pr.notOK()) { - trans.error().log("In getPermsByUser, for", user, pr.errorString()); - } else { - lpdd.add(pr.value); - } - } - - } - return perms = Result.ok(lpdd); - } else { - return perms = Result.err(rss); - } - } else { - return perms; - } - } -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java b/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java deleted file mode 100644 index c552cc93..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/aaf/hl/Question.java +++ /dev/null @@ -1,1087 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.hl; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import org.onap.aaf.authz.common.Define; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.env.AuthzTransFilter; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.authz.org.Organization; -import org.onap.aaf.authz.org.Organization.Identity; -import org.onap.aaf.dao.AbsCassDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.DAOException; -import org.onap.aaf.dao.aaf.cached.CachedCertDAO; -import org.onap.aaf.dao.aaf.cached.CachedCredDAO; -import org.onap.aaf.dao.aaf.cached.CachedNSDAO; -import org.onap.aaf.dao.aaf.cached.CachedPermDAO; -import org.onap.aaf.dao.aaf.cached.CachedRoleDAO; -import org.onap.aaf.dao.aaf.cached.CachedUserRoleDAO; -import org.onap.aaf.dao.aaf.cass.ApprovalDAO; -import org.onap.aaf.dao.aaf.cass.CacheInfoDAO; -import org.onap.aaf.dao.aaf.cass.CertDAO; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.DelegateDAO; -import org.onap.aaf.dao.aaf.cass.FutureDAO; -import org.onap.aaf.dao.aaf.cass.HistoryDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO; -import org.onap.aaf.dao.aaf.cass.NsSplit; -import org.onap.aaf.dao.aaf.cass.NsType; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.Status; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO.Data; - -import org.onap.aaf.cadi.Hash; -import org.onap.aaf.cadi.aaf.PermEval; -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.Slot; -import org.onap.aaf.inno.env.TimeTaken; -import org.onap.aaf.inno.env.util.Chrono; -import com.datastax.driver.core.Cluster; - -/** - * Question HL DAO - * - * A Data Access Combination Object which asks Security and other Questions - * - * - */ -public class Question { - // DON'T CHANGE FROM lower Case!!! 
- public static enum Type { - ns, role, perm, cred - }; - - public static final String OWNER="owner"; - public static final String ADMIN="admin"; - public static final String DOT_OWNER=".owner"; - public static final String DOT_ADMIN=".admin"; - static final String ASTERIX = "*"; - - public static enum Access { - read, write, create - }; - - public static final String READ = Access.read.name(); - public static final String WRITE = Access.write.name(); - public static final String CREATE = Access.create.name(); - - public static final String ROLE = Type.role.name(); - public static final String PERM = Type.perm.name(); - public static final String NS = Type.ns.name(); - public static final String CRED = Type.cred.name(); - private static final String DELG = "delg"; - public static final String ATTRIB = "attrib"; - - - public static final int MAX_SCOPE = 10; - public static final int APP_SCOPE = 3; - public static final int COMPANY_SCOPE = 2; - static Slot PERMS; - - private static Set specialLog = null; - public static final SecureRandom random = new SecureRandom(); - private static long traceID = random.nextLong(); - private static final String SPECIAL_LOG_SLOT = "SPECIAL_LOG_SLOT"; - private static Slot specialLogSlot = null; - private static Slot transIDSlot = null; - - - public final HistoryDAO historyDAO; - public final CachedNSDAO nsDAO; - public final CachedRoleDAO roleDAO; - public final CachedPermDAO permDAO; - public final CachedUserRoleDAO userRoleDAO; - public final CachedCredDAO credDAO; - public final CachedCertDAO certDAO; - public final DelegateDAO delegateDAO; - public final FutureDAO futureDAO; - public final ApprovalDAO approvalDAO; - private final CacheInfoDAO cacheInfoDAO; - - // final ContactDAO contDAO; - // private static final String DOMAIN = "@aaf.att.com"; - // private static final int DOMAIN_LENGTH = 0; - - public Question(AuthzTrans trans, Cluster cluster, String keyspace, boolean startClean) throws APIException, IOException { - PERMS = trans.slot("USER_PERMS"); - trans.init().log("Instantiating DAOs"); - historyDAO = new HistoryDAO(trans, cluster, keyspace); - - // Deal with Cached Entries - cacheInfoDAO = new CacheInfoDAO(trans, historyDAO); - - nsDAO = new CachedNSDAO(new NsDAO(trans, historyDAO, cacheInfoDAO), - cacheInfoDAO); - permDAO = new CachedPermDAO( - new PermDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO); - roleDAO = new CachedRoleDAO( - new RoleDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO); - userRoleDAO = new CachedUserRoleDAO(new UserRoleDAO(trans, historyDAO, - cacheInfoDAO), cacheInfoDAO); - credDAO = new CachedCredDAO( - new CredDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO); - certDAO = new CachedCertDAO( - new CertDAO(trans, historyDAO, cacheInfoDAO), cacheInfoDAO); - - futureDAO = new FutureDAO(trans, historyDAO); - delegateDAO = new DelegateDAO(trans, historyDAO); - approvalDAO = new ApprovalDAO(trans, historyDAO); - - // Only want to aggressively cleanse User related Caches... 
The others, - // just normal refresh - if(startClean) { - CachedDAO.startCleansing(trans.env(), credDAO, userRoleDAO); - CachedDAO.startRefresh(trans.env(), cacheInfoDAO); - } - // Set a Timer to Check Caches to send messages for Caching changes - - if(specialLogSlot==null) { - specialLogSlot = trans.slot(SPECIAL_LOG_SLOT); - transIDSlot = trans.slot(AuthzTransFilter.TRANS_ID_SLOT); - } - - AbsCassDAO.primePSIs(trans); - } - - - public void close(AuthzTrans trans) { - historyDAO.close(trans); - cacheInfoDAO.close(trans); - nsDAO.close(trans); - permDAO.close(trans); - roleDAO.close(trans); - userRoleDAO.close(trans); - credDAO.close(trans); - certDAO.close(trans); - delegateDAO.close(trans); - futureDAO.close(trans); - approvalDAO.close(trans); - } - - public Result permFrom(AuthzTrans trans, String type, - String instance, String action) { - Result rnd = deriveNs(trans, type); - if (rnd.isOK()) { - return Result.ok(new PermDAO.Data(new NsSplit(rnd.value, type), - instance, action)); - } else { - return Result.err(rnd); - } - } - - /** - * getPermsByUser - * - * Because this call is frequently called internally, AND because we already - * look for it in the initial Call, we cache within the Transaction - * - * @param trans - * @param user - * @return - */ - public Result> getPermsByUser(AuthzTrans trans, String user, boolean lookup) { - return PermLookup.get(trans, this, user).getPerms(lookup); - } - - public Result> getPermsByUserFromRolesFilter(AuthzTrans trans, String user, String forUser) { - PermLookup plUser = PermLookup.get(trans, this, user); - Result> plPermNames = plUser.getPermNames(); - if(plPermNames.notOK()) { - return Result.err(plPermNames); - } - - Set nss; - if(forUser.equals(user)) { - nss = null; - } else { - // Setup a TreeSet to check on Namespaces to - nss = new TreeSet(); - PermLookup fUser = PermLookup.get(trans, this, forUser); - Result> forUpn = fUser.getPermNames(); - if(forUpn.notOK()) { - return Result.err(forUpn); - } - - for(String pn : forUpn.value) { - Result decoded = PermDAO.Data.decodeToArray(trans, this, pn); - if(decoded.isOKhasData()) { - nss.add(decoded.value[0]); - } else { - trans.error().log(pn,", derived from a Role, is invalid:",decoded.errorString()); - } - } - } - - List rlpUser = new ArrayList(); - Result rpdd; - PermDAO.Data pdd; - for(String pn : plPermNames.value) { - rpdd = PermDAO.Data.decode(trans, this, pn); - if(rpdd.isOKhasData()) { - pdd=rpdd.value; - if(nss==null || nss.contains(pdd.ns)) { - rlpUser.add(pdd); - } - } else { - trans.error().log(pn,", derived from a Role, is invalid. 
Run Data Cleanup:",rpdd.errorString()); - } - } - return Result.ok(rlpUser); - } - - public Result> getPermsByType(AuthzTrans trans, String perm) { - Result nss = deriveNsSplit(trans, perm); - if (nss.notOK()) { - return Result.err(nss); - } - return permDAO.readByType(trans, nss.value.ns, nss.value.name); - } - - public Result> getPermsByName(AuthzTrans trans, - String type, String instance, String action) { - Result nss = deriveNsSplit(trans, type); - if (nss.notOK()) { - return Result.err(nss); - } - return permDAO.read(trans, nss.value.ns, nss.value.name, instance,action); - } - - public Result> getPermsByRole(AuthzTrans trans, String role, boolean lookup) { - Result nss = deriveNsSplit(trans, role); - if (nss.notOK()) { - return Result.err(nss); - } - - Result> rlrd = roleDAO.read(trans, nss.value.ns, - nss.value.name); - if (rlrd.notOKorIsEmpty()) { - return Result.err(rlrd); - } - // Using Set to avoid duplicates - Set permNames = new HashSet(); - if (rlrd.isOKhasData()) { - for (RoleDAO.Data drr : rlrd.value) { - permNames.addAll(drr.perms(false)); - } - } - - // Note: It should be ok for a Valid user to have no permissions - - // 8/12/2013 - List perms = new ArrayList(); - for (String perm : permNames) { - Result pr = PermDAO.Data.decode(trans, this, perm); - if (pr.notOK()) { - return Result.err(pr); - } - - if(lookup) { - Result> rlpd = permDAO.read(trans, pr.value); - if (rlpd.isOKhasData()) { - for (PermDAO.Data pData : rlpd.value) { - perms.add(pData); - } - } - } else { - perms.add(pr.value); - } - } - - return Result.ok(perms); - } - - public Result> getRolesByName(AuthzTrans trans, - String role) { - Result nss = deriveNsSplit(trans, role); - if (nss.notOK()) { - return Result.err(nss); - } - String r = nss.value.name; - if (r.endsWith(".*")) { // do children Search - return roleDAO.readChildren(trans, nss.value.ns, - r.substring(0, r.length() - 2)); - } else if (ASTERIX.equals(r)) { - return roleDAO.readChildren(trans, nss.value.ns, ASTERIX); - } else { - return roleDAO.read(trans, nss.value.ns, r); - } - } - - /** - * Derive NS - * - * Given a Child Namespace, figure out what the best Namespace parent is. - * - * For instance, if in the NS table, the parent "com.att" exists, but not - * "com.att.child" or "com.att.a.b.c", then passing in either - * "com.att.child" or "com.att.a.b.c" will return "com.att" - * - * Uses recursive search on Cached DAO data - * - * @param trans - * @param child - * @return - */ - public Result deriveNs(AuthzTrans trans, String child) { - Result> r = nsDAO.read(trans, child); - - if (r.isOKhasData()) { - return Result.ok(r.value.get(0)); - } else { - int dot = child == null ? 
-1 : child.lastIndexOf('.'); - if (dot < 0) { - return Result.err(Status.ERR_NsNotFound, - "No Namespace for [%s]", child); - } else { - return deriveNs(trans, child.substring(0, dot)); - } - } - } - - public Result deriveFirstNsForType(AuthzTrans trans, String str, NsType type) { - NsDAO.Data nsd; - - System.out.println("value of str before for loop ---------0---++++++++++++++++++" +str); - for(int idx = str.indexOf('.');idx>=0;idx=str.indexOf('.',idx+1)) { - // System.out.println("printing value of str-----------------1------------++++++++++++++++++++++" +str); - Result> rld = nsDAO.read(trans, str.substring(0,idx)); - System.out.println("value of idx is -----------------++++++++++++++++++++++++++" +idx); - System.out.println("printing value of str.substring-----------------1------------++++++++++++++++++++++" + (str.substring(0,idx))); - System.out.println("value of ResultListData ------------------2------------+++++++++++++++++++++++++++" +rld); - if(rld.isOKhasData()) { - System.out.println("In if loop -----------------3-------------- ++++++++++++++++"); - System.out.println("value of nsd=rld.value.get(0).type -----------4------++++++++++++++++++++++++++++++++++++" +(nsd=rld.value.get(0)).type); - System.out.println("value of rld.value.get(0).name.toString()+++++++++++++++++++++++++++++++ " +rld.value.get(0).name); - if(type.type == (nsd=rld.value.get(0)).type) { - return Result.ok(nsd); - } - } else { - System.out.println("In else loop ----------------4------------+++++++++++++++++++++++"); - return Result.err(Status.ERR_NsNotFound,"There is no valid Company Namespace for %s",str.substring(0,idx)); - } - } - return Result.err(Status.ERR_NotFound, str + " does not contain type " + type.name()); - } - - public Result deriveNsSplit(AuthzTrans trans, String child) { - Result ndd = deriveNs(trans, child); - if (ndd.isOK()) { - NsSplit nss = new NsSplit(ndd.value, child); - if (nss.isOK()) { - return Result.ok(nss); - } else { - return Result.err(Status.ERR_NsNotFound, - "Cannot split [%s] into valid namespace elements", - child); - } - } - return Result.err(ndd); - } - - /** - * Translate an ID into it's domain - * - * i.e. myid1234@myapp.att.com results in domain of com.att.myapp - * - * @param id - * @return - */ - public static String domain2ns(String id) { - int at = id.indexOf('@'); - if (at >= 0) { - String[] domain = id.substring(at + 1).split("\\."); - StringBuilder ns = new StringBuilder(id.length()); - boolean first = true; - for (int i = domain.length - 1; i >= 0; --i) { - if (first) { - first = false; - } else { - ns.append('.'); - } - ns.append(domain[i]); - } - return ns.toString(); - } else { - return ""; - } - - } - - /** - * Validate Namespace of ID@Domain - * - * Namespace is reverse order of Domain. - * - * i.e. 
myid1234@myapp.att.com results in domain of com.att.myapp - * - * @param trans - * @param id - * @return - */ - public Result validNSOfDomain(AuthzTrans trans, String id) { - // Take domain, reverse order, and check on NS - String ns; - if(id.indexOf('@')<0) { // it's already an ns, not an ID - ns = id; - } else { - ns = domain2ns(id); - } - if (ns.length() > 0) { - if(!trans.org().getDomain().equals(ns)) { - Result> rlnsd = nsDAO.read(trans, ns); - if (rlnsd.isOKhasData()) { - return Result.ok(rlnsd.value.get(0)); - } - } - } - return Result.err(Status.ERR_NsNotFound, - "A Namespace is not available for %s", id); - } - - public Result mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, Access access) { - // .access|:role:| - String ns = ndd.name; - int last; - do { - if (isGranted(trans, user, ns, "access", ":ns", access.name())) { - return Result.ok(ndd); - } - if ((last = ns.lastIndexOf('.')) >= 0) { - ns = ns.substring(0, last); - } - } while (last >= 0); - // .ns|::ns| - // AAF-724 - Make consistent response for May User", and not take the - // last check... too confusing. - Result rv = mayUserVirtueOfNS(trans, user, ndd, ":" + ndd.name + ":ns", access.name()); - if (rv.isOK()) { - return rv; - } else if(rv.status==Result.ERR_Backend) { - return Result.err(rv); - } else { - return Result.err(Status.ERR_Denied, "[%s] may not %s in NS [%s]", - user, access.name(), ndd.name); - } - } - - public Result mayUser(AuthzTrans trans, String user, RoleDAO.Data rdd, Access access) { - Result rnsd = deriveNs(trans, rdd.ns); - if (rnsd.isOK()) { - return mayUser(trans, user, rnsd.value, rdd, access); - } - return rnsd; - } - - public Result mayUser(AuthzTrans trans, String user, NsDAO.Data ndd, RoleDAO.Data rdd, Access access) { - // 1) Is User in the Role? - Result> rurd = userRoleDAO.readUserInRole(trans, user, rdd.fullName()); - if (rurd.isOKhasData()) { - return Result.ok(ndd); - } - - String roleInst = ":role:" + rdd.name; - // .access|:role:| - String ns = rdd.ns; - int last; - do { - if (isGranted(trans, user, ns,"access", roleInst, access.name())) { - return Result.ok(ndd); - } - if ((last = ns.lastIndexOf('.')) >= 0) { - ns = ns.substring(0, last); - } - } while (last >= 0); - - // Check if Access by Global Role perm - // .ns|::role:name| - Result rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" - + rdd.ns + roleInst, access.name()); - if (rnsd.isOK()) { - return rnsd; - } else if(rnsd.status==Result.ERR_Backend) { - return Result.err(rnsd); - } - - // Check if Access to Whole NS - // AAF-724 - Make consistent response for May User", and not take the - // last check... too confusing. 
- Result rv = mayUserVirtueOfNS(trans, user, ndd, - ":" + rdd.ns + ":ns", access.name()); - if (rv.isOK()) { - return rv; - } else if(rnsd.status==Result.ERR_Backend) { - return Result.err(rnsd); - } else { - return Result.err(Status.ERR_Denied, "[%s] may not %s Role [%s]", - user, access.name(), rdd.fullName()); - } - - } - - public Result mayUser(AuthzTrans trans, String user,PermDAO.Data pdd, Access access) { - Result rnsd = deriveNs(trans, pdd.ns); - if (rnsd.isOK()) { - return mayUser(trans, user, rnsd.value, pdd, access); - } - return rnsd; - } - - public Result mayUser(AuthzTrans trans, String user,NsDAO.Data ndd, PermDAO.Data pdd, Access access) { - if (isGranted(trans, user, pdd.ns, pdd.type, pdd.instance, pdd.action)) { - return Result.ok(ndd); - } - String permInst = ":perm:" + pdd.type + ':' + pdd.instance + ':' + pdd.action; - // .access|:role:| - String ns = ndd.name; - int last; - do { - if (isGranted(trans, user, ns, "access", permInst, access.name())) { - return Result.ok(ndd); - } - if ((last = ns.lastIndexOf('.')) >= 0) { - ns = ns.substring(0, last); - } - } while (last >= 0); - - // Check if Access by NS perm - // .ns|::role:name| - Result rnsd = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + permInst, access.name()); - if (rnsd.isOK()) { - return rnsd; - } else if(rnsd.status==Result.ERR_Backend) { - return Result.err(rnsd); - } - - // Check if Access to Whole NS - // AAF-724 - Make consistent response for May User", and not take the - // last check... too confusing. - Result rv = mayUserVirtueOfNS(trans, user, ndd, ":" + pdd.ns + ":ns", access.name()); - if (rv.isOK()) { - return rv; - } else { - return Result.err(Status.ERR_Denied, - "[%s] may not %s Perm [%s|%s|%s]", user, access.name(), - pdd.fullType(), pdd.instance, pdd.action); - } - - } - - public Result mayUser(AuthzTrans trans, DelegateDAO.Data dd, Access access) { - try { - boolean isUser = trans.user().equals(dd.user); - boolean isDelegate = dd.delegate != null - && (dd.user.equals(dd.delegate) || trans.user().equals( - dd.delegate)); - Organization org = trans.org(); - switch (access) { - case create: - if (org.getIdentity(trans, dd.user) == null) { - return Result.err(Status.ERR_UserNotFound, - "[%s] is not a user in the company database.", - dd.user); - } - if (!dd.user.equals(dd.delegate) && org.getIdentity(trans, dd.delegate) == null) { - return Result.err(Status.ERR_UserNotFound, - "[%s] is not a user in the company database.", - dd.delegate); - } - if (!trans.forceRequested() && dd.user != null && dd.user.equals(dd.delegate)) { - return Result.err(Status.ERR_BadData, - "[%s] cannot be a delegate for self", dd.user); - } - if (!isUser && !isGranted(trans, trans.user(), Define.ROOT_NS,DELG, - org.getDomain(), Question.CREATE)) { - return Result.err(Status.ERR_Denied, - "[%s] may not create a delegate for [%s]", - trans.user(), dd.user); - } - break; - case read: - case write: - if (!isUser && !isDelegate && - !isGranted(trans, trans.user(), Define.ROOT_NS,DELG,org.getDomain(), access.name())) { - return Result.err(Status.ERR_Denied, - "[%s] may not %s delegates for [%s]", trans.user(), - access.name(), dd.user); - } - break; - default: - return Result.err(Status.ERR_BadData,"Unknown Access type [%s]", access.name()); - } - } catch (Exception e) { - return Result.err(e); - } - return Result.ok(); - } - - /* - * Check (recursively, if necessary), if able to do something based on NS - */ - private Result mayUserVirtueOfNS(AuthzTrans trans, String user, NsDAO.Data nsd, String ns_and_type, String 
access) { - String ns = nsd.name; - - // If an ADMIN of the Namespace, then allow - - Result> rurd; - if ((rurd = userRoleDAO.readUserInRole(trans, user, nsd.name+ADMIN)).isOKhasData()) { - return Result.ok(nsd); - } else if(rurd.status==Result.ERR_Backend) { - return Result.err(rurd); - } - - // If Specially granted Global Permission - if (isGranted(trans, user, Define.ROOT_NS,NS, ns_and_type, access)) { - return Result.ok(nsd); - } - - // Check recur - - int dot = ns.length(); - if ((dot = ns.lastIndexOf('.', dot - 1)) >= 0) { - Result rnsd = deriveNs(trans, ns.substring(0, dot)); - if (rnsd.isOK()) { - rnsd = mayUserVirtueOfNS(trans, user, rnsd.value, ns_and_type,access); - } else if(rnsd.status==Result.ERR_Backend) { - return Result.err(rnsd); - } - if (rnsd.isOK()) { - return Result.ok(nsd); - } else if(rnsd.status==Result.ERR_Backend) { - return Result.err(rnsd); - } - } - return Result.err(Status.ERR_Denied, "%s may not %s %s", user, access, - ns_and_type); - } - - - /** - * isGranted - * - * Important function - Check internal Permission Schemes for Permission to - * do things - * - * @param trans - * @param type - * @param instance - * @param action - * @return - */ - public boolean isGranted(AuthzTrans trans, String user, String ns, String type,String instance, String action) { - Result> perms = getPermsByUser(trans, user, false); - if (perms.isOK()) { - for (PermDAO.Data pd : perms.value) { - if (ns.equals(pd.ns)) { - if (type.equals(pd.type)) { - if (PermEval.evalInstance(pd.instance, instance)) { - if(PermEval.evalAction(pd.action, action)) { // don't return action here, might miss other action - return true; - } - } - } - } - } - } - return false; - } - - public Result doesUserCredMatch(AuthzTrans trans, String user, byte[] cred) throws DAOException { - Result> result; - TimeTaken tt = trans.start("Read DB Cred", Env.REMOTE); - try { - result = credDAO.readID(trans, user); - } finally { - tt.done(); - } - - Result rv = null; - if(result.isOK()) { - if (result.isEmpty()) { - rv = Result.err(Status.ERR_UserNotFound, user); - if (willSpecialLog(trans,user)) { - trans.audit().log("Special DEBUG:", user, " does not exist in DB"); - } - } else { - Date now = new Date();//long now = System.currentTimeMillis(); - ByteBuffer md5=null; - - // Bug noticed 6/22. Sorting on the result can cause Concurrency Issues. 
- List cddl; - if(result.value.size() > 1) { - cddl = new ArrayList(result.value.size()); - for(CredDAO.Data old : result.value) { - if(old.type==CredDAO.BASIC_AUTH || old.type==CredDAO.BASIC_AUTH_SHA256) { - cddl.add(old); - } - } - if(cddl.size()>1) { - Collections.sort(cddl,new Comparator() { - @Override - public int compare(org.onap.aaf.dao.aaf.cass.CredDAO.Data a, - org.onap.aaf.dao.aaf.cass.CredDAO.Data b) { - return b.expires.compareTo(a.expires); - } - }); - } - } else { - cddl = result.value; - } - - for (CredDAO.Data cdd : cddl) { - if (cdd.expires.after(now)) { - try { - switch(cdd.type) { - case CredDAO.BASIC_AUTH: - if(md5==null) { - md5=ByteBuffer.wrap(Hash.encryptMD5(cred)); - } - if(md5.compareTo(cdd.cred)==0) { - return Result.ok(cdd.expires); - } else if (willSpecialLog(trans,user)) { - trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires); - } - break; - case CredDAO.BASIC_AUTH_SHA256: - ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.length); - bb.putInt(cdd.other); - bb.put(cred); - byte[] hash = Hash.hashSHA256(bb.array()); - - ByteBuffer sha256 = ByteBuffer.wrap(hash); - if(sha256.compareTo(cdd.cred)==0) { - return Result.ok(cdd.expires); - } else if (willSpecialLog(trans,user)) { - trans.audit().log("Special DEBUG:", user, "Client sent: ", trans.encryptor().encrypt(new String(cred)) ,cdd.expires); - } - break; - default: - trans.error().log("Unknown Credential Type %s for %s, %s",Integer.toString(cdd.type),cdd.id, Chrono.dateTime(cdd.expires)); - } - } catch (NoSuchAlgorithmException e) { - trans.error().log(e); - } - } else { - rv = Result.err(Status.ERR_Security, - "Credentials expired " + cdd.expires.toString()); - } - } // end for each - } - } else { - return Result.err(result); - } - return rv == null ? 
Result.create((Date) null, Status.ERR_Security, - "Wrong credential") : rv; - } - - - public Result userCredSetup(AuthzTrans trans, CredDAO.Data cred) { - if(cred.type==CredDAO.RAW) { - TimeTaken tt = trans.start("Hash Cred", Env.SUB); - try { - cred.type = CredDAO.BASIC_AUTH_SHA256; - cred.other = random.nextInt(); - ByteBuffer bb = ByteBuffer.allocate(Integer.SIZE + cred.cred.capacity()); - bb.putInt(cred.other); - bb.put(cred.cred); - byte[] hash = Hash.hashSHA256(bb.array()); - cred.cred = ByteBuffer.wrap(hash); - return Result.ok(cred); - } catch (NoSuchAlgorithmException e) { - return Result.err(Status.ERR_General,e.getLocalizedMessage()); - } finally { - tt.done(); - } - - } - return Result.err(Status.ERR_Security,"invalid/unreadable credential"); - } - - - public static final String APPROVED = "APPROVE"; - public static final String REJECT = "REJECT"; - public static final String PENDING = "PENDING"; - - public Result canAddUser(AuthzTrans trans, UserRoleDAO.Data data, - List approvals) { - // get the approval policy for the organization - - // get the list of approvals with an accept status - - // validate the approvals against the policy - - // for now check if all approvals are received and return - // SUCCESS/FAILURE/SKIP - boolean bReject = false; - boolean bPending = false; - - for (ApprovalDAO.Data approval : approvals) { - if (approval.status.equals(REJECT)) { - bReject = true; - } else if (approval.status.equals(PENDING)) { - bPending = true; - } - } - if (bReject) { - return Result.err(Status.ERR_Policy, - "Approval Polocy not conformed"); - } - if (bPending) { - return Result.err(Status.ERR_ActionNotCompleted, - "Required Approvals not received"); - } - - return Result.ok(); - } - - private static final String NO_CACHE_NAME = "No Cache Data named %s"; - - public Result clearCache(AuthzTrans trans, String cname) { - boolean all = "all".equals(cname); - Result rv = null; - - if (all || NsDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, NsDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, NsDAO.TABLE, seg); - } - if (all || PermDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, PermDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, PermDAO.TABLE,seg); - } - if (all || RoleDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, RoleDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, RoleDAO.TABLE,seg); - } - if (all || UserRoleDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, UserRoleDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, UserRoleDAO.TABLE,seg); - } - if (all || CredDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, CredDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, CredDAO.TABLE,seg); - } - if (all || CertDAO.TABLE.equals(cname)) { - int seg[] = series(NsDAO.CACHE_SEG); - for(int i: seg) {cacheClear(trans, CertDAO.TABLE,i);} - rv = cacheInfoDAO.touch(trans, CertDAO.TABLE,seg); - } - - if (rv == null) { - rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname); - } - return rv; - } - - public Result cacheClear(AuthzTrans trans, String cname,Integer segment) { - Result rv; - if (NsDAO.TABLE.equals(cname)) { - rv = nsDAO.invalidate(segment); - } else if (PermDAO.TABLE.equals(cname)) { - rv = permDAO.invalidate(segment); - } else if (RoleDAO.TABLE.equals(cname)) { - rv = roleDAO.invalidate(segment); - } else if 
(UserRoleDAO.TABLE.equals(cname)) { - rv = userRoleDAO.invalidate(segment); - } else if (CredDAO.TABLE.equals(cname)) { - rv = credDAO.invalidate(segment); - } else if (CertDAO.TABLE.equals(cname)) { - rv = certDAO.invalidate(segment); - } else { - rv = Result.err(Status.ERR_BadData, NO_CACHE_NAME, cname); - } - return rv; - } - - private int[] series(int max) { - int[] series = new int[max]; - for (int i = 0; i < max; ++i) - series[i] = i; - return series; - } - - public boolean isDelegated(AuthzTrans trans, String user, String approver) { - Result> userDelegatedFor = delegateDAO - .readByDelegate(trans, user); - for (DelegateDAO.Data curr : userDelegatedFor.value) { - if (curr.user.equals(approver) && curr.delegate.equals(user) - && curr.expires.after(new Date())) { - return true; - } - } - return false; - } - - public static boolean willSpecialLog(AuthzTrans trans, String user) { - Boolean b = trans.get(specialLogSlot, null); - if(b==null) { - if(specialLog==null) { - return false; - } else { - b = specialLog.contains(user); - trans.put(specialLogSlot, b); - } - } - return b; - } - - public static void logEncryptTrace(AuthzTrans trans, String data) { - long ti; - trans.put(transIDSlot, ti=nextTraceID()); - trans.trace().log("id="+Long.toHexString(ti)+",data=\""+trans.env().encryptor().encrypt(data)+'"'); - } - - private synchronized static long nextTraceID() { - return ++traceID; - } - - public static synchronized boolean specialLogOn(AuthzTrans trans, String id) { - if (specialLog == null) { - specialLog = new HashSet(); - } - boolean rc = specialLog.add(id); - if(rc) { - trans.trace().log("Trace on for",id); - } - return rc; - } - - public static synchronized boolean specialLogOff(AuthzTrans trans, String id) { - if(specialLog==null) { - return false; - } - boolean rv = specialLog.remove(id); - if (specialLog.isEmpty()) { - specialLog = null; - } - if(rv) { - trans.trace().log("Trace off for",id); - } - return rv; - } - - /** - * canMove - * Which Types can be moved - * @param nsType - * @return - */ - public boolean canMove(NsType nsType) { - boolean rv; - switch(nsType) { - case DOT: - case ROOT: - case COMPANY: - case UNKNOWN: - rv = false; - break; - default: - rv = true; - } - return rv; - } - - public Result isOwnerSponsor(AuthzTrans trans, String user, String ns, Identity mechID) { - - Identity caller; - Organization org = trans.org(); - try { - caller = org.getIdentity(trans, user); - if(caller==null || !caller.isFound()) { - return Result.err(Status.ERR_NotFound,"%s is not a registered %s entity",user,org.getName()); - } - } catch (Exception e) { - return Result.err(e); - } - String sponsor = mechID.responsibleTo(); - Result> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER); - boolean isOwner = false; - if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){ - if(urdd.expires.after(new Date())) { - isOwner = true; - } - }}; - if(!isOwner) { - return Result.err(Status.ERR_Policy,"%s is not a current owner of %s",user,ns); - } - - if(!caller.id().equals(sponsor)) { - return Result.err(Status.ERR_Denied,"%s is not the sponsor of %s",user,mechID.id()); - } - return Result.ok(sponsor); - } - - public boolean isAdmin(AuthzTrans trans, String user, String ns) { - Date now = new Date(); - Result> rur = userRoleDAO.read(trans, user,ns+ADMIN); - if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){ - if(urdd.expires.after(now)) { - return true; - } - }}; - return false; - } - - public boolean isOwner(AuthzTrans trans, String user, String ns) { - Result> rur = 
userRoleDAO.read(trans, user,ns+DOT_OWNER); - Date now = new Date(); - if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){ - if(urdd.expires.after(now)) { - return true; - } - }}; - return false; - } - - public int countOwner(AuthzTrans trans, String user, String ns) { - Result> rur = userRoleDAO.read(trans, user,ns+DOT_OWNER); - Date now = new Date(); - int count = 0; - if(rur.isOKhasData()) {for(UserRoleDAO.Data urdd : rur.value){ - if(urdd.expires.after(now)) { - ++count; - } - }}; - return count; - } - -} diff --git a/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java b/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java deleted file mode 100644 index 9e604438..00000000 --- a/authz-cass/src/main/java/org/onap/aaf/dao/session/SessionFilter.java +++ /dev/null @@ -1,142 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.session; - -import java.io.IOException; - -import javax.servlet.Filter; -import javax.servlet.FilterChain; -import javax.servlet.FilterConfig; -import javax.servlet.ServletException; -import javax.servlet.ServletRequest; -import javax.servlet.ServletResponse; - -import org.onap.aaf.cssa.rserv.TransFilter; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.EnvStore; -import org.onap.aaf.inno.env.Slot; -import org.onap.aaf.inno.env.TransStore; -import org.onap.aaf.inno.env.util.Pool; -import org.onap.aaf.inno.env.util.Pool.Creator; -import org.onap.aaf.inno.env.util.Pool.Pooled; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.Session; - -public class SessionFilter implements Filter { - public static final String SESSION_SLOT = "__SESSION__"; - private static Slot sessionSlot; - private static Pool pool; - - public SessionFilter(EnvStore env, Cluster cluster, String keyspace) { - synchronized(env) { - if(sessionSlot==null) { - sessionSlot = env.slot(SESSION_SLOT); - } - if(pool==null) { - pool = new Pool(new SessionCreator(env,cluster,keyspace)); - } - } - } - - @Override - public void init(FilterConfig fc) throws ServletException { - // Session does not need any sort of configuration from Filter - } - - @Override - public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException { - @SuppressWarnings("unchecked") - TRANS trans = (TRANS)req.getAttribute(TransFilter.TRANS_TAG); - try { - Pooled psess = pool.get(); - try { - trans.put(sessionSlot, psess.content); - chain.doFilter(req, resp); - } finally { - psess.done(); - } - } catch (APIException e) { - throw new ServletException(e); - } - } - - public Pooled load(TRANS trans) throws APIException { - Pooled psess = pool.get(); - trans.put(sessionSlot, psess.content); - return psess; - } - - - /** - * Clear will drain the pool, so that new Sessions will be constructed. - * - * Suitable for Management calls. 
- */ - public static void clear() { - if(pool!=null) { - pool.drain(); - } - } - - @Override - public void destroy() { - pool.drain(); - } - - private class SessionCreator implements Creator { - private Cluster cluster; - private String keyspace; - private Env env; - - public SessionCreator(Env env, Cluster cluster, String keyspace) { - this.cluster = cluster; - this.keyspace = keyspace; - this.env = env; - } - - @Override - public Session create() throws APIException { - env.info().log("Creating a Cassandra Session"); - return cluster.connect(keyspace); - } - - @Override - public void destroy(Session t) { - env.info().log("Shutting down a Cassandra Session"); - t.close(); - } - - @Override - public boolean isValid(Session t) { - return true; - } - - @Override - public void reuse(Session t) { - // Nothing is needed to reuse this Session - } - - } -} diff --git a/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java b/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java deleted file mode 100644 index 86bc1ab3..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/authz/cass/hl/JU_Question.java +++ /dev/null @@ -1,500 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.authz.cass.hl; - -import static junit.framework.Assert.assertEquals; -import static junit.framework.Assert.assertFalse; -import static junit.framework.Assert.assertTrue; - -import java.security.Principal; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.NsDAO; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO.Data; -import org.onap.aaf.dao.aaf.hl.Question; -import org.onap.aaf.dao.aaf.hl.Question.Access; -import org.onap.aaf.dao.aaf.test.AbsJUCass; - -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.TimeTaken; - -public class JU_Question extends AbsJUCass { - - private static final int EXPIRES_IN = 60000000; - private static final String COM_TEST_JU = "com.test.ju_question"; - private static final String JU9999_JU_TEST_COM = "ju9999@ju.test.com"; - private static final String JU9998_JU_TEST_COM = "ju9998@ju.test.com"; - private static final String READ = "read"; - private static final int NFR_1 = 80; - private static final int NFR_2 = 4000; - private static final int ROLE_LEVEL1 = 1000; - private static final int PERM_LEVEL1 = 1000; -// private static final int PERM_LEVEL2 = 20; - private static Question q; - private static NsDAO.Data ndd; - - @BeforeClass - public static void startupBeforeClass() throws Exception { - details=false; - AuthzTrans trans = env.newTransNoAvg(); - q = new Question(trans,cluster,AUTHZ, false); - ndd = new NsDAO.Data(); - ndd.name=COM_TEST_JU; - ndd.type=3; // app - ndd.parent="com.test"; - ndd.description="Temporary Namespace for JU_Question"; - q.nsDAO.create(trans, ndd); - } - - @AfterClass - public static void endAfterClass() throws Exception { - q.nsDAO.delete(trans, ndd,false); - } -// @Test - public void mayUserRead_EmptyPerm() { - PermDAO.Data pdd = new PermDAO.Data(); - Result result = q.mayUser(trans,JU9999_JU_TEST_COM,pdd,Access.read); - assertFalse(result.isOK()); - } - -// @Test - public void mayUserRead_OnePermNotExist() { - Result result = q.mayUser(trans,JU9999_JU_TEST_COM,newPerm(0,0,READ),Access.read); - assertFalse(result.isOK()); - assertEquals("Denied - ["+ JU9999_JU_TEST_COM +"] may not read Perm [" + COM_TEST_JU + ".myPerm0|myInstance0|read]",result.errorString()); - } - -// @Test - public void mayUserRead_OnePermExistDenied() { - PermDAO.Data perm = newPerm(0,0,READ); - q.permDAO.create(trans,perm); - try { - Result result; - TimeTaken tt = trans.start("q.mayUser...", Env.SUB); - try { - result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read); - } finally { - tt.done(); - assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis() result; - TimeTaken tt = trans.start("q.mayUser...", Env.SUB); - try { - result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read); - } finally { - tt.done(); - assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis()> pres; - TimeTaken tt = trans.start("q.getPerms...", Env.SUB); - try { - pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9999_JU_TEST_COM); - } finally { - tt.done(); - trans.info().log("filter_OnePermOneRleExistOK",tt); - assertTrue("NFR time < "+ NFR_1 + "ms",tt.millis() lrole = new ArrayList(); - List lur = 
new ArrayList(); - try { - q.permDAO.create(trans,perm); - for(int i=0;i result; - TimeTaken tt = trans.start("mayUserRead_OnePermMultiRoleExistOK", Env.SUB); - try { - result = q.mayUser(trans,JU9999_JU_TEST_COM,perm,Access.read); - } finally { - tt.done(); - env.info().log(tt,ROLE_LEVEL1,"iterations"); - assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis() lperm = new ArrayList(); - try { - for(int i=0;i result; - TimeTaken tt = trans.start("mayUserRead_MultiPermOneRoleExistOK", Env.SUB); - try { - result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(PERM_LEVEL1-1),Access.read); - } finally { - tt.done(); - env.info().log(tt,PERM_LEVEL1,"iterations"); - assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis() lperm = new ArrayList(); -// List lrole = new ArrayList(); -// List lur = new ArrayList(); -// -// try { -// RoleDAO.Data role; -// UserRoleDAO.Data ur; -// for(int i=0;i result; -// TimeTaken tt = trans.start("mayUserRead_MultiPermMultiRoleExistOK", Env.SUB); -// try { -// result = q.mayUser(trans,JU9999_JU_TEST_COM,lperm.get(ROLE_LEVEL1*PERM_LEVEL2-1),Access.read); -// } finally { -// tt.done(); -// env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role"); -// assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis() lperm = new ArrayList(); - List lrole = new ArrayList(); - List lur = new ArrayList(); - load(roleLevel, permLevel, lperm,lrole,lur); - - - Result> pres; - trans.setUser(new Principal() { - @Override - public String getName() { - return JU9999_JU_TEST_COM; - } - }); - - try { - TimeTaken group = trans.start(" Original Security Method (1st time)", Env.SUB); - try { - TimeTaken tt = trans.start(" Get User Perms for "+JU9998_JU_TEST_COM, Env.SUB); - try { - pres = q.getPermsByUser(trans,JU9998_JU_TEST_COM,true); - } finally { - tt.done(); - env.info().log(tt," Looked up (full) getPermsByUser for",JU9998_JU_TEST_COM); - } - assertTrue(pres.isOK()); - tt = trans.start(" q.mayUser", Env.SUB); - List reduced = new ArrayList(); - - try { - for(PermDAO.Data p : pres.value) { - Result r = q.mayUser(trans,JU9999_JU_TEST_COM,p,Access.read); - if(r.isOK()) { - reduced.add(p); - } - } - } finally { - tt.done(); - env.info().log(tt," reduced" + pres.value.size(),"perms","to",reduced.size()); - // assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis() lperm = new ArrayList(); - List lrole = new ArrayList(); - List lur = new ArrayList(); - load(roleLevel, permLevel, lperm,lrole,lur); - - try { - - Result> pres; - TimeTaken tt = trans.start(" mayUserRead_MultiPermMultiRoleExist_New New Filter", Env.SUB); - try { - pres = q.getPermsByUserFromRolesFilter(trans, JU9999_JU_TEST_COM, JU9998_JU_TEST_COM); - } finally { - tt.done(); - env.info().log(tt,lperm.size(),"perms",", ",lrole.size(),"role", lur.size(), "UserRoles"); -// assertTrue("NFR time < "+ NFR_2 + "ms",tt.millis() lperm , List lrole, List lur) { - RoleDAO.Data role; - UserRoleDAO.Data ur; - PermDAO.Data perm; - - int onethirdR=roleLevel/3; - int twothirdR=onethirdR*2; - int onethirdP=permLevel/3; - int twothirdP=onethirdP*2; - - for(int i=0;i lperm , List lrole, List lur) { - for(PermDAO.Data perm : lperm) { - q.permDAO.delete(trans, perm, false); - } - for(RoleDAO.Data role : lrole) { - q.roleDAO.delete(trans, role, false); - } - for(UserRoleDAO.Data ur : lur) { - q.userRoleDAO.delete(trans, ur, false); - } - - } - private PermDAO.Data newPerm(int permNum, int instNum, String action, RoleDAO.Data ... 
grant) { - PermDAO.Data pdd = new PermDAO.Data(); - pdd.ns=COM_TEST_JU; - pdd.type="myPerm"+permNum; - pdd.instance="myInstance"+instNum; - pdd.action=action; - for(RoleDAO.Data r : grant) { - pdd.roles(true).add(r.fullName()); - r.perms(true).add(pdd.encode()); - } - return pdd; - } - - private RoleDAO.Data newRole(int roleNum, PermDAO.Data ... grant) { - RoleDAO.Data rdd = new RoleDAO.Data(); - rdd.ns = COM_TEST_JU+roleNum; - rdd.name = "myRole"+roleNum; - for(PermDAO.Data p : grant) { - rdd.perms(true).add(p.encode()); - p.roles(true).add(rdd.fullName()); - } - return rdd; - } - - private UserRoleDAO.Data newUserRole(RoleDAO.Data role,String user, long offset) { - UserRoleDAO.Data urd = new UserRoleDAO.Data(); - urd.user=user; - urd.role(role); - urd.expires=new Date(System.currentTimeMillis()+offset); - return urd; - } - - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java deleted file mode 100644 index aa0785a4..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/JU_Cached.java +++ /dev/null @@ -1,127 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import static org.junit.Assert.*; - -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Timer; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.onap.aaf.authz.env.AuthzEnv; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.cache.Cache; -import org.onap.aaf.cache.Cache.Dated; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.Cached; -import org.onap.aaf.dao.Cached.Getter; -import org.powermock.modules.junit4.PowerMockRunner; - -//import org.onap.aaf.dao.Cached.Refresh; -import org.onap.aaf.inno.env.Trans; - -@RunWith(PowerMockRunner.class) -public class JU_Cached { - Cached cached; - @Mock - CIDAO ciDaoMock; - @Mock - AuthzEnv authzEnvMock; - @Mock - CIDAO cidaoATMock; - - String name = "nameString"; - - @Before - public void setUp(){ - cached = new Cached(ciDaoMock, name, 0); - } - - @Test(expected=ArithmeticException.class) - public void testCachedIdx(){ - int Result = cached.cacheIdx("1234567890"); - } - - @Test(expected=ArithmeticException.class) - public void testInvalidate(){ - int Res = cached.invalidate(name); - } - - @SuppressWarnings("static-access") - @Test - public void testStopTimer(){ - cached.stopTimer(); - assertTrue(true); - } - - @SuppressWarnings("static-access") - @Test - public void testStartRefresh(){ - cached.startRefresh(authzEnvMock, cidaoATMock); - assertTrue(true); - } -// @Mock -// Trans transMock; -// @Mock -// Getter getterMock; -// -// @Test -// public void testGet(){ -// cached.get(transMock, name, getterMock); -// fail("not implemented"); -// } -// -// @SuppressWarnings("unchecked") -// public Result> get(TRANS trans, String key, Getter getter) { -// List ld = null; -// Result> rld = null; -// -// int cacheIdx = cacheIdx(key); -// Map map = ((Map)cache[cacheIdx]); -// -// // Check for saved element in cache -// Dated cached = map.get(key); -// // Note: These Segment Timestamps are kept up to date with DB -// Date dbStamp = info.get(trans, name,cacheIdx); -// -// // Check for cache Entry and whether it is still good (a good Cache Entry is same or after DBEntry, so we use "before" syntax) -// if(cached!=null && dbStamp.before(cached.timestamp)) { -// ld = (List)cached.data; -// rld = Result.ok(ld); -// } else { -// rld = getter.get(); -// if(rld.isOK()) { // only store valid lists -// map.put(key, new Dated(rld.value)); // successful item found gets put in cache -//// } else if(rld.status == Result.ERR_Backend){ -//// map.remove(key); -// } -// } -// return rld; -// } -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java deleted file mode 100644 index 3bb78d29..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CachedDAO.java +++ /dev/null @@ -1,66 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.List; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.onap.aaf.dao.CIDAO; -import org.onap.aaf.dao.CachedDAO; -import org.onap.aaf.dao.DAO; -import org.powermock.modules.junit4.PowerMockRunner; - -import org.onap.aaf.inno.env.Trans; - -@RunWith(PowerMockRunner.class) -public class JU_CachedDAO { - CachedDAO cachedDAO; - @Mock - DAO daoMock; - @Mock - CIDAO ciDAOMock; - int segsize=1; - Object[ ] objs = new Object[2]; - - @Before - public void setUp(){ - objs[0] = "helo"; - objs[1] = "polo"; - cachedDAO = new CachedDAO(daoMock, ciDAOMock, segsize); - } - - @Test - public void testKeyFromObjs(){ - String result = cachedDAO.keyFromObjs(objs); - System.out.println("value of resut " +result); - assertTrue(true); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java deleted file mode 100644 index 41443fb3..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassAccess.java +++ /dev/null @@ -1,74 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import static org.junit.Assert.*; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.onap.aaf.dao.CassAccess; -import org.powermock.modules.junit4.PowerMockRunner; - -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -//import org.onap.aaf.dao.CassAccess.Resettable; -import com.datastax.driver.core.Cluster.Builder; - -@RunWith(PowerMockRunner.class) -public class JU_CassAccess { - CassAccess cassAccess; - - public static final String KEYSPACE = "authz"; - public static final String CASSANDRA_CLUSTERS = "cassandra.clusters"; - public static final String CASSANDRA_CLUSTERS_PORT = "cassandra.clusters.port"; - public static final String CASSANDRA_CLUSTERS_USER_NAME = "cassandra.clusters.user"; - public static final String CASSANDRA_CLUSTERS_PASSWORD = "cassandra.clusters.password"; - public static final String CASSANDRA_RESET_EXCEPTIONS = "cassandra.reset.exceptions"; - public static final String LATITUDE = "LATITUDE"; - public static final String LONGITUDE = "LONGITUDE"; - //private static final List resetExceptions = new ArrayList(); - public static final String ERR_ACCESS_MSG = "Accessing Backend"; - private static Builder cb = null; - @Mock - Env envMock; - String prefix=null; - - @Before - public void setUp(){ - cassAccess = new CassAccess(); - } - - - @Test(expected=APIException.class) - public void testCluster() throws APIException, IOException { - cassAccess.cluster(envMock, prefix); - - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java deleted file mode 100644 index 34106e26..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/JU_CassDAOImpl.java +++ /dev/null @@ -1,97 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import static org.junit.Assert.*; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.dao.CassDAOImpl; -import org.onap.aaf.dao.Loader; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.modules.junit4.PowerMockRunner; - -import org.onap.aaf.inno.env.Data; -import org.onap.aaf.inno.env.Trans; -import org.onap.aaf.inno.env.TransStore; -import com.datastax.driver.core.Cluster; -import com.datastax.driver.core.ConsistencyLevel; - -@RunWith(PowerMockRunner.class) -public class JU_CassDAOImpl { - -public static final String CASS_READ_CONSISTENCY="cassandra.readConsistency"; -public static final String CASS_WRITE_CONSISTENCY="cassandra.writeConsistency"; - -CassDAOImpl cassDAOImpl; - - -@Mock -TransStore transStoreMock; -@SuppressWarnings("rawtypes") -Class dcMock; -@SuppressWarnings("rawtypes") -Loader loaderMock; -Cluster clusterMock; -Class classDataMock; -ConsistencyLevel consistencyLevelMock; -Trans transMock; - -@Mock -AuthzTrans authzTransMock; - - - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Before - public void setUp() - { - String name = "name"; - String keySpace = "keySpace"; - String table = "table"; - cassDAOImpl = new CassDAOImpl(transStoreMock, name, clusterMock, keySpace, classDataMock, table, consistencyLevelMock, consistencyLevelMock); - } - - - @Test - public void testReadConsistency() { - String table = "users"; - PowerMockito.when(authzTransMock.getProperty(CASS_READ_CONSISTENCY+'.'+table)).thenReturn("TWO"); - ConsistencyLevel consistencyLevel = cassDAOImpl.readConsistency(authzTransMock, table); - System.out.println("Consistency level" + consistencyLevel.name()); - assertEquals("TWO", consistencyLevel.name()); - } - - @Test - public void testWriteConsistency() { - String table = "users"; - PowerMockito.when(authzTransMock.getProperty(CASS_WRITE_CONSISTENCY+'.'+table)).thenReturn(null); - ConsistencyLevel consistencyLevel = cassDAOImpl.writeConsistency(authzTransMock, table); - System.out.println("Consistency level" + consistencyLevel.name()); - assertEquals("ONE", consistencyLevel.name()); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java b/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java deleted file mode 100644 index 4c3b11c7..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/JU_DAOException.java +++ /dev/null @@ -1,50 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao; - -import static org.junit.Assert.*; - -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.onap.aaf.dao.DAOException; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.modules.junit4.PowerMockRunner; - -@RunWith(PowerMockRunner.class) -public class JU_DAOException { -DAOException daoException; - - //DAOException daoException = new DAOException(); - String message = "message"; - Throwable cause; - @Before - public void setUp(){ - daoException = new DAOException(); - } - - @Test - public void test(){ - assertTrue(true); - } -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java deleted file mode 100644 index 887f88b2..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/AbsJUCass.java +++ /dev/null @@ -1,200 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.security.NoSuchAlgorithmException; -import java.util.Properties; - -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.onap.aaf.authz.env.AuthzEnv; -import org.onap.aaf.authz.env.AuthzTrans; -import org.onap.aaf.dao.CassAccess; -import org.onap.aaf.dao.CassDAOImpl; - -import org.onap.aaf.cadi.Hash; -import org.onap.aaf.cadi.Symm; -import org.onap.aaf.inno.env.APIException; -import org.onap.aaf.inno.env.Env; -import org.onap.aaf.inno.env.Trans.Metric; -import com.datastax.driver.core.Cluster; - -import junit.framework.Assert; - -/** - * Do Setup of Cassandra for Cassandra JUnit Testing - * - * - */ -public class AbsJUCass { - protected static final String AUTHZ = "authz"; - protected static Cluster cluster; - protected static AuthzEnv env; - protected static int iterations = 0; - protected static float totals=0.0f; - protected static float remote = 0.0f; - protected static float json = 0.0f; - protected static AuthzTrans trans; - protected static boolean details = true; - - @BeforeClass - public static void startup() throws APIException, IOException { - synchronized(AUTHZ) { - if(env==null) { - final String resource = "cadi.properties"; - File f = new File("etc" + resource); - InputStream is=null; - Properties props = new Properties(); - try { - if(f.exists()) { - is = new FileInputStream(f); - } else { - URL rsrc = ClassLoader.getSystemResource(resource); - is = rsrc.openStream(); - } - props.load(is); - } finally { - if(is==null) { - env= new AuthzEnv(); - Assert.fail(resource + " must exist in etc dir, or in Classpath"); - } - is.close(); - } - env = new AuthzEnv(props); - } - } - cluster = CassAccess.cluster(env,"LOCAL"); - - env.info().log("Connecting to Cluster"); - try { - cluster.connect(AUTHZ); - } catch(Exception e) { - cluster=null; - env.error().log(e); - Assert.fail("Not able to connect to DB: " + e.getLocalizedMessage()); - } - env.info().log("Connected"); - - // Load special data here - - // WebPhone - env.setProperty("java.naming.provider.url","ldap://ldap.webphone.att.com:389"); - env.setProperty("com.sun.jndi.ldap.connect.pool","true"); - - iterations = 0; - - } - - @AfterClass - public static void shutdown() { - if(cluster!=null) { - cluster.close(); - cluster = null; - } - } - - @Before - public void newTrans() { - trans = env.newTrans(); - - trans.setProperty(CassDAOImpl.USER_NAME, System.getProperty("user.name")); - } - - @After - public void auditTrail() { - if(totals==0) { // "updateTotals()" was not called... 
just do one Trans - StringBuilder sb = new StringBuilder(); - Metric metric = trans.auditTrail(4, sb, Env.JSON, Env.REMOTE); - if(details) { - env.info().log( - sb, - "Total time:", - totals += metric.total, - "JSON time: ", - metric.buckets[0], - "REMOTE time: ", - metric.buckets[1] - ); - } else { - totals += metric.total; - } - } - } - - protected void updateTotals() { - Metric metric = trans.auditTrail(0, null, Env.JSON, Env.REMOTE); - totals+=metric.total; - json +=metric.buckets[0]; - remote+=metric.buckets[1]; - } - - - @AfterClass - public static void print() { - float transTime; - if(iterations==0) { - transTime=totals; - } else { - transTime=totals/iterations; - } - env.info().log( - "Total time:", - totals, - "JSON time:", - json, - "REMOTE time:", - remote, - "Iterations:", - iterations, - "Transaction time:", - transTime - ); - } - - /** - * Take a User/Pass and turn into an MD5 Hashed BasicAuth - * - * @param user - * @param pass - * @return - * @throws IOException - * @throws NoSuchAlgorithmException - */ - public static byte[] userPassToBytes(String user, String pass) - throws IOException, NoSuchAlgorithmException { - // Take the form of BasicAuth, so as to allow any character in Password - // (this is an issue in 1.0) - // Also, it makes it quicker to evaluate Basic Auth direct questions - String ba = Symm.base64url.encode(user + ':' + pass); - // Take MD5 Hash, so that data in DB can't be reversed out. - return Hash.encryptMD5(ba.getBytes()); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java deleted file mode 100644 index 46720c37..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ApprovalDAO.java +++ /dev/null @@ -1,147 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotSame; -import static org.junit.Assert.assertTrue; - -import java.util.Date; -import java.util.List; -import java.util.UUID; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.ApprovalDAO; -import org.onap.aaf.dao.aaf.cass.ApprovalDAO.Data; - -public class JU_ApprovalDAO extends AbsJUCass { - @Test - public void testCRUD() throws Exception { - ApprovalDAO rrDAO = new ApprovalDAO(trans, cluster, AUTHZ); - ApprovalDAO.Data data = new ApprovalDAO.Data(); - - data.ticket = UUID.randomUUID(); // normally, read from Future object - data.user = "testid@test.com"; - data.approver = "mySuper@att.com"; - data.type = "supervisor"; - data.status = "pending"; - data.operation = "C"; - data.updated = new Date(); - - try { - // Test create - rrDAO.create(trans, data); - - // Test Read by Ticket - Result> rlad; - rlad = rrDAO.readByTicket(trans, data.ticket); - assertTrue(rlad.isOK()); - assertEquals(1,rlad.value.size()); - compare(data,rlad.value.get(0)); - - // Hold onto original ID for deletion, and read tests - UUID id = rlad.value.get(0).id; - - try { - // Test Read by User - rlad = rrDAO.readByUser(trans, data.user); - assertTrue(rlad.isOKhasData()); - boolean ok = false; - for(ApprovalDAO.Data a : rlad.value) { - if(a.id.equals(id)) { - ok = true; - compare(data,a); - } - } - assertTrue(ok); - - // Test Read by Approver - rlad = rrDAO.readByApprover(trans, data.approver); - assertTrue(rlad.isOKhasData()); - ok = false; - for(ApprovalDAO.Data a : rlad.value) { - if(a.id.equals(id)) { - ok = true; - compare(data,a); - } - } - assertTrue(ok); - - // Test Read by ID - rlad = rrDAO.read(trans, id); - assertTrue(rlad.isOKhasData()); - ok = false; - for(ApprovalDAO.Data a : rlad.value) { - if(a.id.equals(id)) { - ok = true; - compare(data,a); - } - } - assertTrue(ok); - - // Test Update - data.status = "approved"; - data.id = id; - assertTrue(rrDAO.update(trans, data).isOK()); - - rlad = rrDAO.read(trans, id); - assertTrue(rlad.isOKhasData()); - ok = false; - for(ApprovalDAO.Data a : rlad.value) { - if(a.id.equals(id)) { - ok = true; - compare(data,a); - } - } - assertTrue(ok); - - } finally { - // Delete - data.id = id; - rrDAO.delete(trans, data, true); - rlad = rrDAO.read(trans, id); - assertTrue(rlad.isOK()); - assertTrue(rlad.isEmpty()); - } - - } finally { - rrDAO.close(trans); - } - } - - private void compare(Data d1, Data d2) { - assertNotSame(d1.id,d2.id); - assertEquals(d1.ticket,d2.ticket); - assertEquals(d1.user,d2.user); - assertEquals(d1.approver,d2.approver); - assertEquals(d1.type,d2.type); - assertEquals(d1.status,d2.status); - assertEquals(d1.operation,d2.operation); - assertNotSame(d1.updated,d2.updated); - } - - - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java deleted file mode 100644 index 0c92dc75..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_ArtiDAO.java +++ /dev/null @@ -1,137 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. 
All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.security.NoSuchAlgorithmException; -import java.util.Date; -import java.util.List; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.ArtiDAO; -import org.onap.aaf.dao.aaf.cass.ArtiDAO.Data; - -/** - * UserDAO unit test. - * User: tp007s - * Date: 7/19/13 - */ -public class JU_ArtiDAO extends AbsJUCass { - @Test - public void test() throws IOException, NoSuchAlgorithmException { - ArtiDAO adao = new ArtiDAO(trans,cluster,"authz"); - try { - // Create - ArtiDAO.Data data = new ArtiDAO.Data(); - data.mechid="m55555@perturbed.att.com"; - data.machine="perturbed1232.att.com"; - data.type(false).add("file"); - data.type(false).add("jks"); - data.sponsor="Fred Flintstone"; - data.ca="devl"; - data.dir="/opt/app/aft/keys"; - data.appName="kumquat"; - data.os_user="aft"; - data.notify="email:myname@bogus.email.com"; - data.expires=new Date(); - -// Bytification - ByteBuffer bb = data.bytify(); - Data bdata = new ArtiDAO.Data(); - bdata.reconstitute(bb); - checkData1(data, bdata); - - -// DB work - adao.create(trans,data); - try { - // Validate Read with key fields in Data - Result> rlcd = adao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(ArtiDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Validate Read with key fields in Data - rlcd = adao.read(trans,data.mechid, data.machine); - assertTrue(rlcd.isOKhasData()); - for(ArtiDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // By Machine - rlcd = adao.readByMachine(trans,data.machine); - assertTrue(rlcd.isOKhasData()); - for(ArtiDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // By MechID - rlcd = adao.readByMechID(trans,data.mechid); - assertTrue(rlcd.isOKhasData()); - for(ArtiDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Update - data.sponsor = "Wilma Flintstone"; - adao.update(trans,data); - rlcd = adao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(ArtiDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - } finally { - // Always delete data, even if failure. 
- adao.delete(trans,data, true); - } - } finally { - adao.close(trans); - } - - - } - - private void checkData1(Data data, Data d) { - assertEquals(data.mechid,d.mechid); - assertEquals(data.machine,d.machine); - assertEquals(data.type(false).size(),d.type(false).size()); - for(String s: data.type(false)) { - assertTrue(d.type(false).contains(s)); - } - assertEquals(data.sponsor,d.sponsor); - assertEquals(data.ca,d.ca); - assertEquals(data.dir,d.dir); - assertEquals(data.appName,d.appName); - assertEquals(data.os_user,d.os_user); - assertEquals(data.notify,d.notify); - assertEquals(data.expires,d.expires); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java deleted file mode 100644 index 65efef40..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_Bytification.java +++ /dev/null @@ -1,266 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Date; - -import org.junit.Test; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.NsDAO; -import org.onap.aaf.dao.aaf.cass.NsType; -import org.onap.aaf.dao.aaf.cass.PermDAO; -import org.onap.aaf.dao.aaf.cass.RoleDAO; -import org.onap.aaf.dao.aaf.cass.UserRoleDAO; - -public class JU_Bytification { - - @Test - public void testNS() throws IOException { - - // Normal - NsDAO.Data ns = new NsDAO.Data(); - ns.name = "com.att."; - ns.type = NsType.APP.type; - - ByteBuffer bb = ns.bytify(); - - NsDAO.Data nsr = new NsDAO.Data(); - nsr.reconstitute(bb); - check(ns,nsr); - - // Empty admin -// ns.admin(true).clear(); - bb = ns.bytify(); - nsr = new NsDAO.Data(); - nsr.reconstitute(bb); - check(ns,nsr); - - // Empty responsible -// ns.responsible(true).clear(); - bb = ns.bytify(); - nsr = new NsDAO.Data(); - nsr.reconstitute(bb); - check(ns,nsr); - - bb = ns.bytify(); - nsr = new NsDAO.Data(); - nsr.reconstitute(bb); - check(ns,nsr); - } - - private void check(NsDAO.Data a, NsDAO.Data b) { - assertEquals(a.name,b.name); - assertEquals(a.type,b.type); -// assertEquals(a.admin.size(),b.admin.size()); - -// for(String s: a.admin) { -// assertTrue(b.admin.contains(s)); -// } -// -// assertEquals(a.responsible.size(),b.responsible.size()); -// for(String s: a.responsible) { -// assertTrue(b.responsible.contains(s)); -// } - } - - @Test - public void testRole() throws IOException { - RoleDAO.Data rd1 = new RoleDAO.Data(); - rd1.ns = "com.att."; - rd1.name = "my.role"; - rd1.perms(true).add("com.att..my.Perm|myInstance|myAction"); - rd1.perms(true).add("com.att..my.Perm|myInstance|myAction2"); - - // Normal - ByteBuffer bb = rd1.bytify(); - RoleDAO.Data rd2 = new RoleDAO.Data(); - rd2.reconstitute(bb); - check(rd1,rd2); - - // Overshoot Buffer - StringBuilder sb = new StringBuilder(300); - sb.append("role|instance|veryLongAction..."); - for(int i=0;i<280;++i) { - sb.append('a'); - } - rd1.perms(true).add(sb.toString()); - bb = rd1.bytify(); - rd2 = new RoleDAO.Data(); - rd2.reconstitute(bb); - check(rd1,rd2); - - // No Perms - rd1.perms.clear(); - - bb = rd1.bytify(); - rd2 = new RoleDAO.Data(); - rd2.reconstitute(bb); - check(rd1,rd2); - - // 1000 Perms - for(int i=0;i<1000;++i) { - rd1.perms(true).add("com|inst|action"+ i); - } - - bb = rd1.bytify(); - rd2 = new RoleDAO.Data(); - rd2.reconstitute(bb); - check(rd1,rd2); - - } - - private void check(RoleDAO.Data a, RoleDAO.Data b) { - assertEquals(a.ns,b.ns); - assertEquals(a.name,b.name); - - assertEquals(a.perms.size(),b.perms.size()); - for(String s: a.perms) { - assertTrue(b.perms.contains(s)); - } - } - - @Test - public void testPerm() throws IOException { - PermDAO.Data pd1 = new PermDAO.Data(); - pd1.ns = "com.att."; - pd1.type = "my.perm"; - pd1.instance = "instance"; - pd1.action = "read"; - pd1.roles(true).add("com.att..my.Role"); - pd1.roles(true).add("com.att..my.Role2"); - - // Normal - ByteBuffer bb = pd1.bytify(); - PermDAO.Data rd2 = new PermDAO.Data(); - rd2.reconstitute(bb); - check(pd1,rd2); - - // No Perms - pd1.roles.clear(); - - bb = pd1.bytify(); - rd2 = new PermDAO.Data(); - rd2.reconstitute(bb); - check(pd1,rd2); - - // 1000 Perms - for(int i=0;i<1000;++i) { - pd1.roles(true).add("com.att..my.Role"+ i); - } - - bb = 
pd1.bytify(); - rd2 = new PermDAO.Data(); - rd2.reconstitute(bb); - check(pd1,rd2); - - } - - private void check(PermDAO.Data a, PermDAO.Data b) { - assertEquals(a.ns,b.ns); - assertEquals(a.type,b.type); - assertEquals(a.instance,b.instance); - assertEquals(a.action,b.action); - - assertEquals(a.roles.size(),b.roles.size()); - for(String s: a.roles) { - assertTrue(b.roles.contains(s)); - } - } - - @Test - public void testUserRole() throws IOException { - UserRoleDAO.Data urd1 = new UserRoleDAO.Data(); - urd1.user = "myname@abc.att.com"; - urd1.role("com.att.","my.role"); - urd1.expires = new Date(); - - // Normal - ByteBuffer bb = urd1.bytify(); - UserRoleDAO.Data urd2 = new UserRoleDAO.Data(); - urd2.reconstitute(bb); - check(urd1,urd2); - - // A null - urd1.expires = null; - urd1.role = null; - - bb = urd1.bytify(); - urd2 = new UserRoleDAO.Data(); - urd2.reconstitute(bb); - check(urd1,urd2); - } - - private void check(UserRoleDAO.Data a, UserRoleDAO.Data b) { - assertEquals(a.user,b.user); - assertEquals(a.role,b.role); - assertEquals(a.expires,b.expires); - } - - - @Test - public void testCred() throws IOException { - CredDAO.Data cd = new CredDAO.Data(); - cd.id = "m55555@abc.att.com"; - cd.ns = "com.att.abc"; - cd.type = 2; - cd.cred = ByteBuffer.wrap(new byte[]{1,34,5,3,25,0,2,5,3,4}); - cd.expires = new Date(); - - // Normal - ByteBuffer bb = cd.bytify(); - CredDAO.Data cd2 = new CredDAO.Data(); - cd2.reconstitute(bb); - check(cd,cd2); - - // nulls - cd.expires = null; - cd.cred = null; - - bb = cd.bytify(); - cd2 = new CredDAO.Data(); - cd2.reconstitute(bb); - check(cd,cd2); - - } - - private void check(CredDAO.Data a, CredDAO.Data b) { - assertEquals(a.id,b.id); - assertEquals(a.ns,b.ns); - assertEquals(a.type,b.type); - if(a.cred==null) { - assertEquals(a.cred,b.cred); - } else { - int l = a.cred.limit(); - assertEquals(l,b.cred.limit()); - for (int i=0;i id = new CacheInfoDAO(trans, cluster, AUTHZ); - Date date = new Date(); - - id.touch(trans, RoleDAO.TABLE,1); - try { - Thread.sleep(3000); - } catch (InterruptedException e) { - } - Result rid = id.check(trans); - Assert.assertEquals(rid.status,Status.OK); - Date[] dates = CacheInfoDAO.info.get(RoleDAO.TABLE); - if(dates.length>0 && dates[1]!=null) { - System.out.println(Chrono.dateStamp(dates[1])); - System.out.println(Chrono.dateStamp(date)); - Assert.assertTrue(Math.abs(dates[1].getTime() - date.getTime())<20000); // allow for 4 seconds, given Remote DB - } - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java deleted file mode 100644 index 498f8ce6..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CertDAO.java +++ /dev/null @@ -1,105 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. 
- * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.math.BigInteger; -import java.nio.ByteBuffer; -import java.security.NoSuchAlgorithmException; -import java.util.List; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.CertDAO; -import org.onap.aaf.dao.aaf.cass.CertDAO.Data; - -import org.onap.aaf.inno.env.APIException; - -/** - * UserDAO unit test. - * User: tp007s - * Date: 7/19/13 - */ -public class JU_CertDAO extends AbsJUCass { - @Test - public void test() throws IOException, NoSuchAlgorithmException, APIException { - CertDAO cdao = new CertDAO(trans,cluster,"authz"); - try { - // Create - CertDAO.Data data = new CertDAO.Data(); - data.serial=new BigInteger("11839383"); - data.id = "m55555@tguard.att.com"; - data.x500="CN=ju_cert.dao.att.com, OU=AAF, O=\"ATT Services, Inc.\", L=Southfield, ST=Michigan, C=US"; - data.x509="I'm a cert"; - data.ca = "aaf"; - cdao.create(trans,data); - -// Bytification - ByteBuffer bb = data.bytify(); - Data bdata = new CertDAO.Data(); - bdata.reconstitute(bb); - checkData1(data, bdata); - - // Validate Read with key fields in Data - Result> rlcd = cdao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CertDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Validate Read with key fields in Data - rlcd = cdao.read(trans,data.ca,data.serial); - assertTrue(rlcd.isOKhasData()); - for(CertDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Update - data.id = "m66666.tguard.att.com"; - cdao.update(trans,data); - rlcd = cdao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CertDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - cdao.delete(trans,data, true); - } finally { - cdao.close(trans); - } - - - } - - private void checkData1(Data data, Data d) { - assertEquals(data.ca,d.ca); - assertEquals(data.serial,d.serial); - assertEquals(data.id,d.id); - assertEquals(data.x500,d.x500); - assertEquals(data.x509,d.x509); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java deleted file mode 100644 index 3cf860ae..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_CredDAO.java +++ /dev/null @@ -1,252 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. 
- * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.security.NoSuchAlgorithmException; -import java.util.Date; -import java.util.List; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.CredDAO.Data; - -import org.onap.aaf.inno.env.APIException; - -/** - * UserDAO unit test. - * User: tp007s - * Date: 7/19/13 - */ -public class JU_CredDAO extends AbsJUCass { - @Test - public void test() throws IOException, NoSuchAlgorithmException, APIException { - CredDAO udao = new CredDAO(trans,cluster,"authz"); - try { - // Create - CredDAO.Data data = new CredDAO.Data(); - data.id = "m55555@aaf.att.com"; - data.type = CredDAO.BASIC_AUTH; - data.notes = "temp pass"; - data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mypass")); - data.other = 12; - data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90); - udao.create(trans,data); - -// Bytification - ByteBuffer bb = data.bytify(); - Data bdata = new CredDAO.Data(); - bdata.reconstitute(bb); - checkData1(data, bdata); - - // Validate Read with key fields in Data - Result> rlcd = udao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CredDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Update - data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass")); - udao.update(trans,data); - rlcd = udao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CredDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - udao.delete(trans,data, true); - } finally { - udao.close(trans); - } - - - } - - private void checkData1(Data data, Data d) { - assertEquals(data.id,d.id); - assertEquals(data.type,d.type); - assertEquals(data.ns,d.ns); - assertEquals(data.notes,d.notes); - assertEquals(data.cred,d.cred); - assertEquals(data.other,d.other); - assertEquals(data.expires,d.expires); - } - -// private String CONST_myName = "MyName"; -// public static final java.nio.ByteBuffer CONST_MY_CRED = get_CONST_MY_CRED(); -// public static final int CONST_CRED_TYPE = 11; -// -// public static final Date CONST_UPDATE_DATE = new Date(System.currentTimeMillis()+60000*24); -// @Test -// public void test() { -// UserDAO ud = new UserDAO(trans, cluster,"authz"); -// try { -// UserDAO.Data data = createPrototypeUserData(); -// ud.create(trans, data); -// -// // Validate Read with key fields in Data -// for(UserDAO.Data d : ud.read(trans, data)) { -// checkData1(data,d); -// } -// -// // Validate 
readByName -// for(UserDAO.Data d : ud.read(trans, CONST_myName)) { -// checkData1(data,d); -// } -// -// ud.delete(trans, data); -// List d_2 = ud.read(trans, CONST_myName); -// -// // Validate that data was deleted -// assertEquals("User should not be found after deleted", 0, d_2.size() ); -// -// data = new UserDAO.Data(); -// data.name = CONST_myName; -// data.cred = CONST_MY_CRED; -// data.cred_type= CONST_CRED_TYPE; -// data.expires = new Date(System.currentTimeMillis()+60000*24); -// final Result user = ud.r_create(trans, data); -// assertEquals("ud.createUser should work", Result.Status.OK, user.status); -// -// checkDataIgnoreDateDiff(data, user.value); -// -// // finally leave system in consistent state by deleting user again -// ud.delete(trans,data); -// -// } catch (DAOException e) { -// e.printStackTrace(); -// fail("Fail due to Exception"); -// } finally { -// ud.close(trans); -// } -// } -// -// private UserDAO.Data createPrototypeUserData() { -// UserDAO.Data data = new UserDAO.Data(); -// data.name = CONST_myName; -// -// data.cred_type = CONST_CRED_TYPE; -// data.cred = CONST_MY_CRED; -// data.expires = CONST_UPDATE_DATE; -// return data; -// } -// -// // @Test -// // public void testReadByUser() throws Exception { -// // // this test was done above in our super test, since it uses the same setup -// // } -// -// @Test -// public void testFunctionCreateUser() throws Exception { -// String name = "roger_rabbit"; -// Integer credType = CONST_CRED_TYPE; -// java.nio.ByteBuffer cred = CONST_MY_CRED; -// final UserDAO ud = new UserDAO(trans, cluster,"authz"); -// final UserDAO.Data data = createPrototypeUserData(); -// Result ret = ud.r_create(trans, data); -// Result> byUserNameLookup = ud.r_read(trans, name); -// -// assertEquals("sanity test w/ different username (different than other test cases) failed", name, byUserNameLookup.value.get(0).name); -// assertEquals("delete roger_rabbit failed", true, ud.delete(trans, byUserNameLookup.value.get(0))); -// } -// -// @Test -// public void testLowLevelCassandraCreateData_Given_UserAlreadyPresent_ShouldPass() throws Exception { -// UserDAO ud = new UserDAO(trans, cluster,"authz"); -// -// final UserDAO.Data data = createPrototypeUserData(); -// final UserDAO.Data data1 = ud.create(trans, data); -// final UserDAO.Data data2 = ud.create(trans, data); -// -// assertNotNull(data1); -// assertNotNull(data2); -// -// assertEquals(CONST_myName, data1.name); -// assertEquals(CONST_myName, data2.name); -// } -// -// @Test -// public void testCreateUser_Given_UserAlreadyPresent_ShouldFail() throws Exception { -// UserDAO ud = new UserDAO(trans, cluster,"authz"); -// -// final UserDAO.Data data = createPrototypeUserData(); -// -// // make sure that some prev test did not leave the user in the DB -// ud.delete(trans, data); -// -// // attempt to create same user twice !!! 
-// -// final Result data1 = ud.r_create(trans, data); -// final Result data2 = ud.r_create(trans, data); -// -// assertNotNull(data1); -// assertNotNull(data2); -// -// assertEquals(true, Result.Status.OK == data1.status); -// assertEquals(false, Result.Status.OK == data2.status); -// } -// -// private void checkData1(UserDAO.Data data, UserDAO.Data d) { -// data.name = CONST_myName; -// -// data.cred_type = CONST_CRED_TYPE; -// data.cred = CONST_MY_CRED; -// data.expires = CONST_UPDATE_DATE; -// -// assertEquals(data.name, d.name); -// assertEquals(data.cred_type, d.cred_type); -// assertEquals(data.cred, d.cred); -// assertEquals(data.expires, d.expires); -// -// } -// -// private void checkDataIgnoreDateDiff(UserDAO.Data data, UserDAO.Data d) { -// data.name = CONST_myName; -// -// data.cred_type = CONST_CRED_TYPE; -// data.cred = CONST_MY_CRED; -// data.expires = CONST_UPDATE_DATE; -// -// assertEquals(data.name, d.name); -// assertEquals(data.cred_type, d.cred_type); -// assertEquals(data.cred, d.cred); -// // we allow dates to be different, e.g. high level calls e.g. createUser sets the date itself. -// //assertEquals(data.updated, d.updated); -// -// } -// -// /** -// * Get a CONST_MY_CRED ByteBuffer, which is the java type for a cass blob. -// * @return -// */ -// private static java.nio.ByteBuffer get_CONST_MY_CRED() { -// return ByteBuffer.wrap("Hello".getBytes()); -// } -// -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java deleted file mode 100644 index d93ec399..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_DelegateDAO.java +++ /dev/null @@ -1,107 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.nio.ByteBuffer; -import java.util.Date; -import java.util.List; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.DelegateDAO; -import org.onap.aaf.dao.aaf.cass.DelegateDAO.Data; - - -public class JU_DelegateDAO extends AbsJUCass { - @Test - public void testCRUD() throws Exception { - DelegateDAO dao = new DelegateDAO(trans, cluster, AUTHZ); - DelegateDAO.Data data = new DelegateDAO.Data(); - data.user = "myname"; - data.delegate = "yourname"; - data.expires = new Date(); - -// Bytification - ByteBuffer bb = data.bytify(); - Data bdata = new DelegateDAO.Data(); - bdata.reconstitute(bb); - compare(data, bdata); - - try { - // Test create - Result ddcr = dao.create(trans,data); - assertTrue(ddcr.isOK()); - - - // Read by User - Result> records = dao.read(trans,data.user); - assertTrue(records.isOKhasData()); - for(DelegateDAO.Data rdata : records.value) - compare(data,rdata); - - // Read by Delegate - records = dao.readByDelegate(trans,data.delegate); - assertTrue(records.isOKhasData()); - for(DelegateDAO.Data rdata : records.value) - compare(data,rdata); - - // Update - data.delegate = "hisname"; - data.expires = new Date(); - assertTrue(dao.update(trans, data).isOK()); - - // Read by User - records = dao.read(trans,data.user); - assertTrue(records.isOKhasData()); - for(DelegateDAO.Data rdata : records.value) - compare(data,rdata); - - // Read by Delegate - records = dao.readByDelegate(trans,data.delegate); - assertTrue(records.isOKhasData()); - for(DelegateDAO.Data rdata : records.value) - compare(data,rdata); - - // Test delete - dao.delete(trans,data, true); - records = dao.read(trans,data.user); - assertTrue(records.isEmpty()); - - - } finally { - dao.close(trans); - } - } - - private void compare(Data d1, Data d2) { - assertEquals(d1.user, d2.user); - assertEquals(d1.delegate, d2.delegate); - assertEquals(d1.expires,d2.expires); - } - - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java deleted file mode 100644 index 9b0fa2ed..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_FastCalling.java +++ /dev/null @@ -1,91 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.security.NoSuchAlgorithmException; -import java.util.Date; -import java.util.List; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.CredDAO; -import org.onap.aaf.dao.aaf.cass.CredDAO.Data; - -import org.onap.aaf.inno.env.APIException; - -public class JU_FastCalling extends AbsJUCass { - - @Test - public void test() throws IOException, NoSuchAlgorithmException, APIException { - trans.setProperty("cassandra.writeConsistency.cred","ONE"); - - CredDAO udao = new CredDAO(env.newTransNoAvg(),cluster,"authz"); - System.out.println("Starting calls"); - for(iterations=0;iterations<8;++iterations) { - try { - // Create - CredDAO.Data data = new CredDAO.Data(); - data.id = "m55555@aaf.att.com"; - data.type = CredDAO.BASIC_AUTH; - data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mypass")); - data.expires = new Date(System.currentTimeMillis() + 60000*60*24*90); - udao.create(trans,data); - - // Validate Read with key fields in Data - Result> rlcd = udao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CredDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - // Update - data.cred = ByteBuffer.wrap(userPassToBytes("m55555","mynewpass")); - udao.update(trans,data); - rlcd = udao.read(trans,data); - assertTrue(rlcd.isOKhasData()); - for(CredDAO.Data d : rlcd.value) { - checkData1(data,d); - } - - udao.delete(trans,data, true); - } finally { - updateTotals(); - newTrans(); - } - } - - } - - private void checkData1(Data data, Data d) { - assertEquals(data.id,d.id); - assertEquals(data.type,d.type); - assertEquals(data.cred,d.cred); - assertEquals(data.expires,d.expires); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java deleted file mode 100644 index 29ce5d4b..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_HistoryDAO.java +++ /dev/null @@ -1,154 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. 
- * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. - * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.nio.ByteBuffer; -import java.util.List; -import java.util.Random; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.HistoryDAO; - -public class JU_HistoryDAO extends AbsJUCass { - - @Test - public void testCreate() throws Exception { - HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ); - HistoryDAO.Data data = createHistoryData(); - - try { - historyDAO.create(trans,data); - Thread.sleep(200);// History Create is Async - Result> records = historyDAO.readByUser(trans,data.user,data.yr_mon); - assertTrue(records.isOKhasData()); - for(HistoryDAO.Data d : records.value) { - assertHistory(data, d); - } - } finally { - historyDAO.close(trans); - } - } - - @Test - public void tesReadByUser() throws Exception { - HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ); - HistoryDAO.Data data = createHistoryData(); - - try { - historyDAO.create(trans,data); - Thread.sleep(200);// History Create is Async - Result> records = historyDAO.readByUser(trans, data.user,data.yr_mon); - assertTrue(records.isOKhasData()); - for(HistoryDAO.Data d : records.value) { - assertHistory(data, d); - } - } finally { - historyDAO.close(trans); - } - } - -/* - @Test - public void readByUserAndMonth() throws Exception { - HistoryDAO historyDAO = new HistoryDAO(trans,cluster, AUTHZ); - HistoryDAO.Data data = createHistoryData(); - - try { - historyDAO.create(trans,data); - Thread.sleep(200);// History Create is Async - Result> records = historyDAO.readByUserAndMonth(trans, - data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)), - Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6))); - assertTrue(records.isOKhasData()); - for(HistoryDAO.Data d : records.value) { - assertHistory(data, d); - } - } finally { - historyDAO.close(trans); - } - } -*/ - //TODO readadd this -// @Test -// public void readByUserAndDay() throws Exception { -// HistoryDAO historyDAO = new HistoryDAO(trans, cluster, AUTHZ); -// HistoryDAO.Data data = createHistoryData(); -// -// try { -// historyDAO.create(trans, data); -// Thread.sleep(200);// History Create is Async -// -// String dayTime = String.valueOf(data.day_time); -// String day = null; -// if (dayTime.length() < 8) -// day = dayTime.substring(0, 1); -// else -// day = dayTime.substring(0, 2); -// -// List records = historyDAO.readByUserBetweenDates(trans, -// data.user, Integer.valueOf(String.valueOf(data.yr_mon).substring(0, 4)), -// Integer.valueOf(String.valueOf(data.yr_mon).substring(4, 6)), -// Integer.valueOf(day), 0); -// assertEquals(1,records.size()); -// for(HistoryDAO.Data d : records) { -// assertHistory(data, d); -// } -// } finally { -// historyDAO.close(trans); -// } -// } - private HistoryDAO.Data createHistoryData() { - HistoryDAO.Data data = HistoryDAO.newInitedData(); - Random random = new Random(); - data.user = "test" + random.nextInt(); - data.action = "add"; - data.target = "history"; - data.memo = "adding a row into history table"; -// data.detail().put("id", "test"); -// data.detail().put("name", "test"); - //String temp = "Test Blob 
Message"; - data.reconstruct = ByteBuffer.wrap("Temp Blob Message".getBytes()); - return data; - } - - private void assertHistory(HistoryDAO.Data ip, HistoryDAO.Data op) { - assertEquals(ip.yr_mon, op.yr_mon); -// assertEquals(ip.day_time, op.day_time); - assertEquals(ip.user, op.user); - assertEquals(ip.action, op.action); - assertEquals(ip.target, op.target); - assertEquals(ip.memo, op.memo); - //TODO : have to see if third party assert utility can be used -// assertTrue(CollectionUtils.isEqualCollection(ip.detail, op.detail)); -// for (String key : ip.detail().keySet()) { -// assertNotNull(op.detail().get(key)); -// } - assertNotNull(op.reconstruct); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java deleted file mode 100644 index ad9ed287..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsDAO.java +++ /dev/null @@ -1,187 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -import org.junit.Test; -import org.onap.aaf.authz.layer.Result; -import org.onap.aaf.dao.aaf.cass.NsDAO; -import org.onap.aaf.dao.aaf.cass.NsType; -import org.onap.aaf.dao.aaf.cass.NsDAO.Data; - -import org.onap.aaf.inno.env.APIException; - - -public class JU_NsDAO extends AbsJUCass { - private static final String CRM = "ju_crm"; - private static final String SWM = "ju_swm"; - - @Test - public void test() throws APIException, IOException { - NsDAO nsd = new NsDAO(trans, cluster, AUTHZ); - try { - final String nsparent = "com.test"; - final String ns1 = nsparent +".ju_ns"; - final String ns2 = nsparent + ".ju_ns2"; - - Map oAttribs = new HashMap(); - oAttribs.put(SWM, "swm_data"); - oAttribs.put(CRM, "crm_data"); - Data data = new NsDAO.Data(); - data.name = ns1; - data.type = NsType.APP.type; - data.attrib(true).putAll(oAttribs); - - - Result> rdrr; - - // CREATE - Result rdc = nsd.create(trans, data); - assertTrue(rdc.isOK()); - - try { -// Bytification - ByteBuffer bb = data.bytify(); - Data bdata = new NsDAO.Data(); - bdata.reconstitute(bb); - compare(data, bdata); - - // Test READ by Object - rdrr = nsd.read(trans, data); - assertTrue(rdrr.isOKhasData()); - assertEquals(rdrr.value.size(),1); - Data d = rdrr.value.get(0); - assertEquals(d.name,data.name); - assertEquals(d.type,data.type); - attribsEqual(d.attrib(false),data.attrib(false)); - attribsEqual(oAttribs,data.attrib(false)); - - // Test Read by Key - rdrr = nsd.read(trans, data.name); - assertTrue(rdrr.isOKhasData()); - assertEquals(rdrr.value.size(),1); - d = rdrr.value.get(0); - assertEquals(d.name,data.name); - assertEquals(d.type,data.type); - attribsEqual(d.attrib(false),data.attrib(false)); - attribsEqual(oAttribs,data.attrib(false)); - - // Read NS by Type - Result> rtypes = nsd.readNsByAttrib(trans, SWM); - Set types; - if(rtypes.notOK()) { - throw new IOException(rtypes.errorString()); - } else { - types = rtypes.value; - } - assertEquals(1,types.size()); - assertEquals(true,types.contains(ns1)); - - // Add second NS to test list of data returned - Data data2 = new NsDAO.Data(); - data2.name = ns2; - data2.type = 3; // app - Result rdc2 = nsd.create(trans, data2); - assertTrue(rdc2.isOK()); - - // Interrupt - test PARENT - Result> rdchildren = nsd.getChildren(trans, "com.test"); - assertTrue(rdchildren.isOKhasData()); - boolean child1 = false; - boolean child2 = false; - for(Data dchild : rdchildren.value) { - if(ns1.equals(dchild.name))child1=true; - if(ns2.equals(dchild.name))child2=true; - } - assertTrue(child1); - assertTrue(child2); - - // FINISH DATA 2 by deleting - Result rddr = nsd.delete(trans, data2, true); - assertTrue(rddr.isOK()); - - // ADD DESCRIPTION - String description = "This is my test Namespace"; - assertFalse(description.equalsIgnoreCase(data.description)); - - Result addDesc = nsd.addDescription(trans, data.name, description); - assertTrue(addDesc.isOK()); - rdrr = nsd.read(trans, data); - assertTrue(rdrr.isOKhasData()); - assertEquals(rdrr.value.size(),1); - assertEquals(rdrr.value.get(0).description,description); - - // UPDATE - String newDescription = 
"zz1234 Owns This Namespace Now"; - oAttribs.put("mso", "mso_data"); - data.attrib(true).put("mso", "mso_data"); - data.description = newDescription; - Result update = nsd.update(trans, data); - assertTrue(update.isOK()); - rdrr = nsd.read(trans, data); - assertTrue(rdrr.isOKhasData()); - assertEquals(rdrr.value.size(),1); - assertEquals(rdrr.value.get(0).description,newDescription); - attribsEqual(oAttribs, rdrr.value.get(0).attrib); - - - } catch (IOException e) { - e.printStackTrace(); - } finally { - // DELETE - Result rddr = nsd.delete(trans, data, true); - assertTrue(rddr.isOK()); - rdrr = nsd.read(trans, data); - assertTrue(rdrr.isOK() && rdrr.isEmpty()); - assertEquals(rdrr.value.size(),0); - } - } finally { - nsd.close(trans); - } - } - - private void compare(NsDAO.Data d, NsDAO.Data data) { - assertEquals(d.name,data.name); - assertEquals(d.type,data.type); - attribsEqual(d.attrib(false),data.attrib(false)); - attribsEqual(d.attrib(false),data.attrib(false)); - } - - private void attribsEqual(Map aa, Map ba) { - assertEquals(aa.size(),ba.size()); - for(Entry es : aa.entrySet()) { - assertEquals(es.getValue(),ba.get(es.getKey())); - } - } -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java deleted file mode 100644 index 92152695..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_NsType.java +++ /dev/null @@ -1,59 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * * - ******************************************************************************/ -package org.onap.aaf.dao.aaf.test; - -import static org.junit.Assert.assertEquals; - -import org.junit.AfterClass; -import org.junit.Test; -import org.onap.aaf.dao.aaf.cass.NsType; - -public class JU_NsType { - - @AfterClass - public static void tearDownAfterClass() throws Exception { - } - - @Test - public void test() { - NsType nt,nt2; - String[] tests = new String[] {"DOT","ROOT","COMPANY","APP","STACKED_APP","STACK"}; - for(String s : tests) { - nt = NsType.valueOf(s); - assertEquals(s,nt.name()); - - nt2 = NsType.fromString(s); - assertEquals(nt,nt2); - - int t = nt.type; - nt2 = NsType.fromType(t); - assertEquals(nt,nt2); - } - - nt = NsType.fromType(Integer.MIN_VALUE); - assertEquals(nt,NsType.UNKNOWN); - nt = NsType.fromString("Garbage"); - assertEquals(nt,NsType.UNKNOWN); - } - -} diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java deleted file mode 100644 index 582ce185..00000000 --- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_PermDAO.java +++ /dev/null @@ -1,176 +0,0 @@ -/******************************************************************************* - * ============LICENSE_START==================================================== - * * org.onap.aaf - * * =========================================================================== - * * Copyright © 2017 AT&T Intellectual Property. All rights reserved. - * * =========================================================================== - * * Licensed under the Apache License, Version 2.0 (the "License"); - * * you may not use this file except in compliance with the License. - * * You may obtain a copy of the License at - * * - * * http://www.apache.org/licenses/LICENSE-2.0 - * * - * * Unless required by applicable law or agreed to in writing, software - * * distributed under the License is distributed on an "AS IS" BASIS, - * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * * See the License for the specific language governing permissions and - * * limitations under the License. - * * ============LICENSE_END==================================================== - * * - * * ECOMP is a trademark and service mark of AT&T Intellectual Property. 
- * *
- ******************************************************************************/
-package org.onap.aaf.dao.aaf.test;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-import java.util.Set;
-
-import org.junit.Test;
-import org.onap.aaf.authz.layer.Result;
-import org.onap.aaf.dao.aaf.cass.PermDAO;
-import org.onap.aaf.dao.aaf.cass.RoleDAO;
-import org.onap.aaf.dao.aaf.cass.PermDAO.Data;
-
-import org.onap.aaf.inno.env.APIException;
-
-/**
- * Test the PermissionDAO
- *
- * Utilize AbsJUCass to initialize and pre-load Cass
- *
- *
- */
-public class JU_PermDAO extends AbsJUCass{
-
-    @Test
-    public void test() throws APIException, IOException {
-        PermDAO pd = new PermDAO(trans,cluster,"authz");
-        try {
-            PermDAO.Data data = new PermDAO.Data();
-            data.ns = "com.test.ju_perm";
-            data.type = "MyType";
-            data.instance = "MyInstance";
-            data.action = "MyAction";
-            data.roles(true).add(data.ns + ".dev");
-
-
-
-            // CREATE
-            Result<Data> rpdc = pd.create(trans,data);
-            assertTrue(rpdc.isOK());
-
-            Result<List<PermDAO.Data>> rlpd;
-            try {
-// Bytification
-                ByteBuffer bb = data.bytify();
-                Data bdata = new PermDAO.Data();
-                bdata.reconstitute(bb);
-                compare(data, bdata);
-
-                // Validate Read with key fields in Data
-                if((rlpd = pd.read(trans,data)).isOK())
-                    for(PermDAO.Data d : rlpd.value) {
-                        checkData1(data,d);
-                    }
-
-                // Validate readByName
-                if((rlpd = pd.readByType(trans,data.ns, data.type)).isOK())
-                    for(PermDAO.Data d : rlpd.value) {
-                        checkData1(data,d);
-                    }
-
-                // Add Role
-                RoleDAO.Data role = new RoleDAO.Data();
-                role.ns = data.ns;
-                role.name = "test";
-
-                Result<Void> rvpd = pd.addRole(trans, data, role.fullName());
-                assertTrue(rvpd.isOK());
-                // Validate Read with key fields in Data
-                if((rlpd = pd.read(trans,data)).isOK())
-                    for(PermDAO.Data d : rlpd.value) {
-                        checkData2(data,d);
-                    }
-
-                // Remove Role
-                rvpd = pd.delRole(trans, data, role.fullName());
-                assertTrue(rvpd.isOK());
-                if((rlpd = pd.read(trans,data)).isOK())
-                    for(PermDAO.Data d : rlpd.value) {
-                        checkData1(data,d);
-                    }
-
-                // Add Child
-                Data data2 = new Data();
-                data2.ns = data.ns;
-                data2.type = data.type + ".2";
-                data2.instance = data.instance;
-                data2.action = data.action;
-
-                rpdc = pd.create(trans, data2);
-                assertTrue(rpdc.isOK());
-                try {
-                    rlpd = pd.readChildren(trans, data.ns,data.type);
-                    assertTrue(rlpd.isOKhasData());
-                    assertEquals(rlpd.value.size(),1);
-                    assertEquals(rlpd.value.get(0).fullType(),data2.fullType());
-                } finally {
-                    // Delete Child
-                    pd.delete(trans, data2,true);
-
-                }
-            } catch (IOException e) {
-                e.printStackTrace();
-            } finally {
-                // DELETE
-                Result<Void> rpdd = pd.delete(trans,data,true);
-                assertTrue(rpdd.isOK());
-                rlpd = pd.read(trans, data);
-                assertTrue(rlpd.isOK() && rlpd.isEmpty());
-                assertEquals(rlpd.value.size(),0);
-            }
-        } finally {
-            pd.close(trans);
-        }
-    }
-
-    private void compare(Data a, Data b) {
-        assertEquals(a.ns,b.ns);
-        assertEquals(a.type,b.type);
-        assertEquals(a.instance,b.instance);
-        assertEquals(a.action,b.action);
-        assertEquals(a.roles(false).size(),b.roles(false).size());
-        for(String s: a.roles(false)) {
-            assertTrue(b.roles(false).contains(s));
-        }
-    }
-    private void checkData1(Data data, Data d) {
-        assertEquals(data.ns,d.ns);
-        assertEquals(data.type,d.type);
-        assertEquals(data.instance,d.instance);
-        assertEquals(data.action,d.action);
-
-        Set<String> ss = d.roles(true);
-        assertEquals(1,ss.size());
-        assertTrue(ss.contains(data.ns+".dev"));
-    }
-
-    private void checkData2(Data data, Data d) {
-        assertEquals(data.ns,d.ns);
-        assertEquals(data.type,d.type);
-        assertEquals(data.instance,d.instance);
-        assertEquals(data.action,d.action);
-
-        Set<String> ss = d.roles(true);
-        assertEquals(2,ss.size());
-        assertTrue(ss.contains(data.ns+".dev"));
-        assertTrue(ss.contains(data.ns+".test"));
-    }
-
-
-}
diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java
deleted file mode 100644
index ba61c61e..00000000
--- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/JU_RoleDAO.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START====================================================
- * * org.onap.aaf
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package org.onap.aaf.dao.aaf.test;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import org.junit.Test;
-import org.onap.aaf.authz.layer.Result;
-import org.onap.aaf.dao.aaf.cass.PermDAO;
-import org.onap.aaf.dao.aaf.cass.RoleDAO;
-import org.onap.aaf.dao.aaf.cass.RoleDAO.Data;
-
-import org.onap.aaf.inno.env.APIException;
-
-
-public class JU_RoleDAO extends AbsJUCass {
-
-    @Test
-    public void test() throws IOException, APIException {
-        RoleDAO rd = new RoleDAO(trans, cluster, AUTHZ);
-        try {
-            Data data = new RoleDAO.Data();
-            data.ns = "com.test.ju_role";
-            data.name = "role1";
-
-// Bytification
-            ByteBuffer bb = data.bytify();
-            Data bdata = new RoleDAO.Data();
-            bdata.reconstitute(bb);
-            compare(data, bdata);
-
-            // CREATE
-            Result<Data> rdc = rd.create(trans, data);
-            assertTrue(rdc.isOK());
-            Result<List<Data>> rdrr;
-            try {
-                // READ
-                rdrr = rd.read(trans, data);
-                assertTrue(rdrr.isOKhasData());
-                assertEquals(rdrr.value.size(),1);
-                Data d = rdrr.value.get(0);
-                assertEquals(d.perms.size(),0);
-                assertEquals(d.name,data.name);
-                assertEquals(d.ns,data.ns);
-
-                PermDAO.Data perm = new PermDAO.Data();
-                perm.ns = data.ns;
-                perm.type = "Perm";
-                perm.instance = "perm1";
-                perm.action = "write";
-
-                // ADD Perm
-                Result<Void> rdar = rd.addPerm(trans, data, perm);
-                assertTrue(rdar.isOK());
-                rdrr = rd.read(trans, data);
-                assertTrue(rdrr.isOKhasData());
-                assertEquals(rdrr.value.size(),1);
-                assertEquals(rdrr.value.get(0).perms.size(),1);
-                assertTrue(rdrr.value.get(0).perms.contains(perm.encode()));
-
-                // DEL Perm
-                rdar = rd.delPerm(trans, data,perm);
-                assertTrue(rdar.isOK());
-                rdrr = rd.read(trans, data);
-                assertTrue(rdrr.isOKhasData());
-                assertEquals(rdrr.value.size(),1);
-                assertEquals(rdrr.value.get(0).perms.size(),0);
-
-                // Add Child
-                Data data2 = new Data();
-                data2.ns = data.ns;
-                data2.name = data.name + ".2";
-
-                rdc = rd.create(trans, data2);
-                assertTrue(rdc.isOK());
-                try {
-                    rdrr = rd.readChildren(trans, data.ns,data.name);
-                    assertTrue(rdrr.isOKhasData());
-                    assertEquals(rdrr.value.size(),1);
-                    assertEquals(rdrr.value.get(0).name,data.name + ".2");
-
-                    rdrr = rd.readChildren(trans, data.ns,"*");
-                    assertTrue(rdrr.isOKhasData());
-                    assertEquals(rdrr.value.size(),2);
-
-                } finally {
-                    // Delete Child
-                    rd.delete(trans, data2, true);
-                }
-
-            } finally {
-                // DELETE
-                Result<Void> rddr = rd.delete(trans, data, true);
-                assertTrue(rddr.isOK());
-                rdrr = rd.read(trans, data);
-                assertTrue(rdrr.isOK() && rdrr.isEmpty());
-                assertEquals(rdrr.value.size(),0);
-            }
-        } finally {
-            rd.close(trans);
-        }
-    }
-
-    private void compare(Data a, Data b) {
-        assertEquals(a.name,b.name);
-        assertEquals(a.description, b.description);
-        assertEquals(a.ns,b.ns);
-        assertEquals(a.perms(false).size(),b.perms(false).size());
-        for(String p : a.perms(false)) {
-            assertTrue(b.perms(false).contains(p));
-        }
-    }
-
-}
diff --git a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java b/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java
deleted file mode 100644
index 379eb5e4..00000000
--- a/authz-cass/src/test/java/org/onap/aaf/dao/aaf/test/NS_ChildUpdate.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*******************************************************************************
- * ============LICENSE_START====================================================
- * * org.onap.aaf
- * * ===========================================================================
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * * ===========================================================================
- * * Licensed under the Apache License, Version 2.0 (the "License");
- * * you may not use this file except in compliance with the License.
- * * You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- * * ============LICENSE_END====================================================
- * *
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- * *
- ******************************************************************************/
-package org.onap.aaf.dao.aaf.test;
-
-import org.onap.aaf.authz.env.AuthzEnv;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.ResultSet;
-import com.datastax.driver.core.Row;
-import com.datastax.driver.core.Session;
-
-public class NS_ChildUpdate {
-
-    public static void main(String[] args) {
-        if(args.length < 3 ) {
-            System.out.println("usage: NS_ChildUpdate machine mechid (encrypted)passwd");
-        } else {
-            try {
-                AuthzEnv env = new AuthzEnv();
-                env.setLog4JNames("log.properties","authz","authz","audit","init","trace");
-
-                Cluster cluster = Cluster.builder()
-                    .addContactPoint(args[0])
-                    .withCredentials(args[1],env.decrypt(args[2], false))
-                    .build();
-
-                Session session = cluster.connect("authz");
-                try {
-                    ResultSet result = session.execute("SELECT name,parent FROM ns");
-                    int count = 0;
-                    for(Row r : result.all()) {
-                        ++count;
-                        String name = r.getString(0);
-                        String parent = r.getString(1);
-                        if(parent==null) {
-                            int idx = name.lastIndexOf('.');
-
-                            parent = idx>0?name.substring(0, idx):".";
-                            System.out.println("UPDATE " + name + " to " + parent);
-                            session.execute("UPDATE ns SET parent='" + parent + "' WHERE name='" + name + "';");
-                        }
-                    }
-                    System.out.println("Processed " + count + " records");
-                } finally {
-                    session.close();
-                    cluster.close();
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-}
diff --git a/authz-cass/src/test/resources/cadi.properties b/authz-cass/src/test/resources/cadi.properties
deleted file mode 100644
index 8f1209a9..00000000
--- a/authz-cass/src/test/resources/cadi.properties
+++ /dev/null
@@ -1,52 +0,0 @@
-#-------------------------------------------------------------------------------
-# ============LICENSE_START====================================================
-# * org.onap.aaf
-# * ===========================================================================
-# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-# * ===========================================================================
-# * Licensed under the Apache License, Version 2.0 (the "License");
-# * you may not use this file except in compliance with the License.
-# * You may obtain a copy of the License at
-# *
-# * http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# * ============LICENSE_END====================================================
-# *
-# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
-# *
-#-------------------------------------------------------------------------------
-###############################################################################
-# Copyright (c) 2016 AT&T Intellectual Property. All rights reserved.
-###############################################################################
-##
-## AUTHZ API (authz-service) Properties
-##
-
-cadi_prop_file=com.att.aaf.props;com.att.aaf.common.props
-
-#cadi_trust_all_x509=true
-#cadi_alias=aaf.att
-https.protocols=TLSv1.1,TLSv1.2
-
-cm_url=https://XXX:8150
-
-basic_realm=localized
-basic_warn=false
-localhost_deny=false
-
-cass_group_name=com.att.aaf
-cass_cluster_name=mithrilcsp.sbc.com
-aaf_default_realm=com.att.csp
-
-aaf_url=https://DME2RESOLVE/service=com.att.authz.AuthorizationService/version=2.0/envContext=DEV/routeOffer=BAU_SE
-aaf_id=???
-aaf_password=enc:XXX
-
-aaf_user_expires=3000
-aaf_clean_interval=4000
-
--
cgit 1.2.3-korg