summaryrefslogtreecommitdiffstats
path: root/authz-core/src/main/java/org/onap/aaf/cache/Cache.java
blob: 3434ca70f006fde8d5df360d969f50a5baf3a192 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
/*******************************************************************************
 * ============LICENSE_START====================================================
 * * org.onap.aaf
 * * ===========================================================================
 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * * ===========================================================================
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * * 
 *  *      http://www.apache.org/licenses/LICENSE-2.0
 * * 
 *  * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 * * ============LICENSE_END====================================================
 * *
 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 * *
 ******************************************************************************/
package org.onap.aaf.cache;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Level;

import org.onap.aaf.inno.env.Env;
import org.onap.aaf.inno.env.Trans;

/**
 * Create and maintain a Map of Maps used for Caching
 * 
 *
 * @param <TRANS>
 * @param <DATA>
 */
public class Cache<TRANS extends Trans, DATA> {
	private static Clean clean;
	private static Timer cleanseTimer;

	public static final String CACHE_HIGH_COUNT = "CACHE_HIGH_COUNT";
	public static final String CACHE_CLEAN_INTERVAL = "CACHE_CLEAN_INTERVAL";

	// ConcurrentMap (not HashMap) so that obtain() can create segments atomically
	// via putIfAbsent, and so that unsynchronized reads during concurrent writes
	// are safe.  Keys are logical cache names; values are the per-name segments.
	private static final ConcurrentMap<String,Map<String,Dated>> cacheMap
		= new ConcurrentHashMap<String,Map<String,Dated>>();

	/**
	 * Dated - wraps an arbitrary List of Data with the timestamp of when it was
	 * cached, so the cleanup task can expire stale entries.
	 */
	public final static class Dated { 
		public Date timestamp;
		public List<?> data;
		
		/**
		 * Cache an existing List, stamped "now".
		 * @param data the payload to cache
		 */
		public Dated(List<?> data) {
			timestamp = new Date();
			this.data = data;
		}

		/**
		 * Convenience: cache a single item, wrapped in a one-element List,
		 * stamped "now".
		 * @param t the single payload element
		 */
		public <T> Dated(T t) {
			timestamp = new Date();
			ArrayList<T> al = new ArrayList<T>(1);
			al.add(t);
			data = al;
		}

		/** Reset the timestamp to "now", extending this entry's life. */
		public void touch() {
			timestamp = new Date();
		}
	}
	
	/**
	 * Obtain (creating on first use) the named cache segment.
	 * <p>
	 * Thread-safe: putIfAbsent guarantees that when two threads race on first
	 * use of a key, both end up sharing the single winning segment; the old
	 * check-then-put could silently discard one thread's map along with any
	 * entries already written to it.
	 *
	 * @param key logical name of the cache segment
	 * @return the shared ConcurrentMap for that name; never null
	 */
	public static Map<String,Dated> obtain(String key) {
		Map<String, Dated> m = cacheMap.get(key);
		if(m==null) {
			m = new ConcurrentHashMap<String, Dated>();
			Map<String,Dated> prev = cacheMap.putIfAbsent(key, m);
			if(prev!=null) {
				// Another thread created the segment first; use theirs.
				m = prev;
			}
		}
		return m;
	}

	/**
	 * Clean will examine resources, and remove those that have expired.
	 * 
	 * If "highs" have been exceeded, then we'll expire 10% more the next time.  This will adjust after each run
	 * without checking contents more than once, making a good average "high" in the minimum speed.
	 */
	private final static class Clean extends TimerTask {
		private final Env env;
		// Names of the cache segments this task is responsible for.  Guarded by
		// "this": add() and the snapshot in run() both synchronize, so the Timer
		// thread never iterates a set that startCleansing() is mutating.
		private final Set<String> set;
		
		// The idea here is to not be too restrictive on a high, but to Expire more items by 
		// shortening the time to expire.  This is done by judiciously incrementing "advance"
		// when the "highs" are exceeded.  This effectively reduces numbers of cached items quickly.
		private final int high;
		private long advance;
		private final long timeInterval;
		
		/**
		 * @param env           environment used for logging
		 * @param cleanInterval ms between runs; also the cap for "advance"
		 * @param highCount     total cached entries above which expiry is accelerated
		 */
		public Clean(Env env, long cleanInterval, int highCount) {
			this.env = env;
			high = highCount;
			timeInterval = cleanInterval;
			advance = 0;
			set = new HashSet<String>();
		}
		
		/** Register a cache segment name for cleaning.  Thread-safe. */
		public synchronized void add(String key) {
			set.add(key);
		}

		public void run() {
			int count = 0;
			int total = 0;
			// look at now.  If we need to expire more by increasing "now" by "advance"
			Date now = new Date(System.currentTimeMillis() + advance);

			// Snapshot under the same lock add() uses, so concurrent
			// startCleansing() calls can't cause ConcurrentModificationException
			// while we iterate.
			String[] names;
			synchronized(this) {
				names = set.toArray(new String[set.size()]);
			}

			for(String name : names) {
				Map<String,Dated> map = cacheMap.get(name);
				// Segments are ConcurrentHashMaps (see obtain()), so removing
				// while iterating the entrySet is safe.
				if(map!=null) for(Map.Entry<String,Dated> me : map.entrySet()) {
					++total;
					if(me.getValue().timestamp.before(now)) {
						map.remove(me.getKey());
						++count;
					}
				}
			}
			
			if(count>0) {
				env.info().log(Level.INFO, "Cache removed",count,"expired Cached Elements out of", total);
			}

			// If High (total) is reached during this period, increase the number of expired services removed for next time.
			// There's no point doing it again here, as there should have been cleaned items.
			if(total>high) {
				// advance cleanup by 10%, without getting greater than timeInterval.
				advance = Math.min(timeInterval, advance+(timeInterval/10));
			} else {
				// reduce advance by 10%, without getting lower than 0.
				advance = Math.max(0, advance-(timeInterval/10));
			}
		}
	}

	/**
	 * Start (once) the background cleanup Timer and register the given segment
	 * names for expiry.  Subsequent calls only add keys.
	 * <p>
	 * NOTE(review): if stopTimer() is later called, a following startCleansing()
	 * creates a fresh Clean and previously registered keys are NOT carried over —
	 * callers must re-register them.
	 *
	 * @param env  source of CACHE_CLEAN_INTERVAL (ms, default 60000) and
	 *             CACHE_HIGH_COUNT (default 5000) properties, and of logging
	 * @param keys cache segment names to clean
	 */
	public static synchronized void startCleansing(Env env, String ... keys) {
		if(cleanseTimer==null) {
			cleanseTimer = new Timer("Cache Cleanup Timer");
			int cleanInterval = Integer.parseInt(env.getProperty(CACHE_CLEAN_INTERVAL,"60000")); // 1 minute clean cycles 
			int highCount = Integer.parseInt(env.getProperty(CACHE_HIGH_COUNT,"5000"));
			cleanseTimer.schedule(clean = new Clean(env, cleanInterval, highCount), cleanInterval, cleanInterval);
		}
		
		for(String key : keys) {
			clean.add(key);
		}
	}

	/** Cancel the cleanup Timer, if running; safe to call repeatedly. */
	public static synchronized void stopTimer() {
		if(cleanseTimer!=null) {
			cleanseTimer.cancel();
			cleanseTimer = null;
		}
	}

	/** Install a JVM shutdown hook that stops the cleanup Timer. */
	public static void addShutdownHook() {
		Runtime.getRuntime().addShutdownHook(new Thread() {
			@Override
			public void run() {
				Cache.stopTimer();
			}
		}); 
	}

}