001: package org.apache.lucene.search;
002:
003: /**
004: * Licensed to the Apache Software Foundation (ASF) under one or more
005: * contributor license agreements. See the NOTICE file distributed with
006: * this work for additional information regarding copyright ownership.
007: * The ASF licenses this file to You under the Apache License, Version 2.0
008: * (the "License"); you may not use this file except in compliance with
009: * the License. You may obtain a copy of the License at
010: *
011: * http://www.apache.org/licenses/LICENSE-2.0
012: *
013: * Unless required by applicable law or agreed to in writing, software
014: * distributed under the License is distributed on an "AS IS" BASIS,
015: * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
016: * See the License for the specific language governing permissions and
017: * limitations under the License.
018: */
019:
020: import java.util.Comparator;
021: import java.util.Date;
022: import java.util.HashMap;
023: import java.util.Iterator;
024: import java.util.Map;
025: import java.util.TreeSet;
026:
027: /**
028: * Filter caching singleton. It can be used by {@link org.apache.lucene.search.RemoteCachingWrapperFilter}
029: * or just to save filters locally for reuse.
030: * This class makes it possible to cache Filters even when using RMI, as it
031: * keeps the cache on the searcher side of the RMI connection.
032: *
033: * Also could be used as a persistent storage for any filter as long as the
034: * filter provides a proper hashCode(), as that is used as the key in the cache.
035: *
036: * The cache is periodically cleaned up from a separate thread to ensure the
037: * cache doesn't exceed the maximum size.
038: * @author Matt Ericson
039: */
040: public class FilterManager {
041:
042: protected static FilterManager manager;
043:
044: /** The default maximum number of Filters in the cache */
045: protected static final int DEFAULT_CACHE_CLEAN_SIZE = 100;
046: /** The default frequency of cache clenup */
047: protected static final long DEFAULT_CACHE_SLEEP_TIME = 1000 * 60 * 10;
048:
049: /** The cache itself */
050: protected Map cache;
051: /** Maximum allowed cache size */
052: protected int cacheCleanSize;
053: /** Cache cleaning frequency */
054: protected long cleanSleepTime;
055: /** Cache cleaner that runs in a separate thread */
056: protected FilterCleaner filterCleaner;
057:
058: public synchronized static FilterManager getInstance() {
059: if (manager == null) {
060: manager = new FilterManager();
061: }
062: return manager;
063: }
064:
065: /**
066: * Sets up the FilterManager singleton.
067: */
068: protected FilterManager() {
069: cache = new HashMap();
070: cacheCleanSize = DEFAULT_CACHE_CLEAN_SIZE; // Let the cache get to 100 items
071: cleanSleepTime = DEFAULT_CACHE_SLEEP_TIME; // 10 minutes between cleanings
072:
073: filterCleaner = new FilterCleaner();
074: Thread fcThread = new Thread(filterCleaner);
075: // setto be a Daemon so it doesn't have to be stopped
076: fcThread.setDaemon(true);
077: fcThread.start();
078: }
079:
080: /**
081: * Sets the max size that cache should reach before it is cleaned up
082: * @param cacheCleanSize maximum allowed cache size
083: */
084: public void setCacheSize(int cacheCleanSize) {
085: this .cacheCleanSize = cacheCleanSize;
086: }
087:
088: /**
089: * Sets the cache cleaning frequency in milliseconds.
090: * @param cleanSleepTime cleaning frequency in millioseconds
091: */
092: public void setCleanThreadSleepTime(long cleanSleepTime) {
093: this .cleanSleepTime = cleanSleepTime;
094: }
095:
096: /**
097: * Returns the cached version of the filter. Allows the caller to pass up
098: * a small filter but this will keep a persistent version around and allow
099: * the caching filter to do its job.
100: *
101: * @param filter The input filter
102: * @return The cached version of the filter
103: */
104: public Filter getFilter(Filter filter) {
105: synchronized (cache) {
106: FilterItem fi = null;
107: fi = (FilterItem) cache.get(new Integer(filter.hashCode()));
108: if (fi != null) {
109: fi.timestamp = new Date().getTime();
110: return fi.filter;
111: }
112: cache.put(new Integer(filter.hashCode()), new FilterItem(
113: filter));
114: return filter;
115: }
116: }
117:
118: /**
119: * Holds the filter and the last time the filter was used, to make LRU-based
120: * cache cleaning possible.
121: * TODO: Clean this up when we switch to Java 1.5
122: */
123: protected class FilterItem {
124: public Filter filter;
125: public long timestamp;
126:
127: public FilterItem(Filter filter) {
128: this .filter = filter;
129: this .timestamp = new Date().getTime();
130: }
131: }
132:
133: /**
134: * Keeps the cache from getting too big.
135: * If we were using Java 1.5, we could use LinkedHashMap and we would not need this thread
136: * to clean out the cache.
137: *
138: * The SortedSet sortedFilterItems is used only to sort the items from the cache,
139: * so when it's time to clean up we have the TreeSet sort the FilterItems by
140: * timestamp.
141: *
142: * Removes 1.5 * the numbers of items to make the cache smaller.
143: * For example:
144: * If cache clean size is 10, and the cache is at 15, we would remove (15 - 10) * 1.5 = 7.5 round up to 8.
145: * This way we clean the cache a bit more, and avoid having the cache cleaner having to do it frequently.
146: */
147: protected class FilterCleaner implements Runnable {
148:
149: private boolean running = true;
150: private TreeSet sortedFilterItems;
151:
152: public FilterCleaner() {
153: sortedFilterItems = new TreeSet(new Comparator() {
154: public int compare(Object a, Object b) {
155: if (a instanceof Map.Entry
156: && b instanceof Map.Entry) {
157: FilterItem fia = (FilterItem) ((Map.Entry) a)
158: .getValue();
159: FilterItem fib = (FilterItem) ((Map.Entry) b)
160: .getValue();
161: if (fia.timestamp == fib.timestamp) {
162: return 0;
163: }
164: // smaller timestamp first
165: if (fia.timestamp < fib.timestamp) {
166: return -1;
167: }
168: // larger timestamp last
169: return 1;
170: } else {
171: throw new ClassCastException(
172: "Objects are not Map.Entry");
173: }
174: }
175: });
176: }
177:
178: public void run() {
179: while (running) {
180:
181: // sort items from oldest to newest
182: // we delete the oldest filters
183: if (cache.size() > cacheCleanSize) {
184: // empty the temporary set
185: sortedFilterItems.clear();
186: synchronized (cache) {
187: sortedFilterItems.addAll(cache.entrySet());
188: Iterator it = sortedFilterItems.iterator();
189: int numToDelete = (int) ((cache.size() - cacheCleanSize) * 1.5);
190: int counter = 0;
191: // loop over the set and delete all of the cache entries not used in a while
192: while (it.hasNext() && counter++ < numToDelete) {
193: Map.Entry entry = (Map.Entry) it.next();
194: cache.remove(entry.getKey());
195: }
196: }
197: // empty the set so we don't tie up the memory
198: sortedFilterItems.clear();
199: }
200: // take a nap
201: try {
202: Thread.sleep(cleanSleepTime);
203: } catch (InterruptedException e) {
204: // just keep going
205: }
206: }
207: }
208: }
209: }
|