Jeg vil ikke bruge WR (WeakReference), men SR (SoftReference), da disse først bliver slettet, når der mangler hukommelse (det kommer lidt an på JVM'en).
Jeg har fundet følgende klasse til at gemme objekter i en cache, jeg fandt den på nettet. Jeg har lavet den om så den bruger en Hashtable istedet for en HashMap, da jeg har brug for trådsikkerhed. Jeg har testet den en del, og der er kun et meget lille overhead når den bruger en Hashtable istedet for en HashMap
Originalen fandt jeg her:
http://archive.devx.com/java/free/articles/Kabutz01/Kabutz01-2.asp
/**
 * @author http://www.devx.com/java/free/articles/Kabutz01/Kabutz01-1.asp
 */
import java.lang.ref.*;
import java.util.*;
import dk.toldskat.util.log.*;
/**
 * A memory-sensitive cache: values are held via {@link SoftReference}, so the
 * garbage collector may reclaim them under memory pressure, while the
 * HARD_SIZE most recently accessed values are pinned with hard references.
 * Backed by a {@link Hashtable} so that map access is synchronized.
 *
 * Note: entrySet() is unsupported, so the AbstractMap views (keySet(),
 * values(), etc.) will not work; only the overridden methods below are usable.
 */
public class SoftHashtable extends AbstractMap
{
    private final String VERSION = "VERSION 1.00.00.000";
    private ExtendedLogger log = (ExtendedLogger) ExtendedLogger.getLogger((SoftHashtable.class).getName());

    /** The internal map holding SoftValue wrappers. Hashtable (not HashMap)
        is used deliberately: its methods are synchronized. */
    private final Map hash = new Hashtable();

    /** The number of "hard" references to hold internally. */
    private final int HARD_SIZE;

    /** FIFO list of hard references, most recently accessed first.
        FIX: LinkedList is NOT thread-safe (unlike the Hashtable above), so
        every access to it is guarded by synchronized (hardCache). */
    private final LinkedList hardCache = new LinkedList();

    /** Reference queue onto which the GC enqueues cleared SoftValue objects. */
    private final ReferenceQueue queue = new ReferenceQueue();

    /** Creates a cache that pins up to 100 values with hard references. */
    public SoftHashtable()
    {
        this(100);
    }

    /**
     * Creates a cache that pins up to hardSize values with hard references,
     * so the most recently used entries survive garbage collection.
     *
     * @param hardSize maximum number of hard references to retain
     */
    public SoftHashtable(int hardSize)
    {
        HARD_SIZE = hardSize;
    }

    /**
     * Returns the cached value for the given key, or null if it is absent or
     * has been garbage collected. A successful lookup also promotes the value
     * into the hard-reference FIFO so it cannot be collected for a while.
     *
     * @param key cache key (must not be null — Hashtable rejects null keys)
     * @return the cached value, or null if not present / already collected
     */
    public Object get(Object key)
    {
        log.info("get - Begin");
        Object result = null;
        SoftValue soft = (SoftValue) hash.get(key);
        if (soft != null)
        {
            // The referent is null if the GC already cleared it; in that case
            // drop the stale map entry (processQueue() would also remove it).
            result = soft.get();
            if (result == null)
            {
                hash.remove(key);
            }
            else
            {
                // Pin the value at the head of the hard-reference FIFO.
                // Duplicates are tolerated on purpose: scanning the list on
                // every access to remove them would be slow.
                // FIX: guard the LinkedList explicitly — the synchronized
                // Hashtable does not protect this separate structure, and the
                // whole point of this class (per the author) is thread safety.
                synchronized (hardCache)
                {
                    hardCache.addFirst(result);
                    if (hardCache.size() > HARD_SIZE)
                    {
                        // Evict the oldest pin; the value may still live on
                        // as a soft reference until memory runs low.
                        hardCache.removeLast();
                    }
                }
            }
        }
        log.info("get - End");
        return result;
    }

    /** Drains the ReferenceQueue and removes the map entries whose values
        the GC has cleared, using the key stored in each SoftValue. */
    private void processQueue()
    {
        log.info("processQueue - Begin");
        SoftValue sv;
        while ((sv = (SoftValue) queue.poll()) != null)
        {
            hash.remove(sv.key); // nested class: private members are accessible
        }
        log.info("processQueue - End");
    }

    /**
     * Stores the key/value pair, wrapping the value in a SoftValue that is
     * registered with this cache's reference queue.
     *
     * @param key   cache key (must not be null)
     * @param value value to cache (must not be null — Hashtable restriction)
     * @return the previous SoftValue wrapper for the key, or null
     */
    public Object put(Object key, Object value)
    {
        log.info("put - Begin");
        processQueue(); // drop entries whose values were garbage collected
        Object previous = hash.put(key, new SoftValue(value, key, queue));
        // FIX: log "End" after the put has actually happened, not before it,
        // so the trace reflects the real order of operations.
        log.info("put - End");
        return previous;
    }

    /**
     * Removes the entry for the given key.
     *
     * @param key cache key (must not be null)
     * @return the previous SoftValue wrapper for the key, or null
     */
    public Object remove(Object key)
    {
        log.info("remove - Begin");
        processQueue(); // drop entries whose values were garbage collected
        Object previous = hash.remove(key);
        // FIX: as in put(), log "End" after the operation completes.
        log.info("remove - End");
        return previous;
    }

    /** Empties the cache: releases all hard pins, drains the reference
        queue, and clears the backing map. */
    public void clear()
    {
        log.info("clear - Begin");
        // FIX: the LinkedList needs explicit locking (see hardCache above).
        synchronized (hardCache)
        {
            hardCache.clear();
        }
        processQueue(); // throw out garbage collected values
        hash.clear();
        log.info("clear - End");
    }

    /**
     * Returns the number of live entries. Entries whose values were collected
     * since the last call are purged first, so the count is reasonably fresh.
     *
     * @return current entry count
     */
    public int size()
    {
        log.info("size - Begin");
        processQueue(); // throw out garbage collected values first
        log.info("size - End");
        return hash.size();
    }

    /** Not supported: iterating entries of a soft cache is unsafe, because
        any value may be collected between hasNext() and next(). */
    public Set entrySet()
    {
        throw new UnsupportedOperationException();
    }

    /** A SoftReference that also remembers its key, so that a cleared
        reference pulled off the queue can be removed from the map. */
    private static class SoftValue extends SoftReference
    {
        private final Object key; // immutable by design

        private SoftValue(Object value, Object key, ReferenceQueue q)
        {
            super(value, q); // register with the cache's reference queue
            this.key = key;
        }
    }
}