diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..b5d02b6
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,49 @@
+# Created by .ignore support plugin (hsz.mobi)
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+*.iml
+.idea/
+
+# CMake
+cmake-build-debug/
+
+# Mongo Explorer plugin:
+.idea/**/mongoSettings.xml
+
+## File-based project format:
+*.iws
+
+## Plugin-specific files:
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+
+### Java template
+# Compiled class file
+*.class
+
+# Log file
+*.log
+
+# BlueJ files
+*.ctxt
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.ear
+*.zip
+*.tar.gz
+*.rar
+
+# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
+hs_err_pid*
diff --git a/README.md b/README.md
index a15c091..ed90896 100644
--- a/README.md
+++ b/README.md
@@ -2,10 +2,17 @@
A quick class to provide a lightweight cache wrapper on top of the ConcurrentHashMap class
[![](https://jitpack.io/v/jbharter/JCache.svg)](https://jitpack.io/#jbharter/JCache)
-[![](https://travis-ci.org/jbharter/JCache.svg?branch=master)](https://travis-ci.org/jbharter/JCache.svg)
+[![](https://travis-ci.org/jbharter/JCache.svg?branch=master)](https://travis-ci.org/jbharter/JCache#)
+[![License: BSD-III](https://img.shields.io/badge/license-BSD--III-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
-This is my attempt at a lightweight caching type. There is probably a better built in implementation, but since I like to make things more difficult for myself, I created one on my own! This was created as an extension on the ConcurrentHashMap class, in an effort to provide an upper bound for the number of objects allowed.
+This is my attempt at a lightweight caching type. There is probably a better built-in implementation, but since I like
+to make things more difficult for myself, I created my own! It is built as an extension of the ConcurrentHashMap class,
+in an effort to provide an upper bound on the number of objects allowed. In addition, caches can reference transform
+functions that allow transparent data manipulation on retrieval. When a transform is defined, SortedCache natively
+purges the least computationally expensive elements first (see the examples below).
+
+## Installing
Use this in maven projects by adding this to your repositories,
```xml
@@ -26,3 +33,61 @@ and this to your dependencies
<version>1.0.0-STABLE</version>
```
+Alternatively, you can build it yourself with Apache Maven.
+```shell
+ git clone git@github.com:jbharter/JCache.git
+ cd JCache
+ mvn -U clean package
+```
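+
+If you would rather install the artifact into your local Maven repository (so other local builds can depend on it), a
+plain `install` goal works as well; this is standard Maven behavior rather than anything specific to this project:
+```shell
+ mvn -U clean install
+```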
+
+## Examples
+```java
+ // Simple K -> K Cache
+ FlashCache<String,String> test = new FlashCache<>(/* step size */ 1L,/* max number of elements */ 10L);
+ test.put(Arrays.asList("0","1","2","3","4","5","6","7","8","9","10","11","12"));
+ test.size(); // == 10 --> the cache has an upper bound on its size
+```
+
+```java
+ // Simple K -> K cache with transform
+ FlashCache<String,String> test = new FlashCache<>();
+
+ test.setMapper(String::toUpperCase);
+ test.put("rand");
+ test.setMapper(String::toLowerCase);
+ test.put("OTHER");
+
+ test.get("rand") // == "RAND"
+ test.get("OTHER") // == "other"
+```
+
+```java
+ // Simple K -> V Cache
+ FlashCache<String,Integer> test = new FlashCache<>(/* step size */ 1L,/* max number of elements */ 10L);
+ test.setMapper(Integer::parseInt);
+
+ test.put(Arrays.asList("0","1","2","3","4","5","6","7","8","9","10","11","12"));
+ test.get("3") == 3 // String key type maps to Integer Value type. The upper bound is also still imposed.
+```
+
+For a slightly more exotic cache, you can specify any transform you like, and when you approach the memory bounds, the
+cache will create space by purging the least computationally expensive elements.
+```java
+ // Compute Complexity Sorted K -> K Cache with transform
+ SortedCache<String,String> test = new SortedCache<>();
+ test.setMapper(String::trim);
+ test.put(Arrays.asList("0","1","2","3","4","5","6","7","8","9","10","11","12"));
+
+ test.setMapper(str -> {
+ try {
+ Thread.sleep(1000);
+ return "fancy func";
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ return "fail";
+ }
+ });
+
+ test.poll(); // != "fancy func" --> poll pops the least computationally expensive element from the cache
+
+```
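+
+Purging can also be triggered by hand. The sketch below is a minimal example based on the `purge()` and `purge(Long)`
+methods that `FlashCache` makes public; the element counts in the comments assume a freshly created cache:
+```java
+ // Manual purge
+ FlashCache<String,String> cache = new FlashCache<>(/* step size */ 1L,/* max number of elements */ 10L);
+ cache.setMapper(String::toUpperCase);
+ cache.put(Arrays.asList("a","b","c"));
+
+ cache.purge();   // drops the oldest key, leaving two entries
+ cache.purge(2L); // drops the next two, leaving the cache empty
+```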
diff --git a/pom.xml b/pom.xml
index db1d7f6..4a876d2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,6 +9,20 @@
    <version>1.0.0</version>
+    <pluginManagement>
+        <plugins>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>3.3.1</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>2.0.2</version>
+            </plugin>
+        </plugins>
+    </pluginManagement>
            <groupId>org.apache.maven.plugins</groupId>
@@ -24,6 +38,39 @@
            <artifactId>maven-shade-plugin</artifactId>
            <version>2.3</version>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
diff --git a/src/main/java/com/github/jbharter/caching/BaseCache.java b/src/main/java/com/github/jbharter/caching/BaseCache.java
index 8123c33..234366b 100644
--- a/src/main/java/com/github/jbharter/caching/BaseCache.java
+++ b/src/main/java/com/github/jbharter/caching/BaseCache.java
@@ -1,21 +1,26 @@
package com.github.jbharter.caching;
-import java.util.*;
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.function.*;
-import java.util.stream.*;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
-public abstract class BaseCache<K,V> {
+public abstract class BaseCache<K,V> implements Cache<K,V>, PurgeEvent {
// Parent Class members
static final Long DEFAULT_PURGE_STEP = 50L;
static final Long DEFAULT_UPPER_BOUND = 1000000L;
static double doubleMemPressureMax = 0.7;
- static ConcurrentHashMap<BaseCache,CacheManagement> instanceSet = new ConcurrentHashMap<>();
+ //private static ConcurrentHashMap<BaseCache,CacheManagement> instanceSet = new ConcurrentHashMap<>();
// Class instances and management functions
- protected static int getNumInstances() { return instanceSet.size(); }
- protected static Long getNumEntries() { return instanceSet.keySet().parallelStream().mapToLong(BaseCache::size).sum(); }
+ //protected static int getNumInstances() { return instanceSet.size(); }
+ //protected static Long getNumEntries() { return instanceSet.keySet().parallelStream().mapToLong(BaseCache::size).sum(); }
protected static Long getUsedMem() { return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); }
protected static double getMemPressure() { return 1.0 - (Runtime.getRuntime().freeMemory()/(double)Runtime.getRuntime().totalMemory()); }
protected static void setMaxMemPressure(double max) { if (max > 0 && max < 1) { doubleMemPressureMax = max; } }
@@ -25,51 +30,53 @@ public abstract class BaseCache<K,V> {
Long meanMemberSize = 0L;
Function<? super K,? extends V> mapper;
+ private AtomicLong purgeStep;
+ private AtomicLong upperBound;
+
BaseCache() {
- instanceSet.put(this,new CacheManagement(DEFAULT_PURGE_STEP,DEFAULT_UPPER_BOUND));
+ this(DEFAULT_UPPER_BOUND,DEFAULT_PURGE_STEP);
+ }
+
+ BaseCache(Long maxElements) {
+ this(maxElements,DEFAULT_PURGE_STEP);
}
- BaseCache(CacheManagement mgmt) {
- instanceSet.put(this, mgmt);
+
+ BaseCache(long maxElements, long purgeStepSize) {
+ // caches should always register themselves
+ CacheManager.getInstance().registerCache(this);
+
+ purgeStep = new AtomicLong(purgeStepSize);
+ upperBound = new AtomicLong(maxElements);
}
// Instance methods
- abstract void purge();
- abstract void purge(Long num);
public abstract V remove(K key);
public abstract V get(K key);
- public abstract void clear();
boolean notfull() { return getUpperBound() > internalCache.size(); }
- public void finalize() {
- clear();
- getInternalCache().remove(this);
- }
- protected ConcurrentHashMap<K,V> getInternalCache() { return internalCache; }
- protected CacheManagement getCacheManagement(BaseCache b) {
- return instanceSet.get(b);
- }
+
+ public Map<K,V> getCache() { return internalCache; }
+
public Long getUpperBound(){
- return instanceSet.get(this).getUpperBound();
+ return upperBound.get();
}
public Long getPurgeStep() {
- return instanceSet.get(this).getPurgeStep();
+ return purgeStep.get();
}
// Streams && Helpers
public Stream<Map.Entry<K,V>> stream() { return internalCache.entrySet().stream(); }
public Stream<Map.Entry<K,V>> parallelStream() { return internalCache.entrySet().parallelStream(); }
- public Stream<Map.Entry<K,V>> keysWhere (Predicate<? super K> predicate) { return parallelStream().filter(each -> predicate.test(each.getKey())); }
public Stream<Map.Entry<K,V>> valsWhere (Predicate<? super V> predicate) { return parallelStream().filter(each -> predicate.test(each.getValue())); }
public Stream<Map.Entry<K,V>> entriesOr (Predicate<? super K> keyPred, Predicate<? super V> valPred) { return parallelStream().filter(each -> keyPred.test(each.getKey()) || valPred.test(each.getValue())); }
- public Stream<Map.Entry<K,V>> entriesWhere (Predicate<? super Map.Entry<K,V>> predicate) { return parallelStream().filter(predicate); }
// Functional Interface
- public LongSupplier count = () -> internalCache.entrySet().size();
- public Function<Predicate<Map.Entry<K,V>>,Boolean> anyMatch = predicate -> internalCache.entrySet().parallelStream().anyMatch(predicate);
- public Function<Predicate<Map.Entry<K,V>>,Boolean> allMatch = predicate -> internalCache.entrySet().parallelStream().allMatch(predicate);
- public Function<Predicate<Map.Entry<K,V>>,Boolean> noneMatch = predicate -> internalCache.entrySet().parallelStream().noneMatch(predicate);
- public Consumer<Consumer<? super Map.Entry<K,V>>> forEach = action -> internalCache.entrySet().forEach(action);
- public Consumer<Consumer<? super Map.Entry<K,V>>> forEachOrdered = action -> internalCache.entrySet().forEach(action);
+// public LongSupplier count = () -> internalCache.entrySet().size();
+// public Function<Predicate<Map.Entry<K,V>>,Boolean> anyMatch = predicate -> internalCache.entrySet().parallelStream().anyMatch(predicate);
+// public Function<Predicate<Map.Entry<K,V>>,Boolean> allMatch = predicate -> internalCache.entrySet().parallelStream().allMatch(predicate);
+// public Function<Predicate<Map.Entry<K,V>>,Boolean> noneMatch = predicate -> internalCache.entrySet().parallelStream().noneMatch(predicate);
+// public Consumer<Consumer<? super Map.Entry<K,V>>> forEach = action -> internalCache.entrySet().forEach(action);
+// public Consumer<Consumer<? super Map.Entry<K,V>>> forEachOrdered = action -> internalCache.entrySet().forEach(action);
// Map Interface (Generics that concretes shouldn't have to worry about.
public Long size() { return (long) internalCache.size(); }
@@ -78,39 +85,15 @@ public Long getPurgeStep() {
public boolean containsValue(V value) { return internalCache.containsValue(value); }
public Set<K> keySet() { return internalCache.keySet(); }
+ public Stream<K> keysStream() { return keySet().stream(); }
+ public Stream<K> keysWhere(Predicate<? super K> predicate) { return keysStream().filter(predicate); }
+
public Collection<V> values() { return internalCache.values(); }
- public Set<Map.Entry<K,V>> entrySet() { return internalCache.entrySet(); }
+ public Stream<V> valuesStream() { return values().stream(); }
+ public Stream<V> valuesWhere(Predicate<? super V> predicate) { return valuesStream().filter(predicate); }
- // Stream Interface
- public <R> Stream<R> map(Function<? super Map.Entry<K,V>, ? extends R> mapper) { return internalCache.entrySet().parallelStream().map(mapper); }
- public <R> Stream<R> flatMap(Function<? super Map.Entry<K,V>, ? extends Stream<? extends R>> mapper) { return internalCache.entrySet().parallelStream().flatMap(mapper); }
- public IntStream mapToInt(ToIntFunction<? super Map.Entry<K,V>> mapper) { return internalCache.entrySet().parallelStream().mapToInt(mapper); }
- public IntStream flatMapToInt(Function<? super Map.Entry<K,V>, ? extends IntStream> mapper) { return internalCache.entrySet().parallelStream().flatMapToInt(mapper); }
- public LongStream mapToLong(ToLongFunction<? super Map.Entry<K,V>> mapper) { return internalCache.entrySet().parallelStream().mapToLong(mapper); }
- public LongStream flatMapToLong(Function<? super Map.Entry<K,V>, ? extends LongStream> mapper) { return internalCache.entrySet().parallelStream().flatMapToLong(mapper); }
- public DoubleStream mapToDouble(ToDoubleFunction<? super Map.Entry<K,V>> mapper) { return internalCache.entrySet().parallelStream().mapToDouble(mapper); }
- public DoubleStream flatMapToDouble(Function<? super Map.Entry<K,V>, ? extends DoubleStream> mapper) { return internalCache.entrySet().parallelStream().flatMapToDouble(mapper); }
- public Stream<Map.Entry<K,V>> filter(Predicate<? super Map.Entry<K,V>> predicate) { return internalCache.entrySet().parallelStream().filter(predicate); }
- public Stream<Map.Entry<K,V>> distinct() { return internalCache.entrySet().parallelStream().distinct(); }
- public Stream<Map.Entry<K,V>> sorted() { return internalCache.entrySet().parallelStream().sorted(); }
- public Stream<Map.Entry<K,V>> sorted(Comparator<? super Map.Entry<K,V>> comparator) { return internalCache.entrySet().parallelStream().sorted(comparator); }
- public Stream<Map.Entry<K,V>> peek(Consumer<? super Map.Entry<K,V>> action) { return internalCache.entrySet().parallelStream().peek(action); }
- public Stream<Map.Entry<K,V>> limit(long maxSize) { return internalCache.entrySet().parallelStream().limit(maxSize); }
- public Stream<Map.Entry<K,V>> skip(long n) { return internalCache.entrySet().parallelStream().skip(n); }
- public Object[] toArray() { return internalCache.entrySet().toArray(); }
- public <A> A[] toArray(IntFunction<A[]> generator) { return internalCache.entrySet().stream().toArray(generator); }
- public <R> R collect(Supplier<R> supplier, BiConsumer<R, ? super Map.Entry<K,V>> accumulator, BiConsumer<R,R> combiner) { return internalCache.entrySet().parallelStream().collect(supplier, accumulator, combiner); }
- public <R,A> R collect(Collector<? super Map.Entry<K,V>, A, R> collector) { return internalCache.entrySet().parallelStream().collect(collector); }
- public Map.Entry<K,V> reduce(Map.Entry<K,V> identity, BinaryOperator<Map.Entry<K,V>> accumulator) { return internalCache.entrySet().parallelStream().reduce(identity,accumulator); }
- public <U> U reduce(U identity, BiFunction<U, ? super Map.Entry<K,V>, U> accumulator, BinaryOperator<U> combiner) { return internalCache.entrySet().parallelStream().reduce(identity, accumulator, combiner); }
- public Optional<Map.Entry<K,V>> reduce(BinaryOperator<Map.Entry<K,V>> accumulator) { return internalCache.entrySet().parallelStream().reduce(accumulator); }
- public Optional<Map.Entry<K,V>> min(Comparator<? super Map.Entry<K,V>> comparator) { return internalCache.entrySet().parallelStream().min(comparator); }
- public Optional<Map.Entry<K,V>> max(Comparator<? super Map.Entry<K,V>> comparator) { return internalCache.entrySet().parallelStream().max(comparator); }
- public long count() { return internalCache.entrySet().parallelStream().count(); }
- public boolean anyMatch(Predicate<? super Map.Entry<K,V>> predicate) { return internalCache.entrySet().parallelStream().anyMatch(predicate); }
- public boolean allMatch(Predicate<? super Map.Entry<K,V>> predicate) { return internalCache.entrySet().parallelStream().allMatch(predicate); }
- public boolean noneMatch(Predicate<? super Map.Entry<K,V>> predicate) { return internalCache.entrySet().parallelStream().noneMatch(predicate); }
- public void forEach(Consumer<? super Map.Entry<K,V>> action) { internalCache.entrySet().forEach(action); }
- public void forEachOrdered(Consumer<? super Map.Entry<K,V>> action) { internalCache.entrySet().stream().sorted().forEach(action); }
+ public Set<Map.Entry<K,V>> entrySet() { return internalCache.entrySet(); }
+ public Stream<Map.Entry<K,V>> entryStream() { return entrySet().stream(); }
+ public Stream<Map.Entry<K,V>> entriesWhere(Predicate<? super Map.Entry<K,V>> predicate) { return entryStream().filter(predicate); }
}
diff --git a/src/main/java/com/github/jbharter/caching/Cache.java b/src/main/java/com/github/jbharter/caching/Cache.java
new file mode 100644
index 0000000..2361c7e
--- /dev/null
+++ b/src/main/java/com/github/jbharter/caching/Cache.java
@@ -0,0 +1,42 @@
+package com.github.jbharter.caching;
+
+import java.util.AbstractMap;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
+
+public interface Cache<K,V> extends PurgeEvent {
+
+ // Management
+ void purge();
+ void purge(Long purgeDepth);
+ void clear();
+
+ Map<K,V> getCache();
+
+ // Query
+ V get(K key);
+ V getOrDefault(K key, V defaultValue);
+
+ Long size();
+ boolean isEmpty();
+ boolean containsKey(K key);
+ boolean containsValue(V value);
+
+ Set<K> keySet();
+ Stream<K> keysStream();
+ Stream<K> keysWhere(Predicate<? super K> predicate);
+
+ Collection<V> values();
+ Stream<V> valuesStream();
+ Stream<V> valuesWhere(Predicate<? super V> predicate);
+
+ Set<Map.Entry<K,V>> entrySet();
+ Stream<Map.Entry<K,V>> entryStream();
+ Stream<Map.Entry<K,V>> entriesWhere(Predicate<? super Map.Entry<K,V>> predicate);
+
+ // Modify
+ V remove(K key);
+}
diff --git a/src/main/java/com/github/jbharter/caching/CacheManagement.java b/src/main/java/com/github/jbharter/caching/CacheManagement.java
deleted file mode 100644
index 596e201..0000000
--- a/src/main/java/com/github/jbharter/caching/CacheManagement.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.github.jbharter.caching;
-
-import java.util.concurrent.atomic.AtomicLong;
-
-class CacheManagement {
- private AtomicLong purgeStep;
- private AtomicLong upperBound;
-
- CacheManagement() {
- this.purgeStep = new AtomicLong(BaseCache.DEFAULT_PURGE_STEP);
- this.upperBound = new AtomicLong(BaseCache.DEFAULT_UPPER_BOUND);
- }
-
- CacheManagement(Long purgeStep, Long upperBound) {
- this.purgeStep = new AtomicLong(purgeStep);
- this.upperBound = new AtomicLong(upperBound);
- }
-
- public Long getPurgeStep() {
- return purgeStep.get();
- }
-
- public void setPurgeStep(Number purgeStep) {
- this.purgeStep.set(purgeStep.longValue());
- }
-
- public Long getUpperBound() {
- return upperBound.get();
- }
-
- public void setUpperBound(Number upperBound) {
- this.upperBound.set(upperBound.longValue());
- }
-
-}
diff --git a/src/main/java/com/github/jbharter/caching/CacheManager.java b/src/main/java/com/github/jbharter/caching/CacheManager.java
new file mode 100644
index 0000000..370f821
--- /dev/null
+++ b/src/main/java/com/github/jbharter/caching/CacheManager.java
@@ -0,0 +1,82 @@
+package com.github.jbharter.caching;
+
+// Cache manager
+// - Keeps track of active caches
+// - Monitors approximate memory usage of each cache
+// - Will ask caches to reduce their size
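+// - Caches register themselves at construction time (BaseCache's constructor calls
+//   CacheManager.getInstance().registerCache(this))
+// - A background timer samples heap pressure and, once it crosses the threshold, fires
+//   basicPurgeEvent()/criticalPurgeEvent() on every registered cache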
+
+import java.util.Collection;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.CopyOnWriteArraySet;
+import java.util.function.Supplier;
+
+public class CacheManager {
+
+ private volatile static CacheManager _instance;
+
+ private Collection<Cache> cacheCollection = new CopyOnWriteArraySet<>();
+
+ private Timer timer;
+
+ private Double threshold = 0.7;
+ private Long cleanupDelay = 5000L;
+ private Long cleanupPeriod = 5000L;
+
+
+ // Memory Monitoring
+ Supplier<Long> heapSize = () -> Runtime.getRuntime().totalMemory();
+ Supplier<Long> heapMax = () -> Runtime.getRuntime().maxMemory();
+ Supplier<Long> heapFree = () -> Runtime.getRuntime().freeMemory();
+ Supplier<Long> heapUsed = () -> heapSize.get() - heapFree.get();
+ Supplier<Double> heapRatio = () -> heapFree.get() / (double) heapSize.get();
+
+ Supplier<Double> maxPressure = () -> 1.0 - heapFree.get() / (double) heapMax.get();
+ Supplier<Double> memPressure = () -> 1.0 - heapFree.get() / (double) heapSize.get();
+
+ private CacheManager() {
+ timer = new Timer("cache-manager",false);
+
+ TimerTask timerTask = new TimerTask() {
+ @Override
+ public void run() {
+ // Observe and report
+ Double pressure = memPressure.get();
+ System.out.println("Pressure: " + pressure);
+ if (pressure > threshold) {
+ System.out.println("Pressure is above threshold.");
+
+ if (maxPressure.get() > threshold) { // critical
+ cacheCollection.forEach(PurgeEvent::criticalPurgeEvent);
+ } else {
+ cacheCollection.forEach(PurgeEvent::basicPurgeEvent);
+ }
+ }
+ }
+ };
+ timer.scheduleAtFixedRate(timerTask, cleanupDelay, cleanupPeriod);
+ }
+
+ public static CacheManager getInstance() {
+ if (_instance == null) {
+ synchronized (CacheManager.class) {
+ if (_instance == null) {
+ _instance = new CacheManager();
+ }
+ }
+ }
+ return _instance;
+ }
+
+ void registerCache(Cache c) {
+ getInstance().cacheCollection.add(c);
+ }
+
+
+
+
+
+
+
+}
diff --git a/src/main/java/com/github/jbharter/caching/FlashCache.java b/src/main/java/com/github/jbharter/caching/FlashCache.java
index e6d8780..1df7f8f 100644
--- a/src/main/java/com/github/jbharter/caching/FlashCache.java
+++ b/src/main/java/com/github/jbharter/caching/FlashCache.java
@@ -1,8 +1,6 @@
package com.github.jbharter.caching;
import java.util.Collection;
-import java.util.Map;
-import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
@@ -21,19 +19,19 @@ public FlashCache(Function function) {
mapper = function;
}
public FlashCache(Long maxElements) {
- super(new CacheManagement(DEFAULT_PURGE_STEP,maxElements));
+ super(maxElements);
internalCache = new ConcurrentHashMap<>();
}
public FlashCache(Long step, Long maxElements) {
- super(new CacheManagement(step,maxElements));
+ super(maxElements,step);
internalCache = new ConcurrentHashMap<>();
}
- void purge() {
+ public void purge() {
if (keyQueue.size() > 0) internalCache.remove(keyQueue.poll());
else { clear(); }
}
- void purge(Long num) { for (int i = 0; i < num; ++i) purge(); }
+ public void purge(Long num) { for (int i = 0; i < num; ++i) purge(); }
public FlashCache<K,V> setMapper(Function<? super K, ? extends V> map) { this.mapper = map; return this; }
private ConcurrentLinkedQueue<K> getKeyQueue() { return keyQueue; }
@@ -63,19 +61,19 @@ public V put(K key, V value) {
}
}
public V put(K key, Function<? super K,? extends V> mapper) { return put(key,mapper.apply(key)); }
- public void putAll(Map<? extends K, ? extends V> m) {
- if (m.size() + internalCache.size() <= getUpperBound()) {
- keyQueue.addAll(m.keySet());
- internalCache.putAll(m);
- } else if (m.size() + internalCache.size() > getUpperBound()) {
- Queue<K> keyset = new ConcurrentLinkedQueue<>(m.keySet());
- Queue<V> valset = new ConcurrentLinkedQueue<>(m.values());
- while (m.size() + internalCache.size() <= getUpperBound()) {
- put(keyset.poll(),valset.poll());
- }
- }
- }
- public void putAll(BaseCache<? extends K, ? extends V> bc) { putAll(bc.getInternalCache()); }
+// public void putAll(Map<? extends K, ? extends V> m) {
+// if (m.size() + internalCache.size() <= getUpperBound()) {
+// keyQueue.addAll(m.keySet());
+// internalCache.putAll(m);
+// } else if (m.size() + internalCache.size() > getUpperBound()) {
+// Queue<K> keyset = new ConcurrentLinkedQueue<>(m.keySet());
+// Queue<V> valset = new ConcurrentLinkedQueue<>(m.values());
+// while (m.size() + internalCache.size() <= getUpperBound()) {
+// put(keyset.poll(),valset.poll());
+// }
+// }
+// }
+// public void putAll(BaseCache<? extends K, ? extends V> bc) { putAll(bc.getInternalCache()); }
public V remove(K key) {
keyQueue.remove(key);
@@ -86,8 +84,23 @@ public V get(K key) {
return (internalCache.containsKey(key)) ? internalCache.get(key) : put(key);
}
+ @Override
+ public V getOrDefault(K key, V defaultValue) {
+ return internalCache.getOrDefault(key, defaultValue);
+ }
+
public void clear() {
keyQueue.clear();
internalCache.clear();
}
+
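+ // Hooks invoked by CacheManager when memory pressure crosses its threshold; for now they only
+ // log and do not yet shed any entries.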
+ @Override
+ public void basicPurgeEvent() {
+ System.out.println("FlashCache purge event");
+ }
+
+ @Override
+ public void criticalPurgeEvent() {
+ System.out.println("FlashCache critical purge event");
+ }
}
diff --git a/src/main/java/com/github/jbharter/caching/PurgeEvent.java b/src/main/java/com/github/jbharter/caching/PurgeEvent.java
new file mode 100644
index 0000000..2d52ff1
--- /dev/null
+++ b/src/main/java/com/github/jbharter/caching/PurgeEvent.java
@@ -0,0 +1,6 @@
+package com.github.jbharter.caching;
+
+public interface PurgeEvent {
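+ // Fired by CacheManager's monitor thread: basicPurgeEvent() when pressure on the current heap
+ // crosses the threshold, criticalPurgeEvent() when pressure against the max heap does as well.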
+ void basicPurgeEvent();
+ void criticalPurgeEvent();
+}
diff --git a/src/main/java/com/github/jbharter/caching/SortedCache.java b/src/main/java/com/github/jbharter/caching/SortedCache.java
index d10f876..19ba86c 100644
--- a/src/main/java/com/github/jbharter/caching/SortedCache.java
+++ b/src/main/java/com/github/jbharter/caching/SortedCache.java
@@ -15,12 +15,12 @@ public class SortedCache<K,V> extends BaseCache<K,V> {
private PriorityBlockingQueue<Long> internalComputeTimeQueue = new PriorityBlockingQueue<>();
public SortedCache() {
- super(new CacheManagement(-1L,-1L));
+ //super(new CacheManagement(-1L,-1L));
internalCache = new ConcurrentHashMap<>();
}
public SortedCache(SortedCache c) {
- super(new CacheManagement(-1L,-1L));
- internalCache = new ConcurrentHashMap<>(c.getInternalCache());
+ //super(new CacheManagement(-1L,-1L));
+ internalCache = new ConcurrentHashMap<>(c.internalCache);
internalComputeTimeMap = new ConcurrentHashMap<>(c.getInternalComputeTimeMap());
internalComputeTimeQueue = new PriorityBlockingQueue<>(c.getInternalComputeTimeQueue());
mapper = c.mapper;
@@ -46,11 +46,16 @@ public V put(K key, Function<? super K,? extends V> map)
public void setMapper(Function<? super K,? extends V> map) { this.mapper = map; }
public V get(K key) { return internalCache.getOrDefault(key, this.mapper != null ? put(key) : null); }
- void purge() {
+ @Override
+ public V getOrDefault(K key, V defaultValue) {
+ return internalCache.getOrDefault(key, defaultValue);
+ }
+
+ public void purge() {
if (size() > 0) poll();
else clear();
}
- void purge(Long num) { for (int i = 0; i < num; ++i) purge(); }
+ public void purge(Long num) { for (int i = 0; i < num; ++i) purge(); }
public V remove(Object key) {
Set<Long> rset = internalComputeTimeMap.entrySet().parallelStream().filter(any -> any.getValue().equals(key)).map(Map.Entry::getKey).collect(Collectors.toSet());
rset.forEach(inSet -> {
@@ -69,4 +74,13 @@ public void clear()
public Collection<V> values() { return internalCache.values(); }
public Set<Map.Entry<K,V>> entrySet() { return internalCache.entrySet(); }
+ @Override
+ public void basicPurgeEvent() {
+
+ }
+
+ @Override
+ public void criticalPurgeEvent() {
+
+ }
}
diff --git a/src/test/java/com.github.jbharter.caching/FlashCacheTest.java b/src/test/java/com.github.jbharter.caching/FlashCacheTest.java
index d39e386..38c1c75 100644
--- a/src/test/java/com.github.jbharter.caching/FlashCacheTest.java
+++ b/src/test/java/com.github.jbharter.caching/FlashCacheTest.java
@@ -14,18 +14,18 @@ public void testBasicCall() {
test.setMapper(String::toLowerCase);
test.put("OTHER");
- assertTrue("Basic Call Test", test.get("rand").equals("RAND"));
- assertTrue("Basic Call Test", test.get("OTHER").equals("other"));
+ assertEquals("Basic Call Test", "RAND", test.get("rand"));
+ assertEquals("Basic Call Test", "other", test.get("OTHER"));
}
public void testUpperBounds() {
- FlashCache<String,String> test = new FlashCache<>(1L,10L);
- test.setMapper(String::trim);
+ FlashCache<String,Integer> test = new FlashCache<>(1L,10L);
+ test.setMapper(Integer::parseInt);
test.put(Arrays.asList("0","1","2","3","4","5","6","7","8","9","10","11","12"));
- assertTrue("Upper Bounds Test", test.size() == 10);
+ assertEquals("Upper Bounds Test", 10, (long) test.size());
}
public void testClear() {
@@ -34,9 +34,9 @@ public void testClear() {
test.put("key","val");
test.put("key0","val0");
- assertTrue("Test Clear", test.size() == 2);
+ assertEquals("Test Clear", 2, (long) test.size());
test.clear();
- assertTrue("Test Clear", test.size() == 0);
+ assertEquals("Test Clear", 0, (long) test.size());
}
}
diff --git a/src/test/java/com.github.jbharter.caching/SortedCacheTest.java b/src/test/java/com.github.jbharter.caching/SortedCacheTest.java
index f9a966a..20f6ac8 100644
--- a/src/test/java/com.github.jbharter.caching/SortedCacheTest.java
+++ b/src/test/java/com.github.jbharter.caching/SortedCacheTest.java
@@ -12,11 +12,11 @@ public void testBasicCall() {
test.setMapper(String::trim);
test.put(" a string to be trimmed ");
- assertTrue("Basic Call", test.get(" a string to be trimmed ").equals("a string to be trimmed"));
+ assertEquals("Basic Call", "a string to be trimmed", test.get(" a string to be trimmed "));
test.setMapper(each -> each.trim().toUpperCase());
test.put(" another ");
- assertTrue("Basic Call", test.get(" another ").equals("ANOTHER"));
+ assertEquals("Basic Call", "ANOTHER", test.get(" another "));
}
public void testComputeQueue() {
@@ -51,8 +51,8 @@ public void testClear() {
test.setMapper(String::trim);
test.put(Arrays.asList("0","1","2","3","4","5","6","7","8","9","10","11","12"));
- assertTrue("Test Clear", test.size() == 13);
+ assertEquals("Test Clear", 13, (long) test.size());
test.clear();
- assertTrue("Test Clear", test.size() == 0);
+ assertEquals("Test Clear", 0, (long) test.size());
}
}