Merge branch 'stable-3.4'

* stable-3.4:
  Front CacheChronicleMap with a Guava cache

Change-Id: I0c7b73dd04e0155ab4170914c1931bce6f7828da
diff --git a/BUILD b/BUILD
index 85222ca..45afb1a 100644
--- a/BUILD
+++ b/BUILD
@@ -28,6 +28,7 @@
         "@chronicle-values//jar",
         "@chronicle-wire//jar",
         "@dev-jna//jar",
+        "@error-prone-annotations//jar",
         "@javapoet//jar",
         "@jna-platform//jar",
         "@commons-lang3//jar",
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AutoAdjustCaches.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AutoAdjustCaches.java
index d10ffc8..4298a81 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AutoAdjustCaches.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AutoAdjustCaches.java
@@ -20,7 +20,6 @@
 import com.google.gerrit.common.Nullable;
 import com.google.gerrit.extensions.registration.DynamicMap;
 import com.google.gerrit.extensions.restapi.AuthException;
-import com.google.gerrit.metrics.DisabledMetricMaker;
 import com.google.gerrit.server.config.GerritServerConfig;
 import com.google.gerrit.server.config.SitePaths;
 import com.google.gerrit.server.permissions.PermissionBackendException;
@@ -91,7 +90,7 @@
         if (!(avgSizes.getKey() > 0) || !(avgSizes.getValue() > 0)) {
           logger.atWarning().log(
               "Cache [%s] has %s entries, but average of (key: %d, value: %d). Skipping.",
-              cacheName, currCache.size(), avgSizes.getKey(), avgSizes.getValue());
+              cacheName, currCache.diskStats().size(), avgSizes.getKey(), avgSizes.getValue());
           continue;
         }
 
@@ -119,10 +118,7 @@
         if (!dryRun) {
           ChronicleMapCacheImpl<Object, Object> newCache =
               new ChronicleMapCacheImpl<>(
-                  currCache.getCacheDefinition(),
-                  newChronicleMapCacheConfig,
-                  null,
-                  new DisabledMetricMaker());
+                  currCache.getCacheDefinition(), newChronicleMapCacheConfig);
 
           progressMonitor.beginTask(
               String.format("[%s] migrate content", cacheName), (int) currCache.size());
@@ -220,7 +216,7 @@
     config.setLong("cache", cacheName, "maxBloatFactor", maxBloatFactor);
   }
 
-  @SuppressWarnings("unchecked")
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private Map<String, ChronicleMapCacheImpl<Object, Object>> getChronicleMapCaches() {
     return cacheMap.plugins().stream()
         .map(cacheMap::byPlugin)
@@ -229,7 +225,9 @@
                 pluginCaches.entrySet().stream()
                     .map(entry -> ImmutablePair.of(entry.getKey(), entry.getValue().get())))
         .filter(
-            pair -> pair.getValue() instanceof ChronicleMapCacheImpl && pair.getValue().size() > 0)
+            pair ->
+                pair.getValue() instanceof ChronicleMapCacheImpl
+                    && ((ChronicleMapCacheImpl) pair.getValue()).diskStats().size() > 0)
         .collect(
             Collectors.toMap(
                 ImmutablePair::getKey, p -> (ChronicleMapCacheImpl<Object, Object>) p.getValue()));
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheDefProxy.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheDefProxy.java
new file mode 100644
index 0000000..3bbb9a8
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheDefProxy.java
@@ -0,0 +1,113 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.Weigher;
+import com.google.gerrit.common.Nullable;
+import com.google.gerrit.server.cache.PersistentCacheDef;
+import com.google.gerrit.server.cache.serialize.CacheSerializer;
+import com.google.inject.TypeLiteral;
+import java.time.Duration;
+
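+/**
+ * Decorator of a {@link PersistentCacheDef} that delegates to the underlying definition but
+ * adapts its {@link Weigher} so that the in-memory cache, which stores {@link TimedValue}
+ * wrappers, can reuse the original weigher on the unwrapped value.
+ */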
+class ChronicleMapCacheDefProxy<K, V> implements PersistentCacheDef<K, V> {
+  private final PersistentCacheDef<K, V> source;
+
+  ChronicleMapCacheDefProxy(PersistentCacheDef<K, V> source) {
+    this.source = source;
+  }
+
+  @Override
+  @Nullable
+  public Duration expireAfterWrite() {
+    return source.expireAfterWrite();
+  }
+
+  @Override
+  @Nullable
+  public Duration expireFromMemoryAfterAccess() {
+    return source.expireFromMemoryAfterAccess();
+  }
+
+  @Override
+  public Duration refreshAfterWrite() {
+    return source.refreshAfterWrite();
+  }
+
+  @Override
+  public Weigher<K, V> weigher() {
+    Weigher<K, V> weigher = source.weigher();
+    if (weigher == null) {
+      return null;
+    }
+
+    // Wrap the original weigher so that the weight is computed on the stored
+    // value itself rather than on its TimedValue wrapper
+    Weigher<K, TimedValue<V>> holderWeigher = (k, v) -> weigher.weigh(k, v.getValue());
+    @SuppressWarnings("unchecked")
+    Weigher<K, V> ret = (Weigher<K, V>) holderWeigher;
+    return ret;
+  }
+
+  @Override
+  public String name() {
+    return source.name();
+  }
+
+  @Override
+  public String configKey() {
+    return source.configKey();
+  }
+
+  @Override
+  public TypeLiteral<K> keyType() {
+    return source.keyType();
+  }
+
+  @Override
+  public TypeLiteral<V> valueType() {
+    return source.valueType();
+  }
+
+  @Override
+  public long maximumWeight() {
+    return source.maximumWeight();
+  }
+
+  @Override
+  public long diskLimit() {
+    return source.diskLimit();
+  }
+
+  @Override
+  public CacheLoader<K, V> loader() {
+    return source.loader();
+  }
+
+  @Override
+  public int version() {
+    return source.version();
+  }
+
+  @Override
+  public CacheSerializer<K> keySerializer() {
+    return source.keySerializer();
+  }
+
+  @Override
+  public CacheSerializer<V> valueSerializer() {
+    return source.valueSerializer();
+  }
+}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheFactory.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheFactory.java
index 088798b..e1eb5ab 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheFactory.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheFactory.java
@@ -13,6 +13,7 @@
 // limitations under the License.
 package com.googlesource.gerrit.modules.cache.chroniclemap;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
@@ -26,6 +27,7 @@
 import com.google.gerrit.server.cache.PersistentCacheDef;
 import com.google.gerrit.server.config.GerritServerConfig;
 import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.server.logging.LoggingContextAwareExecutorService;
 import com.google.gerrit.server.logging.LoggingContextAwareScheduledExecutorService;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
@@ -40,6 +42,7 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
+import net.openhft.chronicle.map.ChronicleMap;
 import org.eclipse.jgit.lib.Config;
 
 @Singleton
@@ -50,6 +53,8 @@
   private final List<ChronicleMapCacheImpl<?, ?>> caches;
   private final ScheduledExecutorService cleanup;
 
+  private final LoggingContextAwareExecutorService storePersistenceExecutor;
+
   @Inject
   ChronicleMapCacheFactory(
       MemoryCacheFactory memCacheFactory,
@@ -71,6 +76,10 @@
                     .setNameFormat("ChronicleMap-Prune-%d")
                     .setDaemon(true)
                     .build()));
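+    // Single-threaded executor used to persist freshly loaded values to the
+    // ChronicleMap store without blocking the calling thread.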
+    this.storePersistenceExecutor =
+        new LoggingContextAwareExecutorService(
+            Executors.newFixedThreadPool(
+                1, new ThreadFactoryBuilder().setNameFormat("ChronicleMap-Store-%d").build()));
   }
 
   @Override
@@ -82,9 +91,40 @@
             fileName(cacheDir, in.name(), in.version()),
             in.expireAfterWrite(),
             in.refreshAfterWrite());
+    return build(in, backend, config, metricMaker);
+  }
+
+  @SuppressWarnings("unchecked")
+  @VisibleForTesting
+  <K, V> Cache<K, V> build(
+      PersistentCacheDef<K, V> in,
+      CacheBackend backend,
+      ChronicleMapCacheConfig config,
+      MetricMaker metricMaker) {
+    ChronicleMapCacheDefProxy<K, V> def = new ChronicleMapCacheDefProxy<>(in);
+
     ChronicleMapCacheImpl<K, V> cache;
     try {
-      cache = new ChronicleMapCacheImpl<>(in, config, null, metricMaker);
+      ChronicleMap<KeyWrapper<K>, TimedValue<V>> store =
+          ChronicleMapCacheImpl.createOrRecoverStore(in, config);
+
+      ChronicleMapCacheLoader<K, V> memLoader =
+          new ChronicleMapCacheLoader<>(
+              storePersistenceExecutor, store, config.getExpireAfterWrite());
+
+      LoadingCache<K, TimedValue<V>> mem =
+          (LoadingCache<K, TimedValue<V>>)
+              memCacheFactory.build(def, (CacheLoader<K, V>) memLoader, backend);
+
+      cache =
+          new ChronicleMapCacheImpl<>(
+              in,
+              config,
+              metricMaker,
+              memLoader,
+              new InMemoryCacheLoadingFromStoreImpl<>(mem, false),
+              store);
+
     } catch (IOException e) {
       throw new UncheckedIOException(e);
     }
@@ -103,9 +143,40 @@
             fileName(cacheDir, in.name(), in.version()),
             in.expireAfterWrite(),
             in.refreshAfterWrite());
+    return build(in, loader, backend, config, metricMaker);
+  }
+
+  @SuppressWarnings("unchecked")
+  @VisibleForTesting
+  public <K, V> LoadingCache<K, V> build(
+      PersistentCacheDef<K, V> in,
+      CacheLoader<K, V> loader,
+      CacheBackend backend,
+      ChronicleMapCacheConfig config,
+      MetricMaker metricMaker) {
     ChronicleMapCacheImpl<K, V> cache;
+    ChronicleMapCacheDefProxy<K, V> def = new ChronicleMapCacheDefProxy<>(in);
+
     try {
-      cache = new ChronicleMapCacheImpl<>(in, config, loader, metricMaker);
+      ChronicleMap<KeyWrapper<K>, TimedValue<V>> store =
+          ChronicleMapCacheImpl.createOrRecoverStore(in, config);
+
+      ChronicleMapCacheLoader<K, V> memLoader =
+          new ChronicleMapCacheLoader<>(
+              storePersistenceExecutor, store, loader, config.getExpireAfterWrite());
+
+      LoadingCache<K, TimedValue<V>> mem =
+          (LoadingCache<K, TimedValue<V>>)
+              memCacheFactory.build(def, (CacheLoader<K, V>) memLoader, backend);
+
+      cache =
+          new ChronicleMapCacheImpl<>(
+              in,
+              config,
+              metricMaker,
+              memLoader,
+              new InMemoryCacheLoadingFromStoreImpl<>(mem, true),
+              store);
     } catch (IOException e) {
       throw new UncheckedIOException(e);
     }
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
index 77d3887..af35c88 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
@@ -14,10 +14,11 @@
 package com.googlesource.gerrit.modules.cache.chroniclemap;
 
 import com.google.common.cache.AbstractLoadingCache;
-import com.google.common.cache.CacheLoader;
 import com.google.common.cache.CacheStats;
 import com.google.common.flogger.FluentLogger;
+import com.google.common.util.concurrent.MoreExecutors;
 import com.google.gerrit.metrics.Description;
+import com.google.gerrit.metrics.DisabledMetricMaker;
 import com.google.gerrit.metrics.MetricMaker;
 import com.google.gerrit.server.cache.PersistentCache;
 import com.google.gerrit.server.cache.PersistentCacheDef;
@@ -27,7 +28,6 @@
 import java.time.Duration;
 import java.time.Instant;
 import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.LongAdder;
 import net.openhft.chronicle.map.ChronicleMap;
@@ -39,34 +39,60 @@
   private static final FluentLogger logger = FluentLogger.forEnclosingClass();
 
   private final ChronicleMapCacheConfig config;
-  private final CacheLoader<K, V> loader;
   private final ChronicleMap<KeyWrapper<K>, TimedValue<V>> store;
   private final LongAdder hitCount = new LongAdder();
   private final LongAdder missCount = new LongAdder();
   private final LongAdder loadSuccessCount = new LongAdder();
   private final LongAdder loadExceptionCount = new LongAdder();
   private final LongAdder totalLoadTime = new LongAdder();
-  private final LongAdder evictionCount = new LongAdder();
   private final InMemoryLRU<K> hotEntries;
   private final PersistentCacheDef<K, V> cacheDefinition;
+  private final ChronicleMapCacheLoader<K, V> memLoader;
+  private final InMemoryCache<K, V> mem;
 
-  @SuppressWarnings({"unchecked", "cast", "rawtypes"})
-  ChronicleMapCacheImpl(
-      PersistentCacheDef<K, V> def,
-      ChronicleMapCacheConfig config,
-      CacheLoader<K, V> loader,
-      MetricMaker metricMaker)
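+  /**
+   * Builds a cache without an in-memory front layer and with metrics disabled: lookups go
+   * straight to the ChronicleMap store. Used by AutoAdjustCaches and H2MigrationServlet.
+   */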
+  ChronicleMapCacheImpl(PersistentCacheDef<K, V> def, ChronicleMapCacheConfig config)
       throws IOException {
-    CacheSerializers.registerCacheDef(def);
 
     this.cacheDefinition = def;
     this.config = config;
-    this.loader = loader;
     this.hotEntries =
         new InMemoryLRU<>(
             (int) Math.max(config.getMaxEntries() * config.getpercentageHotKeys() / 100, 1));
+    this.store = createOrRecoverStore(def, config);
+    this.memLoader =
+        new ChronicleMapCacheLoader<>(
+            MoreExecutors.directExecutor(), store, config.getExpireAfterWrite());
+    this.mem = memLoader.asInMemoryCacheBypass();
+
+    ChronicleMapStorageMetrics metrics = new ChronicleMapStorageMetrics(new DisabledMetricMaker());
+    metrics.registerCallBackMetrics(def.name(), store, hotEntries);
+  }
+
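+  /**
+   * Builds a cache from externally created components (in-memory front cache, loader and
+   * ChronicleMap store), as wired up by ChronicleMapCacheFactory.
+   */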
+  ChronicleMapCacheImpl(
+      PersistentCacheDef<K, V> def,
+      ChronicleMapCacheConfig config,
+      MetricMaker metricMaker,
+      ChronicleMapCacheLoader<K, V> memLoader,
+      InMemoryCache<K, V> mem,
+      ChronicleMap<KeyWrapper<K>, TimedValue<V>> store) {
+
+    this.cacheDefinition = def;
+    this.config = config;
+    this.hotEntries =
+        new InMemoryLRU<>(
+            (int) Math.max(config.getMaxEntries() * config.getpercentageHotKeys() / 100, 1));
+    this.memLoader = memLoader;
+    this.mem = mem;
+    this.store = store;
 
     ChronicleMapStorageMetrics metrics = new ChronicleMapStorageMetrics(metricMaker);
+    metrics.registerCallBackMetrics(def.name(), store, hotEntries);
+  }
+
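+  /**
+   * Registers the cache serializers and creates the persistent ChronicleMap, recovering the
+   * existing file when one is already present on disk.
+   */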
+  @SuppressWarnings({"unchecked", "cast", "rawtypes"})
+  static <K, V> ChronicleMap<KeyWrapper<K>, TimedValue<V>> createOrRecoverStore(
+      PersistentCacheDef<K, V> def, ChronicleMapCacheConfig config) throws IOException {
+    CacheSerializers.registerCacheDef(def);
 
     final Class<KeyWrapper<K>> keyWrapperClass = (Class<KeyWrapper<K>>) (Class) KeyWrapper.class;
     final Class<TimedValue<V>> valueWrapperClass = (Class<TimedValue<V>>) (Class) TimedValue.class;
@@ -95,7 +121,8 @@
             + "cache, since the file size is pre-allocated rather than being "
             + "a function of the number of entries in the cache",
         def.diskLimit(), def.name());
-    store = mapBuilder.createOrRecoverPersistedTo(config.getPersistedFile());
+    ChronicleMap<KeyWrapper<K>, TimedValue<V>> store =
+        mapBuilder.createOrRecoverPersistedTo(config.getPersistedFile());
 
     logger.atInfo().log(
         "Initialized '%s'|version: %s|avgKeySize: %s bytes|avgValueSize:"
@@ -110,7 +137,7 @@
         store.remainingAutoResizes(),
         store.percentageFreeSpace());
 
-    metrics.registerCallBackMetrics(def.name(), store, hotEntries);
+    return store;
   }
 
   protected PersistentCacheDef<K, V> getCacheDefinition() {
@@ -171,48 +198,28 @@
     return config;
   }
 
-  @SuppressWarnings("unchecked")
   @Override
   public V getIfPresent(Object objKey) {
-    KeyWrapper<K> keyWrapper = (KeyWrapper<K>) new KeyWrapper<>(objKey);
-    if (store.containsKey(keyWrapper)) {
-      TimedValue<V> vTimedValue = store.get(keyWrapper);
-      if (!expired(vTimedValue.getCreated())) {
-        hitCount.increment();
-        hotEntries.add((K) objKey);
-        return vTimedValue.getValue();
-      }
-      invalidate(objKey);
+    TimedValue<V> timedValue = mem.getIfPresent(objKey);
+    if (timedValue == null) {
+      missCount.increment();
+      return null;
     }
-    missCount.increment();
-    return null;
+
+    return timedValue.getValue();
   }
 
   @Override
   public V get(K key) throws ExecutionException {
     KeyWrapper<K> keyWrapper = new KeyWrapper<>(key);
-    if (store.containsKey(keyWrapper)) {
-      TimedValue<V> vTimedValue = store.get(keyWrapper);
-      if (!needsRefresh(vTimedValue.getCreated())) {
-        hitCount.increment();
-        hotEntries.add(key);
-        return vTimedValue.getValue();
+
+    if (mem.isLoadingCache()) {
+      TimedValue<V> valueHolder = mem.get(key);
+      if (needsRefresh(valueHolder.getCreated())) {
+        store.remove(keyWrapper);
+        mem.refresh(key);
       }
-    }
-    missCount.increment();
-    if (loader != null) {
-      V v = null;
-      try {
-        long start = System.nanoTime();
-        v = loader.load(key);
-        totalLoadTime.add(System.nanoTime() - start);
-        loadSuccessCount.increment();
-      } catch (Exception e) {
-        loadExceptionCount.increment();
-        throw new ExecutionException(String.format("Could not load value %s", key), e);
-      }
-      put(key, v);
-      return v;
+      return valueHolder.getValue();
     }
 
     loadExceptionCount.increment();
@@ -222,15 +229,24 @@
 
   @Override
   public V get(K key, Callable<? extends V> valueLoader) throws ExecutionException {
-    KeyWrapper<K> keyWrapper = new KeyWrapper<>(key);
-    if (store.containsKey(keyWrapper)) {
-      TimedValue<V> vTimedValue = store.get(keyWrapper);
-      if (!needsRefresh(vTimedValue.getCreated())) {
-        hitCount.increment();
-        return vTimedValue.getValue();
+    try {
+      return mem.get(key, () -> getFromStore(key, valueLoader)).getValue();
+    } catch (Exception e) {
+      if (e instanceof ExecutionException) {
+        throw (ExecutionException) e;
       }
+      throw new ExecutionException(e);
     }
-    missCount.increment();
+  }
+
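+  /** Loads from the ChronicleMap store, falling back to valueLoader and persisting its result. */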
+  private TimedValue<V> getFromStore(K key, Callable<? extends V> valueLoader)
+      throws ExecutionException {
+
+    TimedValue<V> valueFromCache = memLoader.loadIfPresent(key);
+    if (valueFromCache != null) {
+      return valueFromCache;
+    }
+
     V v = null;
     try {
       long start = System.nanoTime();
@@ -241,8 +257,9 @@
       loadExceptionCount.increment();
       throw new ExecutionException(String.format("Could not load key %s", key), e);
     }
-    put(key, v);
-    return v;
+    TimedValue<V> timedValue = new TimedValue<>(v);
+    putTimedToStore(key, timedValue);
+    return timedValue;
   }
 
   /**
@@ -260,6 +277,7 @@
     TimedValue<?> wrappedValue = new TimedValue<>(value, created.toInstant().toEpochMilli());
     KeyWrapper<?> wrappedKey = new KeyWrapper<>(key);
     store.put((KeyWrapper<K>) wrappedKey, (TimedValue<V>) wrappedValue);
+    mem.put((K) key, (TimedValue<V>) wrappedValue);
   }
 
   /**
@@ -275,13 +293,19 @@
   @SuppressWarnings("unchecked")
   public void putUnchecked(KeyWrapper<Object> wrappedKey, TimedValue<Object> wrappedValue) {
     store.put((KeyWrapper<K>) wrappedKey, (TimedValue<V>) wrappedValue);
+    mem.put((K) wrappedKey.getValue(), (TimedValue<V>) wrappedValue);
   }
 
   @Override
   public void put(K key, V val) {
+    TimedValue<V> timedVal = new TimedValue<>(val);
+    mem.put(key, timedVal);
+    putTimedToStore(key, timedVal);
+  }
+
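+  /** Writes the timed value to the persistent store and records the key as a hot entry. */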
+  void putTimedToStore(K key, TimedValue<V> timedVal) {
     KeyWrapper<K> wrappedKey = new KeyWrapper<>(key);
-    TimedValue<V> wrappedValue = new TimedValue<>(val);
-    store.put(wrappedKey, wrappedValue);
+    store.put(wrappedKey, timedVal);
     hotEntries.add(key);
   }
 
@@ -289,7 +313,7 @@
     if (!config.getExpireAfterWrite().isZero()) {
       store.forEachEntry(
           c -> {
-            if (expired(c.value().get().getCreated())) {
+            if (memLoader.expired(c.value().get().getCreated())) {
               hotEntries.remove(c.key().get().getValue());
               c.context().remove(c);
             }
@@ -301,12 +325,6 @@
     }
   }
 
-  private boolean expired(long created) {
-    Duration expireAfterWrite = config.getExpireAfterWrite();
-    Duration age = Duration.between(Instant.ofEpochMilli(created), TimeUtil.now());
-    return !expireAfterWrite.isZero() && age.compareTo(expireAfterWrite) > 0;
-  }
-
   private boolean needsRefresh(long created) {
     final Duration refreshAfterWrite = config.getRefreshAfterWrite();
     Duration age = Duration.between(Instant.ofEpochMilli(created), TimeUtil.now());
@@ -333,39 +351,42 @@
   public void invalidate(Object key) {
     KeyWrapper<K> wrappedKey = (KeyWrapper<K>) new KeyWrapper<>(key);
     store.remove(wrappedKey);
-    hotEntries.remove(wrappedKey.getValue());
+    mem.invalidate(key);
+    hotEntries.remove((K) key);
   }
 
   @Override
   public void invalidateAll() {
     store.clear();
     hotEntries.invalidateAll();
+    mem.invalidateAll();
   }
 
-  ConcurrentMap<KeyWrapper<K>, TimedValue<V>> getStore() {
+  ChronicleMap<KeyWrapper<K>, TimedValue<V>> getStore() {
     return store;
   }
 
   @Override
   public long size() {
-    return store.size();
+    return mem.size();
   }
 
   @Override
   public CacheStats stats() {
-    return new CacheStats(
-        hitCount.longValue(),
-        missCount.longValue(),
-        loadSuccessCount.longValue(),
-        loadExceptionCount.longValue(),
-        totalLoadTime.longValue(),
-        evictionCount.longValue());
+    return mem.stats();
   }
 
   @Override
   public DiskStats diskStats() {
     return new DiskStats(
-        size(), config.getPersistedFile().length(), hitCount.longValue(), missCount.longValue());
+        store.longSize(),
+        config.getPersistedFile().length(),
+        hitCount.longValue(),
+        missCount.longValue());
+  }
+
+  public CacheStats memStats() {
+    return mem.stats();
   }
 
   public void close() {
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheLoader.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheLoader.java
new file mode 100644
index 0000000..1da3257
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheLoader.java
@@ -0,0 +1,215 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.CacheStats;
+import com.google.common.flogger.FluentLogger;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.gerrit.server.logging.Metadata;
+import com.google.gerrit.server.logging.TraceContext;
+import com.google.gerrit.server.logging.TraceContext.TraceTimer;
+import com.google.gerrit.server.util.time.TimeUtil;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.Optional;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.LongAdder;
+import net.openhft.chronicle.map.ChronicleMap;
+
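+/**
+ * Guava {@link CacheLoader} that first looks entries up in the ChronicleMap store and, when a
+ * backend loader is configured, falls back to it and persists the loaded value to the store
+ * asynchronously via the storePersistenceExecutor.
+ */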
+class ChronicleMapCacheLoader<K, V> extends CacheLoader<K, TimedValue<V>> {
+  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
+  private final Executor storePersistenceExecutor;
+  private final Optional<CacheLoader<K, V>> loader;
+  private final ChronicleMap<KeyWrapper<K>, TimedValue<V>> store;
+  private final LongAdder loadSuccessCount = new LongAdder();
+  private final LongAdder loadExceptionCount = new LongAdder();
+  private final LongAdder totalLoadTime = new LongAdder();
+  private final LongAdder hitCount = new LongAdder();
+  private final LongAdder missCount = new LongAdder();
+  private final Duration expireAfterWrite;
+
+  /**
+   * Creates a loader for fetching entries from a ChronicleMap store and an external data source.
+   *
+   * @param storePersistenceExecutor executor for async loading/storage to ChronicleMap
+   * @param store the ChronicleMap storage
+   * @param loader the data loader from the external source
+   * @param expireAfterWrite maximum lifetime of the data loaded into ChronicleMap
+   */
+  ChronicleMapCacheLoader(
+      Executor storePersistenceExecutor,
+      ChronicleMap<KeyWrapper<K>, TimedValue<V>> store,
+      CacheLoader<K, V> loader,
+      Duration expireAfterWrite) {
+    this.storePersistenceExecutor = storePersistenceExecutor;
+    this.store = store;
+    this.loader = Optional.of(loader);
+    this.expireAfterWrite = expireAfterWrite;
+  }
+
+  /**
+   * Creates a loader for fetching entries from a ChronicleMap store.
+   *
+   * @param storePersistenceExecutor executor for async loading/storage to ChronicleMap
+   * @param store the ChronicleMap storage
+   * @param expireAfterWrite maximum lifetime of the data loaded into ChronicleMap
+   */
+  ChronicleMapCacheLoader(
+      Executor storePersistenceExecutor,
+      ChronicleMap<KeyWrapper<K>, TimedValue<V>> store,
+      Duration expireAfterWrite) {
+    this.storePersistenceExecutor = storePersistenceExecutor;
+    this.store = store;
+    this.loader = Optional.empty();
+    this.expireAfterWrite = expireAfterWrite;
+  }
+
+  @Override
+  public TimedValue<V> load(K key) throws Exception {
+    try (TraceTimer timer =
+        TraceContext.newTimer(
+            "Loading value from cache", Metadata.builder().cacheKey(key.toString()).build())) {
+      TimedValue<V> h = loadIfPresent(key);
+      if (h != null) {
+        return h;
+      }
+
+      if (loader.isPresent()) {
+        missCount.increment();
+        long start = System.nanoTime();
+        TimedValue<V> loadedValue = new TimedValue<>(loader.get().load(key));
+        loadSuccessCount.increment();
+        totalLoadTime.add(System.nanoTime() - start);
+        storePersistenceExecutor.execute(() -> store.put(new KeyWrapper<>(key), loadedValue));
+        return loadedValue;
+      }
+
+      throw new UnsupportedOperationException("No loader defined");
+    } catch (Exception e) {
+      logger.atWarning().withCause(e).log("Unable to load a value for key='%s'", key);
+      loadExceptionCount.increment();
+      throw e;
+    }
+  }
+
+  TimedValue<V> loadIfPresent(K key) {
+    TimedValue<V> h = store.get(new KeyWrapper<>(key));
+    if (h != null && !expired(h.getCreated())) {
+      hitCount.increment();
+      return h;
+    }
+
+    return null;
+  }
+
+  @Override
+  public ListenableFuture<TimedValue<V>> reload(K key, TimedValue<V> oldValue) throws Exception {
+    if (!loader.isPresent()) {
+      throw new IllegalStateException("No loader defined");
+    }
+
+    final long start = System.nanoTime();
+    ListenableFuture<V> reloadedValue = loader.get().reload(key, oldValue.getValue());
+    Futures.addCallback(
+        reloadedValue,
+        new FutureCallback<V>() {
+          @Override
+          public void onSuccess(V result) {
+            store.put(new KeyWrapper<>(key), new TimedValue<>(result));
+            loadSuccessCount.increment();
+            totalLoadTime.add(System.nanoTime() - start);
+          }
+
+          @Override
+          public void onFailure(Throwable t) {
+            logger.atWarning().withCause(t).log("Unable to reload cache value for key='%s'", key);
+            loadExceptionCount.increment();
+          }
+        },
+        storePersistenceExecutor);
+
+    return Futures.transform(reloadedValue, TimedValue::new, storePersistenceExecutor);
+  }
+
+  boolean expired(long created) {
+    Duration age = Duration.between(Instant.ofEpochMilli(created), TimeUtil.now());
+    return !expireAfterWrite.isZero() && age.compareTo(expireAfterWrite) > 0;
+  }
+
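+  /**
+   * Returns an {@link InMemoryCache} view with no in-memory caching: every operation delegates
+   * straight to this loader and the underlying ChronicleMap store.
+   */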
+  InMemoryCache<K, V> asInMemoryCacheBypass() {
+    return new InMemoryCache<K, V>() {
+
+      @SuppressWarnings("unchecked")
+      @Override
+      public TimedValue<V> getIfPresent(Object key) {
+        try {
+          return load((K) key);
+        } catch (Exception e) {
+          return null;
+        }
+      }
+
+      @Override
+      public TimedValue<V> get(K key, Callable<? extends TimedValue<V>> valueLoader)
+          throws Exception {
+        return valueLoader.call();
+      }
+
+      @Override
+      public void put(K key, TimedValue<V> value) {
+        store.put(new KeyWrapper<>(key), value);
+      }
+
+      @Override
+      public boolean isLoadingCache() {
+        return true;
+      }
+
+      @Override
+      public TimedValue<V> get(K key) throws ExecutionException {
+        try {
+          return load(key);
+        } catch (Exception e) {
+          throw new ExecutionException(e);
+        }
+      }
+
+      @Override
+      public void refresh(K key) {}
+
+      @Override
+      public CacheStats stats() {
+        throw new IllegalArgumentException("Cache stats not available for a loader-bypass");
+      }
+
+      @Override
+      public long size() {
+        return 0;
+      }
+
+      @Override
+      public void invalidate(Object key) {}
+
+      @Override
+      public void invalidateAll() {}
+    };
+  }
+}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java
index 965c717..c74594f 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java
@@ -27,7 +27,6 @@
 import com.google.gerrit.extensions.client.ChangeKind;
 import com.google.gerrit.extensions.restapi.RestApiException;
 import com.google.gerrit.httpd.WebSessionManager;
-import com.google.gerrit.metrics.DisabledMetricMaker;
 import com.google.gerrit.server.account.CachedAccountDetails;
 import com.google.gerrit.server.cache.PersistentCacheDef;
 import com.google.gerrit.server.cache.proto.Cache;
@@ -223,7 +222,7 @@
           if (chronicleMapConfig.isPresent()) {
             ChronicleMapCacheConfig cacheConfig = chronicleMapConfig.get();
             ChronicleMapCacheImpl<?, ?> chronicleMapCache =
-                new ChronicleMapCacheImpl<>(in, cacheConfig, null, new DisabledMetricMaker());
+                new ChronicleMapCacheImpl<>(in, cacheConfig);
 
             doMigrate(h2CacheFile.get(), in, chronicleMapCache);
             chronicleMapCache.close();
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCache.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCache.java
new file mode 100644
index 0000000..d1601f8
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCache.java
@@ -0,0 +1,45 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import com.google.common.cache.CacheStats;
+import com.google.errorprone.annotations.CompatibleWith;
+import com.google.gerrit.common.Nullable;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+
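+/**
+ * In-memory front layer sitting in front of the persistent ChronicleMap store, exposing the
+ * subset of Guava cache operations needed by {@link ChronicleMapCacheImpl}.
+ */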
+interface InMemoryCache<K, V> {
+
+  @Nullable
+  TimedValue<V> getIfPresent(@CompatibleWith("K") Object key);
+
+  TimedValue<V> get(K key) throws ExecutionException;
+
+  TimedValue<V> get(K key, Callable<? extends TimedValue<V>> valueLoader) throws Exception;
+
+  void put(K key, TimedValue<V> value);
+
+  void invalidate(@CompatibleWith("K") Object key);
+
+  boolean isLoadingCache();
+
+  void refresh(K key);
+
+  CacheStats stats();
+
+  long size();
+
+  void invalidateAll();
+}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCacheLoadingFromStoreImpl.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCacheLoadingFromStoreImpl.java
new file mode 100644
index 0000000..96b75da
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryCacheLoadingFromStoreImpl.java
@@ -0,0 +1,101 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheStats;
+import com.google.common.cache.LoadingCache;
+import com.google.gerrit.common.Nullable;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+
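+/**
+ * {@link InMemoryCache} implementation backed by a Guava cache whose loader reads from the
+ * ChronicleMap store; when loadingFromSource is true the cache can also load and refresh
+ * entries from the original data source.
+ */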
+class InMemoryCacheLoadingFromStoreImpl<K, V> implements InMemoryCache<K, V> {
+  private final Cache<K, TimedValue<V>> loadingFromStoreCache;
+  private final boolean loadingFromSource;
+
+  /**
+   * Creates an in-memory cache backed by a LoadingCache whose loader reads entries from the
+   * ChronicleMap store.
+   *
+   * @param loadingFromStoreCache LoadingCache whose loader reads from the ChronicleMap store
+   * @param loadingFromSource true if loadingFromStoreCache can also load entries from the
+   *     original data source
+   */
+  InMemoryCacheLoadingFromStoreImpl(
+      LoadingCache<K, TimedValue<V>> loadingFromStoreCache, boolean loadingFromSource) {
+    this.loadingFromStoreCache = loadingFromStoreCache;
+    this.loadingFromSource = loadingFromSource;
+  }
+
+  @Override
+  public @Nullable TimedValue<V> getIfPresent(Object key) {
+    return loadingFromStoreCache.getIfPresent(key);
+  }
+
+  @Override
+  public TimedValue<V> get(K key, Callable<? extends TimedValue<V>> valueLoader) throws Exception {
+    return loadingFromStoreCache.get(key, valueLoader);
+  }
+
+  @Override
+  public void put(K key, TimedValue<V> value) {
+    loadingFromStoreCache.put(key, value);
+  }
+
+  @Override
+  public boolean isLoadingCache() {
+    return loadingFromSource;
+  }
+
+  @Override
+  public TimedValue<V> get(K key) throws ExecutionException {
+    if (loadingFromSource) {
+      return ((LoadingCache<K, TimedValue<V>>) loadingFromStoreCache).get(key);
+    }
+
+    TimedValue<V> cachedValue = getIfPresent(key);
+    if (cachedValue != null) {
+      return cachedValue;
+    }
+
+    throw new UnsupportedOperationException(
+        String.format("Could not load value for %s without any loader", key));
+  }
+
+  @Override
+  public void refresh(K key) {
+    if (loadingFromSource) {
+      ((LoadingCache<K, TimedValue<V>>) loadingFromStoreCache).refresh(key);
+    }
+  }
+
+  @Override
+  public CacheStats stats() {
+    return loadingFromStoreCache.stats();
+  }
+
+  @Override
+  public long size() {
+    return loadingFromStoreCache.size();
+  }
+
+  @Override
+  public void invalidate(Object key) {
+    loadingFromStoreCache.invalidate(key);
+  }
+
+  @Override
+  public void invalidateAll() {
+    loadingFromStoreCache.invalidateAll();
+  }
+}
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheIT.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheIT.java
index 6f53dea..444762b 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheIT.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheIT.java
@@ -30,6 +30,7 @@
 @UseLocalDisk
 public class ChronicleMapCacheIT extends AbstractDaemonTest {
 
+  private static final int ZERO_INMEMORY_CACHE = 0;
   @Inject PersistentCacheFactory persistentCacheFactory;
 
   @Override
@@ -47,7 +48,7 @@
     final int negativeDiskLimit = -1;
     final Cache<String, String> cache =
         persistentCacheFactory.build(
-            new TestPersistentCacheDef("foo", null, negativeDiskLimit), CacheBackend.CAFFEINE);
+            new TestPersistentCacheDef("foo", null, negativeDiskLimit, 0), CacheBackend.CAFFEINE);
 
     assertThat(cache.getClass().getSimpleName()).isEqualTo("CaffeinatedGuavaCache");
   }
@@ -57,7 +58,8 @@
     final int positiveDiskLimit = 1024;
     assertThat(
             persistentCacheFactory.build(
-                new TestPersistentCacheDef("foo", null, positiveDiskLimit), CacheBackend.CAFFEINE))
+                new TestPersistentCacheDef("foo", null, positiveDiskLimit, ZERO_INMEMORY_CACHE),
+                CacheBackend.CAFFEINE))
         .isInstanceOf(ChronicleMapCacheImpl.class);
   }
 
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
index 739c688..91ddbe3 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
@@ -19,51 +19,50 @@
 
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.MetricRegistry;
+import com.google.gerrit.acceptance.AbstractDaemonTest;
+import com.google.gerrit.acceptance.UseLocalDisk;
 import com.google.gerrit.acceptance.WaitUtil;
 import com.google.gerrit.common.Nullable;
-import com.google.gerrit.lifecycle.LifecycleManager;
 import com.google.gerrit.metrics.DisabledMetricMaker;
 import com.google.gerrit.metrics.MetricMaker;
-import com.google.gerrit.metrics.dropwizard.DropWizardMetricMaker;
+import com.google.gerrit.server.cache.CacheBackend;
+import com.google.gerrit.server.cache.MemoryCacheFactory;
 import com.google.gerrit.server.cache.serialize.CacheSerializer;
 import com.google.gerrit.server.cache.serialize.StringCacheSerializer;
 import com.google.gerrit.server.config.SitePaths;
-import com.google.inject.Guice;
 import com.google.inject.Inject;
-import com.google.inject.Injector;
 import java.io.File;
-import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.nio.file.Files;
 import java.time.Duration;
 import java.util.UUID;
 import java.util.concurrent.ExecutionException;
 import net.openhft.chronicle.bytes.Bytes;
+import org.eclipse.jgit.lib.Config;
 import org.eclipse.jgit.lib.StoredConfig;
 import org.eclipse.jgit.storage.file.FileBasedConfig;
 import org.eclipse.jgit.util.FS;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.runner.Description;
 
-public class ChronicleMapCacheTest {
-  private static final String TEST_CACHE_NAME = "test-cache-name";
+@UseLocalDisk // Needed to have Gerrit with DropWizardMetricMaker enabled
+public class ChronicleMapCacheTest extends AbstractDaemonTest {
+  private static final DisabledMetricMaker WITHOUT_METRICS = new DisabledMetricMaker();
   @Inject MetricMaker metricMaker;
   @Inject MetricRegistry metricRegistry;
+  @Inject MemoryCacheFactory memCacheFactory;
 
-  @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();
-  private SitePaths sitePaths;
+  @Inject SitePaths sitePaths;
   private StoredConfig gerritConfig;
 
   private final String cacheDirectory = ".";
 
+  String testCacheName = "test-cache";
+
   @Before
   public void setUp() throws Exception {
-    CacheSerializers.registerCacheKeySerializer(TEST_CACHE_NAME, StringCacheSerializer.INSTANCE);
-    CacheSerializers.registerCacheValueSerializer(TEST_CACHE_NAME, StringCacheSerializer.INSTANCE);
-    sitePaths = new SitePaths(temporaryFolder.newFolder().toPath());
-    Files.createDirectories(sitePaths.etc_dir);
+    CacheSerializers.registerCacheKeySerializer(testCacheName, StringCacheSerializer.INSTANCE);
+    CacheSerializers.registerCacheValueSerializer(testCacheName, StringCacheSerializer.INSTANCE);
 
     gerritConfig =
         new FileBasedConfig(
@@ -71,18 +70,12 @@
     gerritConfig.load();
     gerritConfig.setString("cache", null, "directory", cacheDirectory);
     gerritConfig.save();
-
-    setupMetrics();
   }
 
-  public void setupMetrics() {
-    Injector injector = Guice.createInjector(new DropWizardMetricMaker.ApiModule());
-
-    LifecycleManager mgr = new LifecycleManager();
-    mgr.add(injector);
-    mgr.start();
-
-    injector.injectMembers(this);
+  @Override
+  protected void beforeTest(Description description) throws Exception {
+    super.beforeTest(description);
+    testCacheName += description.getMethodName();
   }
 
   @Test
@@ -138,8 +131,6 @@
   @Test
   public void getShouldRetrieveCachedValueWhenCacheHasSameVersion() throws Exception {
     int cacheVersion = 2;
-    gerritConfig.setString("cache", null, "directory", "cache");
-    gerritConfig.save();
     final ChronicleMapCacheImpl<String, String> cache = newCacheVersion(cacheVersion);
 
     final String originalValue = "value 1";
@@ -175,7 +166,7 @@
     final ChronicleMapCacheImpl<String, String> cache = newCacheWithLoader();
 
     cache.put("foo", "bar");
-    cache.getIfPresent("foo");
+    assertThat(cache.getIfPresent("foo")).isEqualTo("bar");
 
     assertThat(cache.stats().hitCount()).isEqualTo(1);
     assertThat(cache.stats().missCount()).isEqualTo(0);
@@ -202,12 +193,12 @@
   }
 
   @Test
-  public void shouldIncreaseLoadExceptionCountWhenNoLoaderIsAvailable() throws Exception {
+  public void shouldNotIncreaseLoadExceptionCountWhenNoLoaderIsAvailable() throws Exception {
     final ChronicleMapCacheImpl<String, String> cache = newCacheWithoutLoader();
 
     assertThrows(UnsupportedOperationException.class, () -> cache.get("foo"));
 
-    assertThat(cache.stats().loadExceptionCount()).isEqualTo(1);
+    assertThat(cache.stats().loadExceptionCount()).isEqualTo(0);
     assertThat(cache.stats().loadSuccessCount()).isEqualTo(0);
   }
 
@@ -241,7 +232,7 @@
   @Test
   public void getIfPresentShouldReturnNullWhenValueIsExpired() throws Exception {
     ChronicleMapCacheImpl<String, String> cache =
-        newCache(true, TEST_CACHE_NAME, null, Duration.ofSeconds(1), null, 1);
+        newCache(true, testCacheName, null, Duration.ofSeconds(1), null, 1, WITHOUT_METRICS);
     cache.put("foo", "some-stale-value");
     Thread.sleep(1010); // Allow cache entry to expire
     assertThat(cache.getIfPresent("foo")).isNull();
@@ -250,24 +241,38 @@
   @Test
   public void getShouldRefreshValueWhenExpired() throws Exception {
     String newCachedValue = UUID.randomUUID().toString();
+    String staleValue = "some-stale-value";
+
     ChronicleMapCacheImpl<String, String> cache =
-        newCache(true, TEST_CACHE_NAME, newCachedValue, null, Duration.ofSeconds(1), 1);
-    cache.put("foo", "some-stale-value");
-    Thread.sleep(1010); // Allow cache to be flagged as needing refresh
-    assertThat(cache.get("foo")).isEqualTo(newCachedValue);
+        newCache(
+            true, testCacheName, newCachedValue, null, Duration.ofSeconds(1), 1, WITHOUT_METRICS);
+    cache.put("foo", staleValue);
+    assertThat(cache.get("foo")).isEqualTo(staleValue);
+
+    // Wait until the cache is asynchronously refreshed
+    WaitUtil.waitUntil(
+        () -> {
+          try {
+            return cache.get("foo").equals(newCachedValue);
+          } catch (ExecutionException e) {
+            e.printStackTrace();
+            return false;
+          }
+        },
+        Duration.ofSeconds(2));
   }
 
   @Test
   public void shouldPruneExpiredValues() throws Exception {
     ChronicleMapCacheImpl<String, String> cache =
-        newCache(true, TEST_CACHE_NAME, null, Duration.ofSeconds(1), null, 1);
+        newCache(true, testCacheName, null, Duration.ofSeconds(1), null, 1, WITHOUT_METRICS);
     cache.put("foo1", "some-stale-value1");
     cache.put("foo2", "some-stale-value1");
     Thread.sleep(1010); // Allow cache entries to expire
     cache.put("foo3", "some-fresh-value3");
     cache.prune();
 
-    assertThat(cache.size()).isEqualTo(1);
+    assertThat(cache.diskStats().size()).isEqualTo(1);
     assertThat(cache.get("foo3")).isEqualTo("some-fresh-value3");
   }
 
@@ -279,6 +284,7 @@
     cache.invalidate("foo");
 
     assertThat(cache.size()).isEqualTo(0);
+    assertThat(cache.diskStats().size()).isEqualTo(0);
     assertThat(cache.get("foo")).isEqualTo(cachedValue);
   }
 
@@ -291,16 +297,17 @@
     cache.invalidateAll();
 
     assertThat(cache.size()).isEqualTo(0);
+    assertThat(cache.diskStats().size()).isEqualTo(0);
   }
 
   @Test
   public void shouldEvictOldestElementInCacheWhenIsNeverAccessed() throws Exception {
     final String fooValue = "foo";
 
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", 2);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "percentageHotKeys", 10);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgKeySize", "foo1".getBytes().length);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgValueSize", valueSize(fooValue));
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", 2);
+    gerritConfig.setInt("cache", testCacheName, "percentageHotKeys", 10);
+    gerritConfig.setInt("cache", testCacheName, "avgKeySize", "foo1".getBytes().length);
+    gerritConfig.setInt("cache", testCacheName, "avgValueSize", valueSize(fooValue));
     gerritConfig.save();
 
     ChronicleMapCacheImpl<String, String> cache = newCacheWithLoader(fooValue);
@@ -309,7 +316,7 @@
 
     cache.prune();
 
-    assertThat(cache.size()).isEqualTo(1);
+    assertThat(cache.diskStats().size()).isEqualTo(1);
     assertThat(cache.get("foo2")).isNotNull();
   }
 
@@ -317,10 +324,10 @@
   public void shouldEvictRecentlyInsertedElementInCacheWhenOldestElementIsAccessed()
       throws Exception {
     final String fooValue = "foo";
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", 2);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "percentageHotKeys", 10);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgKeySize", "foo1".getBytes().length);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgValueSize", valueSize(fooValue));
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", 2);
+    gerritConfig.setInt("cache", testCacheName, "percentageHotKeys", 10);
+    gerritConfig.setInt("cache", testCacheName, "avgKeySize", "foo1".getBytes().length);
+    gerritConfig.setInt("cache", testCacheName, "avgValueSize", valueSize(fooValue));
     gerritConfig.save();
 
     ChronicleMapCacheImpl<String, String> cache = newCacheWithLoader(fooValue);
@@ -331,7 +338,7 @@
 
     cache.prune();
 
-    assertThat(cache.size()).isEqualTo(1);
+    assertThat(cache.diskStats().size()).isEqualTo(1);
     assertThat(cache.get("foo1")).isEqualTo(fooValue);
   }
 
@@ -358,13 +365,13 @@
   @Test
   public void shouldTriggerPercentageFreeMetric() throws Exception {
     String cachedValue = UUID.randomUUID().toString();
-    String freeSpaceMetricName = "cache/chroniclemap/percentage_free_space_" + TEST_CACHE_NAME;
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", 2);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgKeySize", cachedValue.getBytes().length);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgValueSize", valueSize(cachedValue));
+    String freeSpaceMetricName = "cache/chroniclemap/percentage_free_space_" + testCacheName;
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", 2);
+    gerritConfig.setInt("cache", testCacheName, "avgKeySize", cachedValue.getBytes().length);
+    gerritConfig.setInt("cache", testCacheName, "avgValueSize", valueSize(cachedValue));
     gerritConfig.save();
 
-    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(TEST_CACHE_NAME, cachedValue);
+    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(testCacheName, cachedValue);
 
     assertThat(getMetric(freeSpaceMetricName).getValue()).isEqualTo(100);
 
@@ -377,13 +384,13 @@
   @Test
   public void shouldTriggerRemainingAutoResizeMetric() throws Exception {
     String cachedValue = UUID.randomUUID().toString();
-    String autoResizeMetricName = "cache/chroniclemap/remaining_autoresizes_" + TEST_CACHE_NAME;
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", 2);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgKeySize", cachedValue.getBytes().length);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "avgValueSize", valueSize(cachedValue));
+    String autoResizeMetricName = "cache/chroniclemap/remaining_autoresizes_" + testCacheName;
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", 2);
+    gerritConfig.setInt("cache", testCacheName, "avgKeySize", cachedValue.getBytes().length);
+    gerritConfig.setInt("cache", testCacheName, "avgValueSize", valueSize(cachedValue));
     gerritConfig.save();
 
-    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(TEST_CACHE_NAME, cachedValue);
+    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(testCacheName, cachedValue);
 
     assertThat(getMetric(autoResizeMetricName).getValue()).isEqualTo(1);
 
@@ -401,12 +408,12 @@
     int percentageHotKeys = 60;
     int maxEntries = 10;
     int expectedCapacity = 6;
-    String hotKeysCapacityMetricName = "cache/chroniclemap/hot_keys_capacity_" + TEST_CACHE_NAME;
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", maxEntries);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "percentageHotKeys", percentageHotKeys);
+    String hotKeysCapacityMetricName = "cache/chroniclemap/hot_keys_capacity_" + testCacheName;
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", maxEntries);
+    gerritConfig.setInt("cache", testCacheName, "percentageHotKeys", percentageHotKeys);
     gerritConfig.save();
 
-    newCacheWithMetrics(TEST_CACHE_NAME, cachedValue);
+    newCacheWithMetrics(testCacheName, cachedValue);
 
     assertThat(getMetric(hotKeysCapacityMetricName).getValue()).isEqualTo(expectedCapacity);
   }
@@ -418,12 +425,12 @@
     int maxEntries = 10;
     int maxHotKeyCapacity = 3;
     final Duration METRIC_TRIGGER_TIMEOUT = Duration.ofSeconds(2);
-    String hotKeysSizeMetricName = "cache/chroniclemap/hot_keys_size_" + TEST_CACHE_NAME;
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", maxEntries);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "percentageHotKeys", percentageHotKeys);
+    String hotKeysSizeMetricName = "cache/chroniclemap/hot_keys_size_" + testCacheName;
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", maxEntries);
+    gerritConfig.setInt("cache", testCacheName, "percentageHotKeys", percentageHotKeys);
     gerritConfig.save();
 
-    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(TEST_CACHE_NAME, cachedValue);
+    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(testCacheName, cachedValue);
 
     assertThat(getMetric(hotKeysSizeMetricName).getValue()).isEqualTo(0);
 
@@ -452,12 +459,12 @@
     int maxEntries = 10;
     int maxHotKeyCapacity = 3;
     final Duration METRIC_TRIGGER_TIMEOUT = Duration.ofSeconds(2);
-    String hotKeysSizeMetricName = "cache/chroniclemap/hot_keys_size_" + TEST_CACHE_NAME;
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "maxEntries", maxEntries);
-    gerritConfig.setInt("cache", TEST_CACHE_NAME, "percentageHotKeys", percentageHotKeys);
+    String hotKeysSizeMetricName = "cache/chroniclemap/hot_keys_size_" + testCacheName;
+    gerritConfig.setInt("cache", testCacheName, "maxEntries", maxEntries);
+    gerritConfig.setInt("cache", testCacheName, "percentageHotKeys", percentageHotKeys);
     gerritConfig.save();
 
-    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(TEST_CACHE_NAME, cachedValue);
+    ChronicleMapCacheImpl<String, String> cache = newCacheWithMetrics(testCacheName, cachedValue);
 
     for (int i = 0; i < maxHotKeyCapacity; i++) {
       cache.put(cachedValue + i, cachedValue);
@@ -491,7 +498,7 @@
   }
 
   private int valueSize(String value) {
-    final TimedValueMarshaller<String> marshaller = new TimedValueMarshaller<>(TEST_CACHE_NAME);
+    final TimedValueMarshaller<String> marshaller = new TimedValueMarshaller<>(testCacheName);
 
     Bytes<ByteBuffer> out = Bytes.elasticByteBuffer();
     marshaller.write(out, new TimedValue<>(value));
@@ -499,7 +506,7 @@
   }
 
   private ChronicleMapCacheImpl<String, String> newCacheWithMetrics(
-      String cacheName, @Nullable String cachedValue) throws IOException {
+      String cacheName, @Nullable String cachedValue) {
     return newCache(true, cacheName, cachedValue, null, null, null, null, 1, metricMaker);
   }
 
@@ -509,8 +516,8 @@
       @Nullable String loadedValue,
       @Nullable Duration expireAfterWrite,
       @Nullable Duration refreshAfterWrite,
-      Integer version)
-      throws IOException {
+      Integer version,
+      MetricMaker metricMaker) {
     return newCache(
         withLoader,
         cacheName,
@@ -520,7 +527,7 @@
         null,
         null,
         version,
-        new DisabledMetricMaker());
+        metricMaker);
   }
 
   private ChronicleMapCacheImpl<String, String> newCache(
@@ -532,10 +539,10 @@
       @Nullable CacheSerializer<String> keySerializer,
       @Nullable CacheSerializer<String> valueSerializer,
       Integer version,
-      MetricMaker metricMaker)
-      throws IOException {
+      MetricMaker metricMaker) {
     TestPersistentCacheDef cacheDef =
-        new TestPersistentCacheDef(cacheName, cachedValue, keySerializer, valueSerializer);
+        new TestPersistentCacheDef(
+            cacheName, cachedValue, keySerializer, valueSerializer, withLoader, expireAfterWrite);
 
     File persistentFile =
         ChronicleMapCacheFactory.fileName(
@@ -549,25 +556,33 @@
             expireAfterWrite != null ? expireAfterWrite : Duration.ZERO,
             refreshAfterWrite != null ? refreshAfterWrite : Duration.ZERO);
 
-    return new ChronicleMapCacheImpl<>(
-        cacheDef, config, withLoader ? cacheDef.loader() : null, metricMaker);
+    ChronicleMapCacheFactory cacheFactory =
+        new ChronicleMapCacheFactory(
+            memCacheFactory, new Config(), sitePaths, null, null, metricMaker);
+
+    if (withLoader) {
+      return (ChronicleMapCacheImpl<String, String>)
+          cacheFactory.build(
+              cacheDef, cacheDef.loader(), CacheBackend.CAFFEINE, config, metricMaker);
+    }
+    return (ChronicleMapCacheImpl<String, String>)
+        cacheFactory.build(cacheDef, CacheBackend.CAFFEINE, config, metricMaker);
   }
 
-  private ChronicleMapCacheImpl<String, String> newCacheWithLoader(@Nullable String loadedValue)
-      throws IOException {
-    return newCache(true, TEST_CACHE_NAME, loadedValue, null, null, 1);
+  private ChronicleMapCacheImpl<String, String> newCacheWithLoader(@Nullable String loadedValue) {
+    return newCache(true, testCacheName, loadedValue, null, null, 1, metricMaker);
   }
 
-  private ChronicleMapCacheImpl<String, String> newCacheWithLoader() throws IOException {
-    return newCache(true, TEST_CACHE_NAME, null, null, null, 1);
+  private ChronicleMapCacheImpl<String, String> newCacheWithLoader() {
+    return newCache(true, testCacheName, null, null, null, 1, metricMaker);
   }
 
-  private ChronicleMapCacheImpl<String, String> newCacheVersion(int version) throws IOException {
-    return newCache(true, TEST_CACHE_NAME, null, null, null, version);
+  private ChronicleMapCacheImpl<String, String> newCacheVersion(int version) {
+    return newCache(true, testCacheName, null, null, null, version, WITHOUT_METRICS);
   }
 
-  private ChronicleMapCacheImpl<String, String> newCacheWithoutLoader() throws IOException {
-    return newCache(false, TEST_CACHE_NAME, null, null, null, 1);
+  private ChronicleMapCacheImpl<String, String> newCacheWithoutLoader() {
+    return newCache(false, testCacheName, null, null, null, 1, metricMaker);
   }
 
   private <V> Gauge<V> getMetric(String name) {
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesLocalDiskIT.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesLocalDiskIT.java
index 628f6a0..57de6cd 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesLocalDiskIT.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesLocalDiskIT.java
@@ -24,7 +24,6 @@
 import static org.apache.http.HttpHeaders.ACCEPT;
 import static org.eclipse.jgit.util.HttpSupport.TEXT_PLAIN;
 
-import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.gerrit.acceptance.LightweightPluginDaemonTest;
 import com.google.gerrit.acceptance.RestResponse;
@@ -37,11 +36,11 @@
 import com.google.gerrit.entities.CachedProjectConfig;
 import com.google.gerrit.entities.Project;
 import com.google.gerrit.entities.RefNames;
-import com.google.gerrit.metrics.DisabledMetricMaker;
 import com.google.gerrit.server.account.CachedAccountDetails;
 import com.google.gerrit.server.cache.PersistentCacheDef;
 import com.google.gerrit.server.cache.h2.H2CacheImpl;
 import com.google.gerrit.server.cache.proto.Cache;
+import com.google.gerrit.server.cache.proto.Cache.ProjectCacheKeyProto.Builder;
 import com.google.gerrit.server.cache.serialize.ObjectIdConverter;
 import com.google.gerrit.server.config.SitePaths;
 import com.google.gerrit.server.group.SystemGroupBackend;
@@ -245,20 +244,20 @@
         chronicleCacheFor(PERSISTED_PROJECTS_CACHE_NAME);
 
     Cache.ProjectCacheKeyProto allUsersProto = projectCacheKey(allUsers);
-    Cache.ProjectCacheKeyProto allProjectsProto = projectCacheKey(allProjects);
 
     assertThat(chronicleMapCache.get(allUsersProto)).isEqualTo(h2Cache.get(allUsersProto));
-    assertThat(chronicleMapCache.get(allProjectsProto)).isEqualTo(h2Cache.get(allProjectsProto));
   }
 
   private Cache.ProjectCacheKeyProto projectCacheKey(Project.NameKey key) throws IOException {
     try (Repository git = repoManager.openRepository(key)) {
-      return Cache.ProjectCacheKeyProto.newBuilder()
-          .setProject(key.get())
-          .setRevision(
-              ObjectIdConverter.create()
-                  .toByteString(git.exactRef(RefNames.REFS_CONFIG).getObjectId()))
-          .build();
+      Builder builder =
+          Cache.ProjectCacheKeyProto.newBuilder()
+              .setProject(key.get())
+              .setRevision(
+                  ObjectIdConverter.create()
+                      .toByteString(git.exactRef(RefNames.REFS_CONFIG).getObjectId()));
+
+      return builder.build();
     }
   }
 
@@ -272,11 +271,6 @@
     return (H2CacheImpl<K, V>) findClassBoundWithName(LoadingCache.class, named);
   }
 
-  @SuppressWarnings("unchecked")
-  private <K, V> CacheLoader<K, V> cacheLoaderFor(String named) {
-    return findClassBoundWithName(CacheLoader.class, named);
-  }
-
   private RestResponse runMigration(int sizeMultiplier, int maxBloatFactor) throws IOException {
     return adminRestSession.put(
         String.format(
@@ -328,8 +322,7 @@
             DEFAULT_SIZE_MULTIPLIER,
             DEFAULT_MAX_BLOAT_FACTOR);
 
-    return new ChronicleMapCacheImpl<>(
-        persistentDef, config, cacheLoaderFor(cacheName), new DisabledMetricMaker());
+    return new ChronicleMapCacheImpl<>(persistentDef, config);
   }
 
   private void waitForCacheToLoad(String cacheName) throws InterruptedException {
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/TestPersistentCacheDef.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/TestPersistentCacheDef.java
index 0e52f1d..1ebb9e3 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/TestPersistentCacheDef.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/TestPersistentCacheDef.java
@@ -26,12 +26,19 @@
 
 public class TestPersistentCacheDef implements PersistentCacheDef<String, String> {
 
+  private static final Duration ONE_YEAR = Duration.ofDays(365);
+
   private static final Integer DEFAULT_DISK_LIMIT = 1024;
+  private static final Integer DEFAULT_MEMORY_LIMIT = 1024;
+
+  private static final Duration DEFAULT_EXPIRY_AFTER_MEMORY_ACCESS = ONE_YEAR;
 
   private final String name;
   private final String loadedValue;
   private final Duration expireAfterWrite;
   private final Duration refreshAfterWrite;
+  private final Duration expireFromMemoryAfterAccess;
+  private final Integer maximumWeight;
   private final Integer diskLimit;
   private final CacheSerializer<String> keySerializer;
   private final CacheSerializer<String> valueSerializer;
@@ -46,33 +53,51 @@
     this.loadedValue = loadedValue;
     this.expireAfterWrite = expireAfterWrite;
     this.refreshAfterWrite = refreshAfterWrite;
+    this.expireFromMemoryAfterAccess = DEFAULT_EXPIRY_AFTER_MEMORY_ACCESS;
     this.diskLimit = DEFAULT_DISK_LIMIT;
+    this.maximumWeight = DEFAULT_MEMORY_LIMIT;
     this.keySerializer = StringCacheSerializer.INSTANCE;
     this.valueSerializer = StringCacheSerializer.INSTANCE;
   }
 
-  public TestPersistentCacheDef(String name, @Nullable String loadedValue, Integer diskLimit) {
+  public TestPersistentCacheDef(
+      String name, @Nullable String loadedValue, Integer diskLimit, Integer memoryLimit) {
 
     this.name = name;
     this.loadedValue = loadedValue;
     this.expireAfterWrite = null;
     this.refreshAfterWrite = null;
+    this.expireFromMemoryAfterAccess = DEFAULT_EXPIRY_AFTER_MEMORY_ACCESS;
     this.diskLimit = diskLimit;
     this.keySerializer = StringCacheSerializer.INSTANCE;
     this.valueSerializer = StringCacheSerializer.INSTANCE;
+    this.maximumWeight = memoryLimit;
   }
 
   public TestPersistentCacheDef(
       String name,
       @Nullable String loadedValue,
       @Nullable CacheSerializer<String> keySerializer,
-      @Nullable CacheSerializer<String> valueSerializer) {
+      @Nullable CacheSerializer<String> valueSerializer,
+      boolean withLoader) {
+    this(name, loadedValue, keySerializer, valueSerializer, withLoader, null);
+  }
+
+  public TestPersistentCacheDef(
+      String name,
+      @Nullable String loadedValue,
+      @Nullable CacheSerializer<String> keySerializer,
+      @Nullable CacheSerializer<String> valueSerializer,
+      boolean withLoader,
+      @Nullable Duration maxAge) {
 
     this.name = name;
     this.loadedValue = loadedValue;
-    this.expireAfterWrite = Duration.ZERO;
-    this.refreshAfterWrite = Duration.ZERO;
+    this.expireAfterWrite = withLoader ? ONE_YEAR : null;
+    this.refreshAfterWrite = withLoader ? ONE_YEAR : null;
+    this.expireFromMemoryAfterAccess = maxAge == null ? DEFAULT_EXPIRY_AFTER_MEMORY_ACCESS : maxAge;
     this.diskLimit = DEFAULT_DISK_LIMIT;
+    this.maximumWeight = DEFAULT_MEMORY_LIMIT;
     this.keySerializer = Optional.ofNullable(keySerializer).orElse(StringCacheSerializer.INSTANCE);
     this.valueSerializer =
         Optional.ofNullable(valueSerializer).orElse(StringCacheSerializer.INSTANCE);
@@ -120,7 +145,7 @@
 
   @Override
   public long maximumWeight() {
-    return 0;
+    return maximumWeight;
   }
 
   @Override
@@ -130,7 +155,7 @@
 
   @Override
   public Duration expireFromMemoryAfterAccess() {
-    return Duration.ZERO;
+    return expireFromMemoryAfterAccess;
   }
 
   @Override