Merge branch 'stable-3.3' into master
* stable-3.3:
Move migration command over HTTP
Separate H2 command operations from SSH
Fix 'invalid metric name' error
Fix NPE thrown by LRU Cache
Change-Id: Ic562124d11ffbb2d60003bc66774acbcf4ab99d7
diff --git a/BUILD b/BUILD
index 22dd6f0..619d3c7 100644
--- a/BUILD
+++ b/BUILD
@@ -12,11 +12,13 @@
srcs = glob(["src/main/java/**/*.java"]),
manifest_entries = [
"Gerrit-SshModule: com.googlesource.gerrit.modules.cache.chroniclemap.SSHCommandModule",
+ "Gerrit-HttpModule: com.googlesource.gerrit.modules.cache.chroniclemap.HttpModule",
],
resources = glob(["src/main/resources/**/*"]),
deps = [
"//lib:h2",
"//lib/commons:io",
+ "//proto:cache_java_proto",
"@chronicle-algo//jar",
"@chronicle-bytes//jar",
"@chronicle-core//jar",
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AnalyzeH2Caches.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AnalyzeH2Caches.java
index bf86e78..6f501a9 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AnalyzeH2Caches.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/AnalyzeH2Caches.java
@@ -13,8 +13,16 @@
// limitations under the License.
package com.googlesource.gerrit.modules.cache.chroniclemap;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.H2_SUFFIX;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.appendToConfig;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.baseName;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.getCacheDir;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.getStats;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.logger;
+
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.sshd.SshCommand;
import com.google.inject.Inject;
import java.io.IOException;
import java.nio.file.Files;
@@ -24,7 +32,10 @@
import java.util.stream.Collectors;
import org.eclipse.jgit.lib.Config;
-public class AnalyzeH2Caches extends H2CacheSshCommand {
+public class AnalyzeH2Caches extends SshCommand {
+
+ private final Config gerritConfig;
+ private final SitePaths site;
@Inject
AnalyzeH2Caches(@GerritServerConfig Config cfg, SitePaths site) {
@@ -33,7 +44,7 @@
}
@Override
- protected void run() throws UnloggedFailure, Failure, Exception {
+ protected void run() throws Exception {
Set<Path> h2Files = getH2CacheFiles();
stdout.println("Extracting information from H2 caches...");
@@ -56,10 +67,10 @@
stdout.println(config.toText());
}
- private Set<Path> getH2CacheFiles() throws UnloggedFailure {
+ private Set<Path> getH2CacheFiles() throws Exception {
try {
- return getCacheDir()
+ return getCacheDir(gerritConfig, site)
.map(
cacheDir -> {
try {
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
index e85e553..311445b 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheImpl.java
@@ -118,10 +118,13 @@
<K, V> void registerCallBackMetrics(
String name, ChronicleMap<K, TimedValue<V>> store, InMemoryLRU<K> hotEntries) {
- String PERCENTAGE_FREE_SPACE_METRIC = "cache/chroniclemap/percentage_free_space_" + name;
- String REMAINING_AUTORESIZES_METRIC = "cache/chroniclemap/remaining_autoresizes_" + name;
- String HOT_KEYS_CAPACITY_METRIC = "cache/chroniclemap/hot_keys_capacity_" + name;
- String HOT_KEYS_SIZE_METRIC = "cache/chroniclemap/hot_keys_size_" + name;
+ String sanitizedName = metricMaker.sanitizeMetricName(name);
+ String PERCENTAGE_FREE_SPACE_METRIC =
+ "cache/chroniclemap/percentage_free_space_" + sanitizedName;
+ String REMAINING_AUTORESIZES_METRIC =
+ "cache/chroniclemap/remaining_autoresizes_" + sanitizedName;
+ String HOT_KEYS_CAPACITY_METRIC = "cache/chroniclemap/hot_keys_capacity_" + sanitizedName;
+ String HOT_KEYS_SIZE_METRIC = "cache/chroniclemap/hot_keys_size_" + sanitizedName;
metricMaker.newCallbackMetric(
PERCENTAGE_FREE_SPACE_METRIC,
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheSshCommand.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheCommand.java
similarity index 86%
rename from src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheSshCommand.java
rename to src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheCommand.java
index ab964ce..b956a7b 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheSshCommand.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2CacheCommand.java
@@ -15,7 +15,6 @@
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.server.config.SitePaths;
-import com.google.gerrit.sshd.SshCommand;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -28,18 +27,15 @@
import org.eclipse.jgit.lib.Config;
import org.h2.Driver;
-public abstract class H2CacheSshCommand extends SshCommand {
+public class H2CacheCommand {
protected static final FluentLogger logger = FluentLogger.forEnclosingClass();
- protected static final String H2_SUFFIX = "h2.db";
+ public static final String H2_SUFFIX = "h2.db";
- protected Config gerritConfig;
- protected SitePaths site;
-
- protected static String baseName(Path h2File) {
+ public static String baseName(Path h2File) {
return FilenameUtils.removeExtension(FilenameUtils.getBaseName(h2File.toString()));
}
- protected static H2AggregateData getStats(Path h2File) throws UnloggedFailure {
+ protected static H2AggregateData getStats(Path h2File) throws Exception {
String url = jdbcUrl(h2File);
String baseName = baseName(h2File);
try {
@@ -62,7 +58,7 @@
return H2AggregateData.empty(baseName);
}
} catch (SQLException e) {
- throw new UnloggedFailure(1, "fatal: " + e.getMessage(), e);
+ throw new Exception("fatal: " + e.getMessage(), e);
}
}
@@ -72,7 +68,8 @@
return "jdbc:h2:" + normalized + ";AUTO_SERVER=TRUE";
}
- protected Optional<Path> getCacheDir() throws IOException {
+ protected static Optional<Path> getCacheDir(Config gerritConfig, SitePaths site)
+ throws IOException {
String name = gerritConfig.getString("cache", null, "directory");
if (name == null) {
return Optional.empty();
@@ -89,7 +86,7 @@
return Optional.of(loc);
}
- protected void appendToConfig(Config config, H2AggregateData stats) {
+ protected static void appendToConfig(Config config, H2AggregateData stats) {
config.setLong("cache", stats.cacheName(), "maxEntries", stats.size());
config.setLong("cache", stats.cacheName(), "avgKeySize", stats.avgKeySize());
config.setLong("cache", stats.cacheName(), "avgValueSize", stats.avgValueSize());
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java
new file mode 100644
index 0000000..3dc7ef8
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/H2MigrationServlet.java
@@ -0,0 +1,321 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.H2_SUFFIX;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.appendToConfig;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.getStats;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.jdbcUrl;
+import static org.apache.http.HttpHeaders.ACCEPT;
+import static org.eclipse.jgit.util.HttpSupport.TEXT_PLAIN;
+
+import com.google.common.flogger.FluentLogger;
+import com.google.gerrit.entities.Account;
+import com.google.gerrit.entities.CachedProjectConfig;
+import com.google.gerrit.extensions.auth.oauth.OAuthToken;
+import com.google.gerrit.extensions.client.ChangeKind;
+import com.google.gerrit.extensions.restapi.AuthException;
+import com.google.gerrit.extensions.restapi.RestApiException;
+import com.google.gerrit.httpd.WebSessionManager;
+import com.google.gerrit.metrics.DisabledMetricMaker;
+import com.google.gerrit.server.account.CachedAccountDetails;
+import com.google.gerrit.server.cache.PersistentCacheDef;
+import com.google.gerrit.server.cache.proto.Cache;
+import com.google.gerrit.server.change.ChangeKindCacheImpl;
+import com.google.gerrit.server.change.MergeabilityCacheImpl;
+import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.server.git.TagSetHolder;
+import com.google.gerrit.server.notedb.ChangeNotesCache;
+import com.google.gerrit.server.notedb.ChangeNotesState;
+import com.google.gerrit.server.patch.DiffSummary;
+import com.google.gerrit.server.patch.DiffSummaryKey;
+import com.google.gerrit.server.patch.IntraLineDiff;
+import com.google.gerrit.server.patch.IntraLineDiffKey;
+import com.google.gerrit.server.patch.PatchList;
+import com.google.gerrit.server.patch.PatchListKey;
+import com.google.gerrit.server.permissions.GlobalPermission;
+import com.google.gerrit.server.permissions.PermissionBackend;
+import com.google.gerrit.server.permissions.PermissionBackendException;
+import com.google.gerrit.server.query.change.ConflictKey;
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.google.inject.TypeLiteral;
+import com.google.inject.name.Named;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.eclipse.jgit.lib.Config;
+import org.h2.Driver;
+
+@Singleton
+public class H2MigrationServlet extends HttpServlet {
+ private static final long serialVersionUID = 1L;
+ protected static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
+ private final ChronicleMapCacheConfig.Factory configFactory;
+ private final SitePaths site;
+ private final Config gerritConfig;
+ private final PermissionBackend permissionBackend;
+
+ public static int DEFAULT_SIZE_MULTIPLIER = 3;
+ public static int DEFAULT_MAX_BLOAT_FACTOR = 3;
+
+ public static final String MAX_BLOAT_FACTOR_PARAM = "max-bloat-factor";
+ public static final String SIZE_MULTIPLIER_PARAM = "size-multiplier";
+
+ private final Set<PersistentCacheDef<?, ?>> persistentCacheDefs;
+
+ @Inject
+ H2MigrationServlet(
+ @GerritServerConfig Config cfg,
+ SitePaths site,
+ ChronicleMapCacheConfig.Factory configFactory,
+ PermissionBackend permissionBackend,
+ @Named("web_sessions") PersistentCacheDef<String, WebSessionManager.Val> webSessionsCacheDef,
+ @Named("accounts")
+ PersistentCacheDef<CachedAccountDetails.Key, CachedAccountDetails> accountsCacheDef,
+ @Named("oauth_tokens") PersistentCacheDef<Account.Id, OAuthToken> oauthTokenDef,
+ @Named("change_kind")
+ PersistentCacheDef<ChangeKindCacheImpl.Key, ChangeKind> changeKindCacheDef,
+ @Named("mergeability")
+ PersistentCacheDef<MergeabilityCacheImpl.EntryKey, Boolean> mergeabilityCacheDef,
+ @Named("pure_revert")
+ PersistentCacheDef<Cache.PureRevertKeyProto, Boolean> pureRevertCacheDef,
+ @Named("git_tags") PersistentCacheDef<String, TagSetHolder> gitTagsCacheDef,
+ @Named("change_notes")
+ PersistentCacheDef<ChangeNotesCache.Key, ChangeNotesState> changeNotesCacheDef,
+ @Named("diff") PersistentCacheDef<PatchListKey, PatchList> diffCacheDef,
+ @Named("diff_intraline")
+ PersistentCacheDef<IntraLineDiffKey, IntraLineDiff> diffIntraLineCacheDef,
+ @Named("diff_summary") PersistentCacheDef<DiffSummaryKey, DiffSummary> diffSummaryCacheDef,
+ @Named("persisted_projects")
+ PersistentCacheDef<Cache.ProjectCacheKeyProto, CachedProjectConfig>
+ persistedProjectsCacheDef,
+ @Named("conflicts") PersistentCacheDef<ConflictKey, Boolean> conflictsCacheDef) {
+ this.configFactory = configFactory;
+ this.site = site;
+ this.gerritConfig = cfg;
+ this.permissionBackend = permissionBackend;
+ this.persistentCacheDefs =
+ Stream.of(
+ webSessionsCacheDef,
+ accountsCacheDef,
+ oauthTokenDef,
+ changeKindCacheDef,
+ mergeabilityCacheDef,
+ pureRevertCacheDef,
+ gitTagsCacheDef,
+ changeNotesCacheDef,
+ diffCacheDef,
+ diffIntraLineCacheDef,
+ diffSummaryCacheDef,
+ persistedProjectsCacheDef,
+ conflictsCacheDef)
+ .collect(Collectors.toSet());
+ }
+
+ @Override
+ protected void doPut(HttpServletRequest req, HttpServletResponse rsp) throws IOException {
+ if (hasInvalidAcceptHeader(req)) {
+ setResponse(
+ rsp,
+ HttpServletResponse.SC_BAD_REQUEST,
+ "No advertised 'Accept' headers can be honoured. 'text/plain' should be provided in the request 'Accept' header.");
+ return;
+ }
+
+ try {
+ permissionBackend.currentUser().check(GlobalPermission.ADMINISTRATE_SERVER);
+ } catch (AuthException | PermissionBackendException e) {
+ setResponse(
+ rsp,
+ HttpServletResponse.SC_FORBIDDEN,
+ "administrateServer for plugin cache-chroniclemap not permitted");
+ return;
+ }
+ Optional<Path> cacheDir = getCacheDir();
+
+ int maxBloatFactor =
+ Optional.ofNullable(req.getParameter(MAX_BLOAT_FACTOR_PARAM))
+ .map(Integer::parseInt)
+ .orElse(DEFAULT_MAX_BLOAT_FACTOR);
+
+ int sizeMultiplier =
+ Optional.ofNullable(req.getParameter(SIZE_MULTIPLIER_PARAM))
+ .map(Integer::parseInt)
+ .orElse(DEFAULT_SIZE_MULTIPLIER);
+
+ if (!cacheDir.isPresent()) {
+ setResponse(
+ rsp,
+ HttpServletResponse.SC_BAD_REQUEST,
+ "Cannot run migration, cache directory is not configured");
+ return;
+ }
+
+ logger.atInfo().log("Migrating H2 caches to Chronicle-Map...");
+ logger.atInfo().log("* Size multiplier: " + sizeMultiplier);
+ logger.atInfo().log("* Max Bloat Factor: " + maxBloatFactor);
+
+ Config outputChronicleMapConfig = new Config();
+
+ try {
+ for (PersistentCacheDef<?, ?> in : persistentCacheDefs) {
+ Optional<Path> h2CacheFile = getH2CacheFile(cacheDir.get(), in.name());
+
+ if (h2CacheFile.isPresent()) {
+ H2AggregateData stats = getStats(h2CacheFile.get());
+
+ if (!stats.isEmpty()) {
+ ChronicleMapCacheImpl<?, ?> chronicleMapCache =
+ new ChronicleMapCacheImpl<>(
+ in,
+ makeChronicleMapConfig(
+ configFactory, cacheDir.get(), in, stats, sizeMultiplier, maxBloatFactor),
+ null,
+ new DisabledMetricMaker());
+ doMigrate(h2CacheFile.get(), in, chronicleMapCache);
+ chronicleMapCache.close();
+ appendBloatedConfig(outputChronicleMapConfig, stats, maxBloatFactor, sizeMultiplier);
+ }
+ }
+ }
+ } catch (Exception e) {
+ logger.atSevere().withCause(e).log("H2 to chronicle-map migration failed");
+ setResponse(rsp, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
+ }
+
+ logger.atInfo().log("Migration completed");
+ setResponse(rsp, HttpServletResponse.SC_OK, outputChronicleMapConfig.toText());
+ }
+
+ protected Optional<Path> getCacheDir() throws IOException {
+ String name = gerritConfig.getString("cache", null, "directory");
+ if (name == null) {
+ return Optional.empty();
+ }
+ Path loc = site.resolve(name);
+ if (!Files.exists(loc)) {
+ throw new IOException(
+ String.format("disk cache is configured but doesn't exist: %s", loc.toAbsolutePath()));
+ }
+ if (!Files.isReadable(loc)) {
+ throw new IOException(String.format("Can't read from disk cache: %s", loc.toAbsolutePath()));
+ }
+ logger.atFine().log("Enabling disk cache %s", loc.toAbsolutePath());
+ return Optional.of(loc);
+ }
+
+ private Optional<Path> getH2CacheFile(Path cacheDir, String name) {
+ Path h2CacheFile = cacheDir.resolve(String.format("%s.%s", name, H2_SUFFIX));
+ if (Files.exists(h2CacheFile)) {
+ return Optional.of(h2CacheFile);
+ }
+ return Optional.empty();
+ }
+
+ private void appendBloatedConfig(
+ Config config, H2AggregateData stats, int maxBloatFactor, int sizeMultiplier) {
+ appendToConfig(
+ config,
+ H2AggregateData.create(
+ stats.cacheName(),
+ stats.size() * sizeMultiplier,
+ stats.avgKeySize(),
+ stats.avgValueSize()));
+ config.setLong("cache", stats.cacheName(), "maxBloatFactor", maxBloatFactor);
+ }
+
+ protected static ChronicleMapCacheConfig makeChronicleMapConfig(
+ ChronicleMapCacheConfig.Factory configFactory,
+ Path cacheDir,
+ PersistentCacheDef<?, ?> in,
+ H2AggregateData stats,
+ int sizeMultiplier,
+ int maxBloatFactor) {
+ return configFactory.createWithValues(
+ in.configKey(),
+ ChronicleMapCacheFactory.fileName(cacheDir, in.name(), in.version()),
+ in.expireAfterWrite(),
+ in.refreshAfterWrite(),
+ stats.size() * sizeMultiplier,
+ stats.avgKeySize(),
+ stats.avgValueSize(),
+ maxBloatFactor);
+ }
+
+ private void doMigrate(
+ Path h2File, PersistentCacheDef<?, ?> in, ChronicleMapCacheImpl<?, ?> chronicleMapCache)
+ throws RestApiException {
+
+ String url = jdbcUrl(h2File);
+ try (Connection conn = Driver.load().connect(url, null)) {
+ PreparedStatement preparedStatement =
+ conn.prepareStatement("SELECT k, v, created FROM data WHERE version=?");
+ preparedStatement.setInt(1, in.version());
+
+ try (ResultSet r = preparedStatement.executeQuery()) {
+ while (r.next()) {
+ Object key =
+ isStringType(in.keyType())
+ ? r.getString(1)
+ : in.keySerializer().deserialize(r.getBytes(1));
+ Object value =
+ isStringType(in.valueType())
+ ? r.getString(2)
+ : in.valueSerializer().deserialize(r.getBytes(2));
+ Timestamp created = r.getTimestamp(3);
+ chronicleMapCache.putUnchecked(key, value, created);
+ }
+ }
+
+ } catch (Exception e) {
+ String message = String.format("FATAL: error migrating %s H2 cache", in.name());
+ logger.atSevere().withCause(e).log(message);
+ throw RestApiException.wrap(message, e);
+ }
+ }
+
+ private boolean isStringType(TypeLiteral<?> typeLiteral) {
+ return typeLiteral.getRawType().getSimpleName().equals("String");
+ }
+
+ private void setResponse(HttpServletResponse httpResponse, int statusCode, String value)
+ throws IOException {
+ httpResponse.setContentType(TEXT_PLAIN);
+ httpResponse.setStatus(statusCode);
+ PrintWriter writer = httpResponse.getWriter();
+ writer.print(value);
+ }
+
+ private boolean hasInvalidAcceptHeader(HttpServletRequest req) {
+ return req.getHeader(ACCEPT) != null
+ && !Arrays.asList("text/plain", "text/*", "*/*").contains(req.getHeader(ACCEPT));
+ }
+}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/HttpModule.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/HttpModule.java
new file mode 100644
index 0000000..28956ff
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/HttpModule.java
@@ -0,0 +1,34 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.modules.cache.chroniclemap;
+
+import com.google.gerrit.extensions.config.FactoryModule;
+import com.google.inject.servlet.ServletModule;
+
+public class HttpModule extends ServletModule {
+
+ @Override
+ protected void configureServlets() {
+ install(
+ new FactoryModule() {
+ @Override
+ protected void configure() {
+ factory(ChronicleMapCacheConfig.Factory.class);
+ }
+ });
+
+ serve("/migrate").with(H2MigrationServlet.class);
+ }
+}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRU.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRU.java
index ac5183e..7f450d5 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRU.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRU.java
@@ -46,7 +46,13 @@
return LRUMap.containsKey(key);
}
- public boolean remove(K key) {
+ /**
+ * Remove a key from the map
+ *
+ * @param key element to remove from the map
+ * @return true when key was in the map, null otherwise
+ */
+ public Boolean remove(K key) {
return LRUMap.remove(key);
}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2Caches.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2Caches.java
deleted file mode 100644
index d61fb85..0000000
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2Caches.java
+++ /dev/null
@@ -1,214 +0,0 @@
-// Copyright (C) 2021 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-package com.googlesource.gerrit.modules.cache.chroniclemap;
-
-import com.google.gerrit.common.data.GlobalCapability;
-import com.google.gerrit.extensions.annotations.RequiresCapability;
-import com.google.gerrit.metrics.DisabledMetricMaker;
-import com.google.gerrit.server.cache.PersistentCacheDef;
-import com.google.gerrit.server.cache.serialize.CacheSerializer;
-import com.google.gerrit.server.cache.serialize.StringCacheSerializer;
-import com.google.gerrit.server.config.GerritServerConfig;
-import com.google.gerrit.server.config.SitePaths;
-import com.google.gerrit.sshd.CommandMetaData;
-import com.google.inject.Binding;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Key;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.TextProgressMonitor;
-import org.h2.Driver;
-import org.kohsuke.args4j.Option;
-
-@RequiresCapability(GlobalCapability.ADMINISTRATE_SERVER)
-@CommandMetaData(name = "migrate-h2-caches", description = "Migrate H2 caches to Chronicle-Map")
-public class MigrateH2Caches extends H2CacheSshCommand {
-
- private final Injector injector;
- private final ChronicleMapCacheConfig.Factory configFactory;
-
- protected static int DEFAULT_SIZE_MULTIPLIER = 3;
- protected static int DEFAULT_MAX_BLOAT_FACTOR = 3;
-
- @Option(
- name = "--size-multiplier",
- aliases = {"-s"},
- metaVar = "MULTIPLIER",
- usage = "Multiplicative factor for the number of entries allowed in chronicle-map")
- private int sizeMultiplier = DEFAULT_SIZE_MULTIPLIER;
-
- @Option(
- name = "--max-bloat-factor",
- aliases = {"-m"},
- metaVar = "FACTOR",
- usage = "maximum number of times chronicle-map cache is allowed to grow in size")
- private int maxBloatFactor = DEFAULT_MAX_BLOAT_FACTOR;
-
- @Inject
- MigrateH2Caches(
- @GerritServerConfig Config cfg,
- SitePaths site,
- Injector injector,
- ChronicleMapCacheConfig.Factory configFactory) {
- this.injector = injector;
- this.configFactory = configFactory;
- this.site = site;
- this.gerritConfig = cfg;
- }
-
- @Override
- protected void run() throws Exception {
- Optional<Path> cacheDir = getCacheDir();
-
- if (!cacheDir.isPresent()) {
- throw die("Cannot run migration, cache directory is not configured");
- }
-
- stdout.println("Migrating H2 caches to Chronicle-Map...");
- stdout.println("* Size multiplier: " + sizeMultiplier);
- stdout.println("* Max Bloat Factor: " + maxBloatFactor);
- Set<PersistentCacheDef<?, ?>> cacheDefs = getAllBoundPersistentCacheDefs();
-
- Config outputChronicleMapConfig = new Config();
-
- for (PersistentCacheDef<?, ?> in : cacheDefs) {
- Optional<Path> h2CacheFile = getH2CacheFile(cacheDir.get(), in.name());
-
- if (h2CacheFile.isPresent()) {
- H2AggregateData stats = getStats(h2CacheFile.get());
-
- if (!stats.isEmpty()) {
- ChronicleMapCacheImpl<?, ?> chronicleMapCache =
- new ChronicleMapCacheImpl<>(
- in,
- makeChronicleMapConfig(
- configFactory, cacheDir.get(), in, stats, sizeMultiplier, maxBloatFactor),
- null,
- new DisabledMetricMaker());
- doMigrate(h2CacheFile.get(), in, chronicleMapCache, stats.size());
- chronicleMapCache.close();
- appendBloatedConfig(outputChronicleMapConfig, stats);
- }
- }
- }
- stdout.println("Complete!");
- stdout.println();
- stdout.println("****************************");
- stdout.println("** Chronicle-map template **");
- stdout.println("****************************");
- stdout.println();
- stdout.println(outputChronicleMapConfig.toText());
- }
-
- protected static ChronicleMapCacheConfig makeChronicleMapConfig(
- ChronicleMapCacheConfig.Factory configFactory,
- Path cacheDir,
- PersistentCacheDef<?, ?> in,
- H2AggregateData stats,
- int sizeMultiplier,
- int maxBloatFactor) {
- return configFactory.createWithValues(
- in.configKey(),
- ChronicleMapCacheFactory.fileName(cacheDir, in.name(), in.version()),
- in.expireAfterWrite(),
- in.refreshAfterWrite(),
- stats.size() * sizeMultiplier,
- stats.avgKeySize(),
- stats.avgValueSize(),
- maxBloatFactor);
- }
-
- private void doMigrate(
- Path h2File,
- PersistentCacheDef<?, ?> in,
- ChronicleMapCacheImpl<?, ?> chronicleMapCache,
- long totalEntries)
- throws UnloggedFailure {
-
- TextProgressMonitor cacheProgress = new TextProgressMonitor(stdout);
- cacheProgress.beginTask(String.format("[%s]", in.name()), (int) totalEntries);
-
- String url = jdbcUrl(h2File);
- try (Connection conn = Driver.load().connect(url, null)) {
- PreparedStatement preparedStatement =
- conn.prepareStatement("SELECT k, v, created FROM data WHERE version=?");
- preparedStatement.setInt(1, in.version());
-
- try (ResultSet r = preparedStatement.executeQuery()) {
- while (r.next()) {
- Object key = in.keySerializer().deserialize(getBytes(r, 1, in.keySerializer()));
- Object value = in.valueSerializer().deserialize(getBytes(r, 2, in.valueSerializer()));
- Timestamp created = r.getTimestamp(3);
- chronicleMapCache.putUnchecked(key, value, created);
- cacheProgress.update(1);
- }
- }
-
- } catch (Exception e) {
- String message = String.format("FATAL: error migrating %s H2 cache", in.name());
- logger.atSevere().withCause(e).log(message);
- stderr.println(message);
- throw die(e);
- }
- cacheProgress.endTask();
- }
-
- private Set<PersistentCacheDef<?, ?>> getAllBoundPersistentCacheDefs() {
- Set<PersistentCacheDef<?, ?>> cacheDefs = new HashSet<>();
- for (Map.Entry<Key<?>, Binding<?>> entry : injector.getParent().getAllBindings().entrySet()) {
- final Class<?> rawType = entry.getKey().getTypeLiteral().getRawType();
- if ("PersistentCacheDef".equals(rawType.getSimpleName())) {
- cacheDefs.add((PersistentCacheDef<?, ?>) entry.getValue().getProvider().get());
- }
- }
- return cacheDefs;
- }
-
- private byte[] getBytes(ResultSet r, int columnIndex, CacheSerializer<?> serializer)
- throws SQLException {
- return (serializer instanceof StringCacheSerializer)
- ? r.getString(columnIndex).getBytes()
- : r.getBytes(columnIndex);
- }
-
- private Optional<Path> getH2CacheFile(Path cacheDir, String name) {
- Path h2CacheFile = cacheDir.resolve(String.format("%s.%s", name, H2_SUFFIX));
- if (Files.exists(h2CacheFile)) {
- return Optional.of(h2CacheFile);
- }
- return Optional.empty();
- }
-
- private void appendBloatedConfig(Config config, H2AggregateData stats) {
- appendToConfig(
- config,
- H2AggregateData.create(
- stats.cacheName(),
- stats.size() * sizeMultiplier,
- stats.avgKeySize(),
- stats.avgValueSize()));
- config.setLong("cache", stats.cacheName(), "maxBloatFactor", maxBloatFactor);
- }
-}
diff --git a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/SSHCommandModule.java b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/SSHCommandModule.java
index 81c2a62..e7e0074 100644
--- a/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/SSHCommandModule.java
+++ b/src/main/java/com/googlesource/gerrit/modules/cache/chroniclemap/SSHCommandModule.java
@@ -20,6 +20,5 @@
protected void configureCommands() {
factory(ChronicleMapCacheConfig.Factory.class);
command("analyze-h2-caches").to(AnalyzeH2Caches.class);
- command("migrate-h2-caches").to(MigrateH2Caches.class);
}
}
diff --git a/src/main/resources/Documentation/migration.md b/src/main/resources/Documentation/migration.md
index e182d85..22ebb2c 100644
--- a/src/main/resources/Documentation/migration.md
+++ b/src/main/resources/Documentation/migration.md
@@ -1,6 +1,6 @@
## Migration from H2 Caches
-This module provides an SSH command to help converting existing cache from H2 to
+This module provides a REST API to help convert existing caches from H2 to
chronicle-map, which requires the `Administrate Server` capability to be
executed.
@@ -12,6 +12,22 @@
database, it will participate to the overall database load, so you should test
accordingly.
+The following caches will be migrated (if they exist and contain any data):
+
+* accounts
+* change_kind
+* change_notes
+* conflicts
+* diff
+* diff_intraline
+* diff_summary
+* git_tags
+* mergeability
+* oauth_tokens
+* persisted_projects
+* pure_revert
+* web_sessions
+
The migration should be performed as follows:
* Copy `cache-chroniclemap.jar` file in the `plugins/` directory.
@@ -25,31 +41,16 @@
* You can now run the migration
```bash
-ssh -p 29418 admin@<gerrit-server> cache-chroniclemap migrate-h2-caches \
- [--max-bloat-factor FACTOR] \
- [--size-multiplier MULTIPLIER]
+curl -v -XPUT -u <admin> '<gerrit>/a/plugins/cache-chroniclemap/migrate?[max-bloat-factor=FACTOR]&[size-multiplier=MULTIPLIER]'
```
This might require some time, depending on the size of the H2 caches and it will
terminate with the output of the configuration that should be placed in
`etc/gerrit.config` in order to leverage the newly created caches correctly.
-For example:
+Output example:
-```Migrating H2 caches to Chronicle-Map...
- * Size multiplier: 1
- * Max Bloat Factor: 1
- [diff]: 100% (216/216)
- [persisted_projects]: 100% (3/3)
- [diff_summary]: 100% (216/216)
- [accounts]: 100% (2/2)
- [mergeability]: 100% (2444/2444)
- Complete!
-
- ****************************
- ** Chronicle-map template **
- ****************************
-
+```
[cache "diff"]
maxEntries = 216
avgKeySize = 188
@@ -75,15 +76,20 @@
avgKeySize = 150
avgValueSize = 20
maxBloatFactor = 1
+ [cache "web_sessions"]
+ maxEntries = 94852
+ avgKeySize = 68
+ avgValueSize = 382
+ maxBloatFactor = 1
```
-Optionally the SSH command can receive the following additional arguments:
+Optionally the REST endpoint can receive the following additional arguments:
-* --max-bloat-factor (-m) FACTOR
+* max-bloat-factor=FACTOR
maximum number of times chronicle-map cache is allowed to grow in size.
*default:3*
-* --size-multiplier (-s) MULTIPLIER
+* size-multiplier=MULTIPLIER
Multiplicative factor for the number of entries allowed in chronicle-map.
*default:3*
\ No newline at end of file
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
index f8ef939..e8c1f4a 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/ChronicleMapCacheTest.java
@@ -469,6 +469,23 @@
() -> (int) getMetric(hotKeysSizeMetricName).getValue() == 0, METRIC_TRIGGER_TIMEOUT);
}
+ @Test
+ public void shouldSanitizeUnwantedCharsInMetricNames() throws Exception {
+ String cacheName = "very+confusing.cache#name";
+ String sanitized = "very_confusing_cache_name";
+ String hotKeySizeMetricName = "cache/chroniclemap/hot_keys_size_" + sanitized;
+ String percentageFreeMetricName = "cache/chroniclemap/percentage_free_space_" + sanitized;
+ String autoResizeMetricName = "cache/chroniclemap/remaining_autoresizes_" + sanitized;
+ String hotKeyCapacityMetricName = "cache/chroniclemap/hot_keys_capacity_" + sanitized;
+
+ newCacheWithMetrics(cacheName);
+
+ getMetric(hotKeySizeMetricName);
+ getMetric(percentageFreeMetricName);
+ getMetric(autoResizeMetricName);
+ getMetric(hotKeyCapacityMetricName);
+ }
+
private int valueSize(String value) {
final TimedValueMarshaller<String> marshaller =
new TimedValueMarshaller<>(StringCacheSerializer.INSTANCE);
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRUTest.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRUTest.java
index af27216..3d435c9 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRUTest.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/InMemoryLRUTest.java
@@ -42,4 +42,20 @@
assertThat(map.toArray()).asList().containsExactly("B", "C");
}
+
+ @Test
+ public void remove_unexistingEntryShouldReturnNull() {
+ InMemoryLRU<Object> map = new InMemoryLRU<>(1);
+
+ assertThat(map.remove("foo")).isNull();
+ }
+
+ @Test
+ public void remove_existingEntryShouldReturnTrue() {
+ InMemoryLRU<Object> map = new InMemoryLRU<>(1);
+
+ map.add("foo");
+
+ assertThat(map.remove("foo")).isTrue();
+ }
}
diff --git a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesIT.java b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesIT.java
index 89fe5f6..cc600b5 100644
--- a/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesIT.java
+++ b/src/test/java/com/googlesource/gerrit/modules/cache/chroniclemap/MigrateH2CachesIT.java
@@ -14,19 +14,23 @@
package com.googlesource.gerrit.modules.cache.chroniclemap;
+import static com.google.common.net.HttpHeaders.CONTENT_TYPE;
import static com.google.common.truth.Truth.assertThat;
-import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheSshCommand.H2_SUFFIX;
-import static com.googlesource.gerrit.modules.cache.chroniclemap.MigrateH2Caches.DEFAULT_MAX_BLOAT_FACTOR;
-import static com.googlesource.gerrit.modules.cache.chroniclemap.MigrateH2Caches.DEFAULT_SIZE_MULTIPLIER;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2CacheCommand.H2_SUFFIX;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2MigrationServlet.DEFAULT_MAX_BLOAT_FACTOR;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2MigrationServlet.DEFAULT_SIZE_MULTIPLIER;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2MigrationServlet.MAX_BLOAT_FACTOR_PARAM;
+import static com.googlesource.gerrit.modules.cache.chroniclemap.H2MigrationServlet.SIZE_MULTIPLIER_PARAM;
+import static org.apache.http.HttpHeaders.ACCEPT;
+import static org.eclipse.jgit.util.HttpSupport.TEXT_PLAIN;
-import com.google.common.base.Joiner;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.gerrit.acceptance.LightweightPluginDaemonTest;
-import com.google.gerrit.acceptance.Sandboxed;
+import com.google.gerrit.acceptance.RestResponse;
+import com.google.gerrit.acceptance.RestSession;
import com.google.gerrit.acceptance.TestPlugin;
import com.google.gerrit.acceptance.UseLocalDisk;
-import com.google.gerrit.acceptance.UseSsh;
import com.google.gerrit.acceptance.WaitUtil;
import com.google.gerrit.entities.CachedProjectConfig;
import com.google.gerrit.entities.Project;
@@ -37,9 +41,9 @@
import com.google.gerrit.server.cache.h2.H2CacheImpl;
import com.google.gerrit.server.cache.proto.Cache;
import com.google.gerrit.server.cache.serialize.ObjectIdConverter;
+import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
-import com.google.gerrit.sshd.BaseCommand;
import com.google.inject.Binding;
import com.google.inject.Inject;
import com.google.inject.Key;
@@ -48,78 +52,131 @@
import java.nio.file.Path;
import java.time.Duration;
import java.util.Map;
+import org.apache.http.message.BasicHeader;
+import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Repository;
import org.junit.Before;
import org.junit.Test;
-@Sandboxed
-@UseSsh
@TestPlugin(
name = "cache-chroniclemap",
- sshModule = "com.googlesource.gerrit.modules.cache.chroniclemap.SSHCommandModule")
+ httpModule = "com.googlesource.gerrit.modules.cache.chroniclemap.HttpModule")
public class MigrateH2CachesIT extends LightweightPluginDaemonTest {
private final Duration LOAD_CACHE_WAIT_TIMEOUT = Duration.ofSeconds(4);
private String ACCOUNTS_CACHE_NAME = "accounts";
private String PERSISTED_PROJECTS_CACHE_NAME = "persisted_projects";
+ private String MIGRATION_ENDPOINT = "/plugins/cache-chroniclemap/migrate";
@Inject protected GitRepositoryManager repoManager;
@Inject private SitePaths sitePaths;
+ @Inject @GerritServerConfig Config cfg;
private ChronicleMapCacheConfig.Factory chronicleMapCacheConfigFactory;
- private String cmd = Joiner.on(" ").join("cache-chroniclemap", "migrate-h2-caches");
-
@Before
public void setUp() {
chronicleMapCacheConfigFactory =
- plugin.getSshInjector().getInstance(ChronicleMapCacheConfig.Factory.class);
+ plugin.getHttpInjector().getInstance(ChronicleMapCacheConfig.Factory.class);
}
@Test
@UseLocalDisk
public void shouldRunAndCompleteSuccessfullyWhenCacheDirectoryIsDefined() throws Exception {
- String result = adminSshSession.exec(cmd);
- adminSshSession.assertSuccess();
- assertThat(result).contains("Complete");
+ runMigration(adminRestSession).assertOK();
}
@Test
@UseLocalDisk
- public void shouldOutputChronicleMapBloatedConfiguration() throws Exception {
+ public void shouldReturnTextPlain() throws Exception {
+ RestResponse result = runMigration(adminRestSession);
+ assertThat(result.getHeader(CONTENT_TYPE)).contains(TEXT_PLAIN);
+ }
+
+ @Test
+ @UseLocalDisk
+ public void shouldReturnBadRequestWhenTextPlainIsNotAnAcceptedHeader() throws Exception {
+ runMigrationWithAcceptHeader(adminRestSession, "application/json").assertBadRequest();
+ }
+
+ @Test
+ @UseLocalDisk
+ public void shouldReturnSuccessWhenAllTextContentsAreAccepted() throws Exception {
+ runMigrationWithAcceptHeader(adminRestSession, "text/*").assertOK();
+ }
+
+ @Test
+ @UseLocalDisk
+ public void shouldReturnSuccessWhenAllContentsAreAccepted() throws Exception {
+ runMigrationWithAcceptHeader(adminRestSession, "*/*").assertOK();
+ }
+
+ @Test
+ @UseLocalDisk
+ public void shouldOutputChronicleMapBloatedDefaultConfiguration() throws Exception {
waitForCacheToLoad(ACCOUNTS_CACHE_NAME);
waitForCacheToLoad(PERSISTED_PROJECTS_CACHE_NAME);
- String result = adminSshSession.exec(cmd);
- adminSshSession.assertSuccess();
+ RestResponse result = runMigration(adminRestSession);
+ result.assertOK();
- assertThat(result)
- .contains(
- "[cache \""
- + ACCOUNTS_CACHE_NAME
- + "\"]\n"
- + "\tmaxEntries = "
- + H2CacheFor(ACCOUNTS_CACHE_NAME).diskStats().size() * DEFAULT_SIZE_MULTIPLIER);
+ Config configResult = new Config();
+ configResult.fromText(result.getEntityContent());
- assertThat(result)
- .contains(
- "[cache \""
- + PERSISTED_PROJECTS_CACHE_NAME
- + "\"]\n"
- + "\tmaxEntries = "
- + H2CacheFor(PERSISTED_PROJECTS_CACHE_NAME).diskStats().size()
- * DEFAULT_SIZE_MULTIPLIER);
+ assertThat(configResult.getInt("cache", ACCOUNTS_CACHE_NAME, "maxEntries", 0))
+ .isEqualTo(H2CacheFor(ACCOUNTS_CACHE_NAME).diskStats().size() * DEFAULT_SIZE_MULTIPLIER);
+
+ assertThat(configResult.getInt("cache", ACCOUNTS_CACHE_NAME, "maxBloatFactor", 0))
+ .isEqualTo(DEFAULT_MAX_BLOAT_FACTOR);
+
+ assertThat(configResult.getInt("cache", PERSISTED_PROJECTS_CACHE_NAME, "maxEntries", 0))
+ .isEqualTo(
+ H2CacheFor(PERSISTED_PROJECTS_CACHE_NAME).diskStats().size() * DEFAULT_SIZE_MULTIPLIER);
+
+ assertThat(configResult.getInt("cache", PERSISTED_PROJECTS_CACHE_NAME, "maxBloatFactor", 0))
+ .isEqualTo(DEFAULT_MAX_BLOAT_FACTOR);
+ }
+
+ @Test
+ @UseLocalDisk
+ public void shouldOutputChronicleMapBloatedProvidedConfiguration() throws Exception {
+ waitForCacheToLoad(ACCOUNTS_CACHE_NAME);
+ waitForCacheToLoad(PERSISTED_PROJECTS_CACHE_NAME);
+
+ int sizeMultiplier = 2;
+ int maxBloatFactor = 3;
+ RestResponse result = runMigration(sizeMultiplier, maxBloatFactor);
+ result.assertOK();
+
+ Config configResult = new Config();
+ configResult.fromText(result.getEntityContent());
+
+ assertThat(configResult.getInt("cache", ACCOUNTS_CACHE_NAME, "maxEntries", 0))
+ .isEqualTo(H2CacheFor(ACCOUNTS_CACHE_NAME).diskStats().size() * sizeMultiplier);
+
+ assertThat(configResult.getInt("cache", ACCOUNTS_CACHE_NAME, "maxBloatFactor", 0))
+ .isEqualTo(maxBloatFactor);
+
+ assertThat(configResult.getInt("cache", PERSISTED_PROJECTS_CACHE_NAME, "maxEntries", 0))
+ .isEqualTo(H2CacheFor(PERSISTED_PROJECTS_CACHE_NAME).diskStats().size() * sizeMultiplier);
+
+ assertThat(configResult.getInt("cache", PERSISTED_PROJECTS_CACHE_NAME, "maxBloatFactor", 0))
+ .isEqualTo(maxBloatFactor);
}
@Test
public void shouldFailWhenCacheDirectoryIsNotDefined() throws Exception {
- adminSshSession.exec(cmd);
- adminSshSession.assertFailure("fatal: Cannot run migration, cache directory is not configured");
+ RestResponse result = runMigration(adminRestSession);
+ result.assertBadRequest();
+ assertThat(result.getEntityContent())
+ .contains("Cannot run migration, cache directory is not configured");
}
@Test
public void shouldFailWhenUserHasNoAdminServerCapability() throws Exception {
- userSshSession.exec(cmd);
- userSshSession.assertFailure("administrateServer for plugin cache-chroniclemap not permitted");
+ RestResponse result = runMigration(userRestSession);
+ result.assertForbidden();
+ assertThat(result.getEntityContent())
+ .contains("administrateServer for plugin cache-chroniclemap not permitted");
}
@Test
@@ -127,8 +184,7 @@
public void shouldMigrateAccountsCache() throws Exception {
waitForCacheToLoad(ACCOUNTS_CACHE_NAME);
- adminSshSession.exec(cmd);
- adminSshSession.assertSuccess();
+ runMigration(adminRestSession).assertOK();
ChronicleMapCacheImpl<CachedAccountDetails.Key, CachedAccountDetails> chronicleMapCache =
chronicleCacheFor(ACCOUNTS_CACHE_NAME);
@@ -143,8 +199,7 @@
public void shouldMigratePersistentProjects() throws Exception {
waitForCacheToLoad(PERSISTED_PROJECTS_CACHE_NAME);
- adminSshSession.exec(cmd);
- adminSshSession.assertSuccess();
+ runMigration(adminRestSession).assertOK();
H2CacheImpl<Cache.ProjectCacheKeyProto, CachedProjectConfig> h2Cache =
H2CacheFor(PERSISTED_PROJECTS_CACHE_NAME);
@@ -184,6 +239,26 @@
return findClassBoundWithName(CacheLoader.class, named);
}
+ private RestResponse runMigration(int sizeMultiplier, int maxBloatFactor) throws IOException {
+ return adminRestSession.put(
+ String.format(
+ "%s?%s=%d&%s=%d",
+ MIGRATION_ENDPOINT,
+ MAX_BLOAT_FACTOR_PARAM,
+ maxBloatFactor,
+ SIZE_MULTIPLIER_PARAM,
+ sizeMultiplier));
+ }
+
+ private RestResponse runMigration(RestSession restSession) throws IOException {
+ return runMigrationWithAcceptHeader(restSession, TEXT_PLAIN);
+ }
+
+ private RestResponse runMigrationWithAcceptHeader(RestSession restSession, String acceptHeader)
+ throws IOException {
+ return restSession.putWithHeader(MIGRATION_ENDPOINT, new BasicHeader(ACCEPT, acceptHeader));
+ }
+
private <T> T findClassBoundWithName(Class<T> clazz, String named) {
return plugin.getSysInjector().getAllBindings().entrySet().stream()
.filter(entry -> isClassBoundWithName(entry, clazz.getSimpleName(), named))
@@ -201,17 +276,16 @@
&& annotation.toString().endsWith(String.format("Named(value=\"%s\")", named));
}
- private <K, V> ChronicleMapCacheImpl<K, V> chronicleCacheFor(String cacheName)
- throws BaseCommand.UnloggedFailure, IOException {
+ private <K, V> ChronicleMapCacheImpl<K, V> chronicleCacheFor(String cacheName) throws Exception {
Path cacheDirectory = sitePaths.resolve(cfg.getString("cache", null, "directory"));
PersistentCacheDef<K, V> persistentDef = getPersistentCacheDef(cacheName);
ChronicleMapCacheConfig config =
- MigrateH2Caches.makeChronicleMapConfig(
+ H2MigrationServlet.makeChronicleMapConfig(
chronicleMapCacheConfigFactory,
cacheDirectory,
persistentDef,
- H2CacheSshCommand.getStats(
+ H2CacheCommand.getStats(
cacheDirectory.resolve(String.format("%s.%s", cacheName, H2_SUFFIX))),
DEFAULT_SIZE_MULTIPLIER,
DEFAULT_MAX_BLOAT_FACTOR);