Merge "Fix some warnings"
diff --git a/Documentation/config-gerrit.txt b/Documentation/config-gerrit.txt
index 3ddc3ee..cf2f2d9 100644
--- a/Documentation/config-gerrit.txt
+++ b/Documentation/config-gerrit.txt
@@ -4348,6 +4348,33 @@
 +
 Defaults to `true`.
 
+[[log.timeToKeep]]log.timeToKeep::
++
+Time for which logs should be kept before they are deleted. Values should use common
+time unit suffixes to express their setting:
++
+* d, day, days
+* w, week, weeks (`1 week` is treated as `7 days`)
+* mon, month, months (`1 month` is treated as `30 days`)
+* y, year, years (`1 year` is treated as `365 days`)
++
+The minimum granularity is days. Using a smaller time unit results in the deletion of
+all old logs, as if `0d` had been configured.
++
+Actively used logs are never deleted. Thus, this feature only works in combination with
+link:#log.rotate[log.rotate] being enabled. Log deletion happens at server startup and
+then daily at 11pm (in the server's local time zone).
++
+Depending on the filesystem, the following file times are used, in order of priority:
++
+* Time of file creation
+* Time when the file was last modified
+* Date added to the filename as part of log file rotation. Time will be set to `00:00:00Z`.
++
+If none of the above is available, the log file won't be deleted.
++
+Defaults to `-1`, i.e. log deletion is disabled.
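++
+For example, to delete log files that are older than three months:
++
+----
+[log]
+  timeToKeep = 3 months
+----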
+
 [[metrics]]
 === Section metrics
 
diff --git a/Documentation/rest-api-config.txt b/Documentation/rest-api-config.txt
index b6cbaaa..aa88141 100644
--- a/Documentation/rest-api-config.txt
+++ b/Documentation/rest-api-config.txt
@@ -931,7 +931,7 @@
       "state": "SLEEPING",
       "start_time": "2014-06-11 12:58:51.508000000",
       "delay": 3287966,
-      "command": "Log File Compressor"
+      "command": "Log File Manager"
     }
   ]
 ----
diff --git a/java/com/google/gerrit/httpd/init/WebAppInitializer.java b/java/com/google/gerrit/httpd/init/WebAppInitializer.java
index ba88617..fa67034 100644
--- a/java/com/google/gerrit/httpd/init/WebAppInitializer.java
+++ b/java/com/google/gerrit/httpd/init/WebAppInitializer.java
@@ -45,7 +45,7 @@
 import com.google.gerrit.lifecycle.LifecycleModule;
 import com.google.gerrit.lucene.LuceneIndexModule;
 import com.google.gerrit.metrics.dropwizard.DropWizardMetricMaker;
-import com.google.gerrit.pgm.util.LogFileCompressor.LogFileCompressorModule;
+import com.google.gerrit.pgm.util.LogFileManager.LogFileManagerModule;
 import com.google.gerrit.server.DefaultRefLogIdentityProvider;
 import com.google.gerrit.server.LibModuleLoader;
 import com.google.gerrit.server.LibModuleType;
@@ -302,7 +302,7 @@
   private Injector createSysInjector() {
     final List<Module> modules = new ArrayList<>();
     modules.add(new DropWizardMetricMaker.RestModule());
-    modules.add(new LogFileCompressorModule());
+    modules.add(new LogFileManagerModule());
     modules.add(new EventBrokerModule());
     modules.add(new JdbcAccountPatchReviewStoreModule(config));
     modules.add(cfgInjector.getInstance(GitRepositoryManagerModule.class));
diff --git a/java/com/google/gerrit/pgm/Daemon.java b/java/com/google/gerrit/pgm/Daemon.java
index d213a60..198eeaa 100644
--- a/java/com/google/gerrit/pgm/Daemon.java
+++ b/java/com/google/gerrit/pgm/Daemon.java
@@ -52,7 +52,7 @@
 import com.google.gerrit.pgm.http.jetty.JettyModule;
 import com.google.gerrit.pgm.http.jetty.ProjectQoSFilter.ProjectQoSFilterModule;
 import com.google.gerrit.pgm.util.ErrorLogFile;
-import com.google.gerrit.pgm.util.LogFileCompressor.LogFileCompressorModule;
+import com.google.gerrit.pgm.util.LogFileManager.LogFileManagerModule;
 import com.google.gerrit.pgm.util.RuntimeShutdown;
 import com.google.gerrit.pgm.util.SiteProgram;
 import com.google.gerrit.server.DefaultRefLogIdentityProvider;
@@ -450,7 +450,7 @@
     final List<Module> modules = new ArrayList<>();
     modules.add(NoteDbSchemaVersionCheck.module());
     modules.add(new DropWizardMetricMaker.RestModule());
-    modules.add(new LogFileCompressorModule());
+    modules.add(new LogFileManagerModule());
 
     // Index module shutdown must happen before work queue shutdown, otherwise
     // work queue can get stuck waiting on index futures that will never return.
diff --git a/java/com/google/gerrit/pgm/util/LogFileCompressor.java b/java/com/google/gerrit/pgm/util/LogFileCompressor.java
deleted file mode 100644
index 5e49312..0000000
--- a/java/com/google/gerrit/pgm/util/LogFileCompressor.java
+++ /dev/null
@@ -1,171 +0,0 @@
-// Copyright (C) 2009 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.google.gerrit.pgm.util;
-
-import static java.util.concurrent.TimeUnit.HOURS;
-import static java.util.concurrent.TimeUnit.MILLISECONDS;
-
-import com.google.common.flogger.FluentLogger;
-import com.google.common.io.ByteStreams;
-import com.google.gerrit.extensions.events.LifecycleListener;
-import com.google.gerrit.lifecycle.LifecycleModule;
-import com.google.gerrit.server.config.GerritServerConfig;
-import com.google.gerrit.server.config.SitePaths;
-import com.google.gerrit.server.git.WorkQueue;
-import com.google.inject.Inject;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.temporal.ChronoUnit;
-import java.util.concurrent.Future;
-import java.util.zip.GZIPOutputStream;
-import org.eclipse.jgit.lib.Config;
-
-/** Compresses the old error logs. */
-public class LogFileCompressor implements Runnable {
-  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
-
-  public static class LogFileCompressorModule extends LifecycleModule {
-    @Override
-    protected void configure() {
-      listener().to(Lifecycle.class);
-    }
-  }
-
-  static class Lifecycle implements LifecycleListener {
-    private final WorkQueue queue;
-    private final LogFileCompressor compressor;
-    private final boolean enabled;
-
-    @Inject
-    Lifecycle(WorkQueue queue, LogFileCompressor compressor, @GerritServerConfig Config config) {
-      this.queue = queue;
-      this.compressor = compressor;
-      this.enabled = config.getBoolean("log", "compress", true);
-    }
-
-    @Override
-    public void start() {
-      if (!enabled) {
-        return;
-      }
-      // compress log once and then schedule compression every day at 11:00pm
-      queue.getDefaultQueue().execute(compressor);
-      ZoneId zone = ZoneId.systemDefault();
-      LocalDateTime now = LocalDateTime.now(zone);
-      long milliSecondsUntil11pm =
-          now.until(now.withHour(23).withMinute(0).withSecond(0).withNano(0), ChronoUnit.MILLIS);
-      @SuppressWarnings("unused")
-      Future<?> possiblyIgnoredError =
-          queue
-              .getDefaultQueue()
-              .scheduleAtFixedRate(
-                  compressor, milliSecondsUntil11pm, HOURS.toMillis(24), MILLISECONDS);
-    }
-
-    @Override
-    public void stop() {}
-  }
-
-  private final Path logs_dir;
-
-  @Inject
-  LogFileCompressor(SitePaths site) {
-    logs_dir = resolve(site.logs_dir);
-  }
-
-  private static Path resolve(Path p) {
-    try {
-      return p.toRealPath().normalize();
-    } catch (IOException e) {
-      return p.toAbsolutePath().normalize();
-    }
-  }
-
-  @Override
-  public void run() {
-    try {
-      if (!Files.isDirectory(logs_dir)) {
-        return;
-      }
-      try (DirectoryStream<Path> list = Files.newDirectoryStream(logs_dir)) {
-        for (Path entry : list) {
-          if (!isLive(entry) && !isCompressed(entry) && isLogFile(entry)) {
-            compress(entry);
-          }
-        }
-      } catch (IOException e) {
-        logger.atSevere().withCause(e).log("Error listing logs to compress in %s", logs_dir);
-      }
-    } catch (Exception e) {
-      logger.atSevere().withCause(e).log("Failed to compress log files: %s", e.getMessage());
-    }
-  }
-
-  private boolean isLive(Path entry) {
-    String name = entry.getFileName().toString();
-    return name.endsWith("_log")
-        || name.endsWith(".log")
-        || name.endsWith(".run")
-        || name.endsWith(".pid")
-        || name.endsWith(".json");
-  }
-
-  private boolean isCompressed(Path entry) {
-    String name = entry.getFileName().toString();
-    return name.endsWith(".gz") //
-        || name.endsWith(".zip") //
-        || name.endsWith(".bz2");
-  }
-
-  private boolean isLogFile(Path entry) {
-    return Files.isRegularFile(entry);
-  }
-
-  private void compress(Path src) {
-    Path dst = src.resolveSibling(src.getFileName() + ".gz");
-    Path tmp = src.resolveSibling(".tmp." + src.getFileName());
-    try {
-      try (InputStream in = Files.newInputStream(src);
-          OutputStream out = new GZIPOutputStream(Files.newOutputStream(tmp))) {
-        ByteStreams.copy(in, out);
-      }
-      tmp.toFile().setReadOnly();
-      try {
-        Files.move(tmp, dst);
-      } catch (IOException e) {
-        throw new IOException("Cannot rename " + tmp + " to " + dst, e);
-      }
-      Files.delete(src);
-    } catch (IOException e) {
-      logger.atSevere().withCause(e).log("Cannot compress %s", src);
-      try {
-        Files.deleteIfExists(tmp);
-      } catch (IOException e2) {
-        logger.atWarning().withCause(e2).log("Failed to delete temporary log file %s", tmp);
-      }
-    }
-  }
-
-  @Override
-  public String toString() {
-    return "Log File Compressor";
-  }
-}
diff --git a/java/com/google/gerrit/pgm/util/LogFileManager.java b/java/com/google/gerrit/pgm/util/LogFileManager.java
new file mode 100644
index 0000000..902f7d64
--- /dev/null
+++ b/java/com/google/gerrit/pgm/util/LogFileManager.java
@@ -0,0 +1,249 @@
+// Copyright (C) 2009 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.pgm.util;
+
+import static java.util.concurrent.TimeUnit.HOURS;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.flogger.FluentLogger;
+import com.google.common.io.ByteStreams;
+import com.google.gerrit.extensions.events.LifecycleListener;
+import com.google.gerrit.lifecycle.LifecycleModule;
+import com.google.gerrit.server.config.ConfigUtil;
+import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.server.git.WorkQueue;
+import com.google.inject.Inject;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.time.Duration;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.temporal.ChronoUnit;
+import java.util.Optional;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.zip.GZIPOutputStream;
+import org.eclipse.jgit.lib.Config;
+
+/** Compresses old log files and, if configured, deletes them once they expire. */
+public class LogFileManager implements Runnable {
+  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+  private static final Pattern LOG_FILENAME_PATTERN =
+      Pattern.compile("^.+(?<date>\\d{4}-\\d{2}-\\d{2})(.gz)?");
+  protected final boolean compressionEnabled;
+  private final Duration timeToKeep;
+
+  public static class LogFileManagerModule extends LifecycleModule {
+    @Override
+    protected void configure() {
+      listener().to(Lifecycle.class);
+    }
+  }
+
+  static class Lifecycle implements LifecycleListener {
+    private final WorkQueue queue;
+    private final LogFileManager manager;
+
+    @Inject
+    Lifecycle(WorkQueue queue, LogFileManager manager) {
+      this.queue = queue;
+      this.manager = manager;
+    }
+
+    @Override
+    public void start() {
+      if (!manager.compressionEnabled && manager.timeToKeep.isNegative()) {
+        return;
+      }
+      // run log file maintenance once and then schedule it to run every day at 11:00pm
+      queue.getDefaultQueue().execute(manager);
+      ZoneId zone = ZoneId.systemDefault();
+      LocalDateTime now = LocalDateTime.now(zone);
+      long milliSecondsUntil11pm =
+          now.until(now.withHour(23).withMinute(0).withSecond(0).withNano(0), ChronoUnit.MILLIS);
+      @SuppressWarnings("unused")
+      Future<?> possiblyIgnoredError =
+          queue
+              .getDefaultQueue()
+              .scheduleAtFixedRate(
+                  manager, milliSecondsUntil11pm, HOURS.toMillis(24), MILLISECONDS);
+    }
+
+    @Override
+    public void stop() {}
+  }
+
+  private final Path logs_dir;
+
+  @Inject
+  LogFileManager(SitePaths site, @GerritServerConfig Config config) {
+    this.logs_dir = resolve(site.logs_dir);
+    this.compressionEnabled = config.getBoolean("log", "compress", true);
+    this.timeToKeep = getTimeToKeep(config);
+  }
+
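+  /**
+   * Parses {@code log.timeToKeep} from the server config. Unset or invalid values yield a
+   * negative duration, which disables log deletion.
+   */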
+  private Duration getTimeToKeep(Config config) {
+    try {
+      return Duration.ofDays(
+          ConfigUtil.getTimeUnit(config, "log", null, "timeToKeep", -1, TimeUnit.DAYS));
+    } catch (IllegalArgumentException e) {
+      logger.atWarning().withCause(e).log(
+          "Illegal duration value for log deletion. Disabling log deletion.");
+      return Duration.ofDays(-1L);
+    }
+  }
+
+  private static Path resolve(Path p) {
+    try {
+      return p.toRealPath().normalize();
+    } catch (IOException e) {
+      return p.toAbsolutePath().normalize();
+    }
+  }
+
+  @Override
+  public void run() {
+    logger.atInfo().log("Starting log file maintenance.");
+    try {
+      if (!Files.isDirectory(logs_dir)) {
+        return;
+      }
+      try (DirectoryStream<Path> list = Files.newDirectoryStream(logs_dir)) {
+        for (Path entry : list) {
+          if (isLive(entry) || !isLogFile(entry)) {
+            continue;
+          }
+          if (!timeToKeep.isNegative() && isExpired(entry)) {
+            if (delete(entry)) {
+              continue;
+            }
+          }
+          if (compressionEnabled && !isCompressed(entry)) {
+            compress(entry);
+          }
+        }
+      } catch (IOException e) {
+        logger.atSevere().withCause(e).log("Error listing log files in %s", logs_dir);
+      }
+    } catch (Exception e) {
+      logger.atSevere().withCause(e).log("Failed to process log files: %s", e.getMessage());
+    }
+    logger.atInfo().log("Log file maintenance has finished.");
+  }
+
+  private boolean isLive(Path entry) {
+    String name = entry.getFileName().toString();
+    return name.endsWith("_log")
+        || name.endsWith(".log")
+        || name.endsWith(".run")
+        || name.endsWith(".pid")
+        || name.endsWith(".json");
+  }
+
+  private boolean isCompressed(Path entry) {
+    String name = entry.getFileName().toString();
+    return name.endsWith(".gz") //
+        || name.endsWith(".zip") //
+        || name.endsWith(".bz2");
+  }
+
+  private boolean isLogFile(Path entry) {
+    return Files.isRegularFile(entry);
+  }
+
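+  /**
+   * Whether a log file is older than {@code log.timeToKeep}, based on its creation time. If the
+   * filesystem does not report a creation time (epoch), the rotation date encoded in the file
+   * name is used instead; if neither is available, the file is never considered expired.
+   */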
+  @VisibleForTesting
+  boolean isExpired(Path entry) {
+    try {
+      FileTime creationTime = Files.readAttributes(entry, BasicFileAttributes.class).creationTime();
+
+      if (creationTime.toInstant().equals(Instant.EPOCH)) {
+        Optional<Instant> fileDate = getDateFromFilename(entry);
+        if (fileDate.isPresent()) {
+          return fileDate.get().isBefore(Instant.now().minus(timeToKeep));
+        }
+        return false;
+      }
+
+      return creationTime.toInstant().isBefore(Instant.now().minus(timeToKeep));
+    } catch (IOException e) {
+      logger.atSevere().withCause(e).log("Failed to get creation time of log file %s", entry);
+    }
+    return false;
+  }
+
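+  /**
+   * Extracts the rotation date from a log file name such as {@code error_log.2024-01-01.gz} and
+   * returns it as an {@link Instant} at midnight UTC, or {@link Optional#empty()} if the name
+   * does not contain a date.
+   */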
+  @VisibleForTesting
+  Optional<Instant> getDateFromFilename(Path entry) {
+    Matcher filenameMatcher = LOG_FILENAME_PATTERN.matcher(entry.getFileName().toString());
+    if (filenameMatcher.matches()) {
+      String rotationDate = filenameMatcher.group("date");
+      if (rotationDate != null && !rotationDate.isBlank()) {
+        return Optional.of(Instant.parse(rotationDate + "T00:00:00.00Z"));
+      }
+    }
+    return Optional.empty();
+  }
+
+  private boolean delete(Path entry) {
+    try {
+      Files.deleteIfExists(entry);
+      logger.atInfo().log("Log file %s has been deleted.", entry);
+      return true;
+    } catch (IOException e) {
+      logger.atWarning().withCause(e).log("Failed to delete log file %s", entry);
+    }
+    return false;
+  }
+
+  private void compress(Path src) {
+    Path dst = src.resolveSibling(src.getFileName() + ".gz");
+    Path tmp = src.resolveSibling(".tmp." + src.getFileName());
+    try {
+      try (InputStream in = Files.newInputStream(src);
+          OutputStream out = new GZIPOutputStream(Files.newOutputStream(tmp))) {
+        ByteStreams.copy(in, out);
+      }
+      tmp.toFile().setReadOnly();
+      try {
+        Files.move(tmp, dst);
+      } catch (IOException e) {
+        throw new IOException("Cannot rename " + tmp + " to " + dst, e);
+      }
+      Files.delete(src);
+    } catch (IOException e) {
+      logger.atSevere().withCause(e).log("Cannot compress %s", src);
+      try {
+        Files.deleteIfExists(tmp);
+      } catch (IOException e2) {
+        logger.atWarning().withCause(e2).log("Failed to delete temporary log file %s", tmp);
+      }
+    }
+  }
+
+  @Override
+  public String toString() {
+    return "Log File Manager";
+  }
+}
diff --git a/javatests/com/google/gerrit/acceptance/api/accounts/AccountManagerIT.java b/javatests/com/google/gerrit/acceptance/api/accounts/AccountManagerIT.java
index 07ce95b..efc7e0f 100644
--- a/javatests/com/google/gerrit/acceptance/api/accounts/AccountManagerIT.java
+++ b/javatests/com/google/gerrit/acceptance/api/accounts/AccountManagerIT.java
@@ -28,6 +28,7 @@
 import com.google.gerrit.acceptance.AbstractDaemonTest;
 import com.google.gerrit.acceptance.config.GerritConfig;
 import com.google.gerrit.common.Nullable;
+import com.google.gerrit.common.UsedAt;
 import com.google.gerrit.entities.Account;
 import com.google.gerrit.extensions.client.AccountFieldName;
 import com.google.gerrit.server.IdentifiedUser;
@@ -52,11 +53,9 @@
 import com.google.gerrit.server.update.context.RefUpdateContext;
 import com.google.inject.Inject;
 import com.google.inject.util.Providers;
-import java.io.IOException;
 import java.util.Optional;
 import java.util.Set;
 import org.eclipse.jgit.api.Git;
-import org.eclipse.jgit.api.errors.GitAPIException;
 import org.eclipse.jgit.lib.ObjectId;
 import org.eclipse.jgit.lib.Repository;
 import org.junit.Test;
@@ -797,29 +796,22 @@
         "Create Test Account",
         accountId,
         u -> u.addExternalId(externalIdFactory.create(mailExtIdKey, accountId)));
-
     accountManager.link(accountId, authRequestFactory.createForEmail(email1));
+    int initialCommits = countExternalIdsCommits();
 
-    int initialCommits;
-    try (Repository allUsersRepo = repoManager.openRepository(allUsers);
-        Git git = new Git(allUsersRepo)) {
-      initialCommits = getCommitsInExternalIds(git, allUsersRepo);
+    accountManager.updateLink(accountId, authRequestFactory.createForEmail(email2));
 
-      accountManager.updateLink(accountId, authRequestFactory.createForEmail(email2));
-    }
-    // Reopen the repo again - this is required for git.log() operations (otherwise, git.log()
-    // returns unmodified history on google internal infra).
-    try (Repository allUsersRepo = repoManager.openRepository(allUsers);
-        Git git = new Git(allUsersRepo)) {
-      int afterUpdateCommits = getCommitsInExternalIds(git, allUsersRepo);
-      assertThat(afterUpdateCommits).isEqualTo(initialCommits + 1);
-    }
+    int afterUpdateCommits = countExternalIdsCommits();
+    assertThat(afterUpdateCommits).isEqualTo(initialCommits + 1);
   }
 
-  private static int getCommitsInExternalIds(Git git, Repository allUsersRepo)
-      throws GitAPIException, IOException {
-    ObjectId refsMetaExternalIdsHead = allUsersRepo.exactRef(REFS_EXTERNAL_IDS).getObjectId();
-    return Iterables.size(git.log().add(refsMetaExternalIdsHead).call());
+  @UsedAt(UsedAt.Project.GOOGLE)
+  protected int countExternalIdsCommits() throws Exception {
+    try (Repository allUsersRepo = repoManager.openRepository(allUsers);
+        Git git = new Git(allUsersRepo)) {
+      ObjectId refsMetaExternalIdsHead = allUsersRepo.exactRef(REFS_EXTERNAL_IDS).getObjectId();
+      return Iterables.size(git.log().add(refsMetaExternalIdsHead).call());
+    }
   }
 
   private void assertNoSuchExternalIds(ExternalId.Key... extIdKeys) throws Exception {
diff --git a/javatests/com/google/gerrit/acceptance/rest/binding/ConfigRestApiBindingsIT.java b/javatests/com/google/gerrit/acceptance/rest/binding/ConfigRestApiBindingsIT.java
index 576a921..13382ef 100644
--- a/javatests/com/google/gerrit/acceptance/rest/binding/ConfigRestApiBindingsIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/binding/ConfigRestApiBindingsIT.java
@@ -110,7 +110,7 @@
 
     Optional<String> id =
         result.stream()
-            .filter(t -> "Log File Compressor".equals(t.command))
+            .filter(t -> "Log File Manager".equals(t.command))
             .map(t -> t.id)
             .findFirst();
     assertThat(id).isPresent();
diff --git a/javatests/com/google/gerrit/acceptance/rest/config/GetTaskIT.java b/javatests/com/google/gerrit/acceptance/rest/config/GetTaskIT.java
index a9e3cf6..9ed6d15 100644
--- a/javatests/com/google/gerrit/acceptance/rest/config/GetTaskIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/config/GetTaskIT.java
@@ -33,7 +33,7 @@
     TaskInfo info = newGson().fromJson(r.getReader(), new TypeToken<TaskInfo>() {}.getType());
     assertThat(info.id).isNotNull();
     Long.parseLong(info.id, 16);
-    assertThat(info.command).isEqualTo("Log File Compressor");
+    assertThat(info.command).isEqualTo("Log File Manager");
     assertThat(info.startTime).isNotNull();
   }
 
@@ -49,7 +49,7 @@
         newGson().fromJson(r.getReader(), new TypeToken<List<TaskInfo>>() {}.getType());
     r.consume();
     for (TaskInfo info : result) {
-      if ("Log File Compressor".equals(info.command)) {
+      if ("Log File Manager".equals(info.command)) {
         return info.id;
       }
     }
diff --git a/javatests/com/google/gerrit/acceptance/rest/config/KillTaskIT.java b/javatests/com/google/gerrit/acceptance/rest/config/KillTaskIT.java
index 2aa350e..ab3689b 100644
--- a/javatests/com/google/gerrit/acceptance/rest/config/KillTaskIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/config/KillTaskIT.java
@@ -36,7 +36,7 @@
 
     Optional<String> id =
         result.stream()
-            .filter(t -> "Log File Compressor".equals(t.command))
+            .filter(t -> "Log File Manager".equals(t.command))
             .map(t -> t.id)
             .findFirst();
     assertThat(id).isPresent();
diff --git a/javatests/com/google/gerrit/acceptance/rest/config/ListTasksIT.java b/javatests/com/google/gerrit/acceptance/rest/config/ListTasksIT.java
index 674ca79..cad0875 100644
--- a/javatests/com/google/gerrit/acceptance/rest/config/ListTasksIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/config/ListTasksIT.java
@@ -34,7 +34,7 @@
     assertThat(result).isNotEmpty();
     boolean foundLogFileCompressorTask = false;
     for (TaskInfo info : result) {
-      if ("Log File Compressor".equals(info.command)) {
+      if ("Log File Manager".equals(info.command)) {
         foundLogFileCompressorTask = true;
       }
       assertThat(info.id).isNotNull();
diff --git a/javatests/com/google/gerrit/acceptance/server/util/TaskListenerIT.java b/javatests/com/google/gerrit/acceptance/server/util/TaskListenerIT.java
index e62cb2b..809cee9 100644
--- a/javatests/com/google/gerrit/acceptance/server/util/TaskListenerIT.java
+++ b/javatests/com/google/gerrit/acceptance/server/util/TaskListenerIT.java
@@ -150,7 +150,7 @@
       @Override
       public void configure() {
         // Forwarder.delegate is empty on start to protect test listener from non test tasks
-        // (such as the "Log File Compressor") interference
+        // (such as the "Log File Manager") interference
         forwarder = new ForwardingListener(); // Only gets bound once for all tests
         bind(TaskListener.class).annotatedWith(Exports.named("listener")).toInstance(forwarder);
       }
@@ -161,7 +161,7 @@
   public void setupExecutorAndForwarder() throws InterruptedException {
     executor = workQueue.createQueue(1, "TaskListeners");
 
-    // "Log File Compressor"s are likely running and will interfere with tests
+    // "Log File Manager"s are likely running and will interfere with tests
     while (0 != workQueue.getTasks().size()) {
       for (Task<?> t : workQueue.getTasks()) {
         @SuppressWarnings("unused")
diff --git a/javatests/com/google/gerrit/pgm/BUILD b/javatests/com/google/gerrit/pgm/BUILD
index 0fe4fad..43b86cb 100644
--- a/javatests/com/google/gerrit/pgm/BUILD
+++ b/javatests/com/google/gerrit/pgm/BUILD
@@ -7,6 +7,7 @@
     deps = [
         "//java/com/google/gerrit/pgm/http/jetty",
         "//java/com/google/gerrit/pgm/init/api",
+        "//java/com/google/gerrit/pgm/util",
         "//java/com/google/gerrit/server",
         "//java/com/google/gerrit/server/securestore/testing",
         "//lib:guava",
diff --git a/javatests/com/google/gerrit/pgm/util/LogFileManagerTest.java b/javatests/com/google/gerrit/pgm/util/LogFileManagerTest.java
new file mode 100644
index 0000000..b3f59cc
--- /dev/null
+++ b/javatests/com/google/gerrit/pgm/util/LogFileManagerTest.java
@@ -0,0 +1,58 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.pgm.util;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.gerrit.server.config.SitePaths;
+import java.nio.file.Path;
+import java.time.Instant;
+import java.util.List;
+import org.eclipse.jgit.lib.Config;
+import org.junit.Test;
+
+public class LogFileManagerTest {
+
+  @Test
+  public void testLogFilePattern() throws Exception {
+    List<String> filenamesWithDate =
+        List.of(
+            "error_log.2024-01-01",
+            "error_log.2024-01-01.gz",
+            "error_log.json.2024-01-01",
+            "error_log.json.2024-01-01.gz",
+            "sshd_log.2024-01-01",
+            "httpd_log.2024-01-01");
+
+    List<String> filenamesWithoutDate =
+        List.of(
+            "error_log",
+            "error_log.gz",
+            "error_log.json",
+            "error_log.json.gz",
+            "sshd_log",
+            "httpd_log");
+
+    LogFileManager manager = new LogFileManager(new SitePaths(Path.of("/gerrit")), new Config());
+    Instant expected = Instant.parse("2024-01-01T00:00:00.00Z");
+    for (String filename : filenamesWithDate) {
+      assertThat(manager.getDateFromFilename(Path.of(filename)).get()).isEqualTo(expected);
+    }
+
+    for (String filename : filenamesWithoutDate) {
+      assertThat(manager.getDateFromFilename(Path.of(filename)).isEmpty()).isTrue();
+    }
+  }
+}
diff --git a/plugins/replication b/plugins/replication
index aac2528..2f6c7ce 160000
--- a/plugins/replication
+++ b/plugins/replication
@@ -1 +1 @@
-Subproject commit aac252809094b8e4d4e26d69dab75a23d2da1770
+Subproject commit 2f6c7ceeb0cc50bc73d018cd9f990392d58804ab
diff --git a/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar.ts b/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar.ts
index 98e9eba..368eb22 100644
--- a/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar.ts
+++ b/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar.ts
@@ -126,7 +126,11 @@
 
 const MAX_AUTOCOMPLETE_RESULTS = 10;
 
-const TOKENIZE_REGEX = /(?:[^\s"]+|"[^"]*")+\s*/g;
+// The tokenizer recognizes 3 types of tokens:
+// 1. predicate:expression, matched by (?:[^\s":]+:\s*[^\s"]+)
+// 2. anything inside double quotes, matched by "[^"]*"
+// 3. anything else, e.g. an unfinished predicate, matched by [^\s"]+
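+// For example, 'is:open author: me' tokenizes into ['is:open ', 'author: me'],
+// so the space after 'author:' stays inside the predicate token rather than
+// starting a new token.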
+const TOKENIZE_REGEX = /(?:(?:[^\s":]+:\s*[^\s"]+)|[^\s"]+|"[^"]*")+\s*/g;
 
 export type SuggestionProvider = (
   predicate: string,
diff --git a/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar_test.ts b/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar_test.ts
index f67024f..bc8da05 100644
--- a/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar_test.ts
+++ b/polygerrit-ui/app/elements/core/gr-search-bar/gr-search-bar_test.ts
@@ -256,6 +256,18 @@
       const s = await element.getSearchSuggestions('is:mergeab');
       assert.isEmpty(s);
     });
+
+    test('Autocompletes second condition correctly', async () => {
+      const s = await element.getSearchSuggestions('is:open me');
+      assert.equal(s[0].value, 'mergedafter:');
+    });
+
+    test('Autocomplete handles space before expression correctly', async () => {
+      // This previously suggested "mergedafter" (incorrectly) due to the
+      // leading space.
+      const s = await element.getSearchSuggestions('author: me');
+      assert.isEmpty(s);
+    });
   });
 
   [