Merge "Remove experiment flag for rebasing merge commits"
diff --git a/Documentation/config-gerrit.txt b/Documentation/config-gerrit.txt
index 9ce2eee..b276cdf 100644
--- a/Documentation/config-gerrit.txt
+++ b/Documentation/config-gerrit.txt
@@ -1352,6 +1352,33 @@
 
 Default is 00:00 if the project_list cache warmer is enabled.
 
+[[cachePruning]]
+=== Section cachePruning
+
+[[cachePruning.pruneOnStartup]]cachePruning.pruneOnStartup::
++
+Whether to asynchronously prune all caches when Gerrit starts.
++
+Defaults to `true`.
+
+[[cachePruning.startTime]]cachePruning.startTime::
++
+The link:#schedule-configuration-startTime[start time] for running
+cache pruning.
++
+Defaults to `01:00`.
+
+[[cachePruning.interval]]cachePruning.interval::
++
+The link:#schedule-configuration-interval[interval] for running
+cache pruning.
++
+Defaults to `1d`.
+
+link:#schedule-configuration-examples[Schedule examples] can be found
+in the link:#schedule-configuration[Schedule Configuration] section.
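+
+Example (the values shown are the documented defaults):
+
+----
+[cachePruning]
+  pruneOnStartup = true
+  startTime = 01:00
+  interval = 1d
+----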
+
+
 [[capability]]
 === Section capability
 
diff --git a/java/com/google/gerrit/acceptance/GerritServerTestRule.java b/java/com/google/gerrit/acceptance/GerritServerTestRule.java
index 493652d..d3bc008 100644
--- a/java/com/google/gerrit/acceptance/GerritServerTestRule.java
+++ b/java/com/google/gerrit/acceptance/GerritServerTestRule.java
@@ -268,11 +268,6 @@
     return true;
   }
 
-  @Override
-  public boolean isRefSequenceSupported() {
-    return true;
-  }
-
   public static void afterConfigChanged() {
     if (commonServer != null) {
       try {
diff --git a/java/com/google/gerrit/acceptance/ServerTestRule.java b/java/com/google/gerrit/acceptance/ServerTestRule.java
index 5687f91..a057a6e 100644
--- a/java/com/google/gerrit/acceptance/ServerTestRule.java
+++ b/java/com/google/gerrit/acceptance/ServerTestRule.java
@@ -101,7 +101,4 @@
    * (e.g. email)
    */
   boolean isUsernameSupported();
-
-  /** Returns true if ref sequences are stored in NoteDb. */
-  boolean isRefSequenceSupported();
 }
diff --git a/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java b/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
index fdd55ac..4ccbab5 100644
--- a/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
+++ b/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
@@ -32,11 +32,13 @@
 import com.google.gerrit.server.cache.h2.H2CacheImpl.ValueHolder;
 import com.google.gerrit.server.config.ConfigUtil;
 import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.server.config.ScheduleConfig;
+import com.google.gerrit.server.config.ScheduleConfig.Schedule;
 import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.server.git.WorkQueue;
 import com.google.gerrit.server.index.options.BuildBloomFilter;
 import com.google.gerrit.server.index.options.IsFirstInsertForEntry;
 import com.google.gerrit.server.logging.LoggingContextAwareExecutorService;
-import com.google.gerrit.server.logging.LoggingContextAwareScheduledExecutorService;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.google.inject.Singleton;
@@ -67,6 +69,8 @@
   private final boolean h2AutoServer;
   private final boolean isOfflineReindex;
   private final boolean buildBloomFilter;
+  private final boolean pruneOnStartup;
+  private final Schedule schedule;
 
   @Inject
   H2CacheFactory(
@@ -74,12 +78,18 @@
       @GerritServerConfig Config cfg,
       SitePaths site,
       DynamicMap<Cache<?, ?>> cacheMap,
+      WorkQueue queue,
       @Nullable IsFirstInsertForEntry isFirstInsertForEntry,
       @Nullable BuildBloomFilter buildBloomFilter) {
     super(memCacheFactory, cfg, site);
     h2CacheSize = cfg.getLong("cache", null, "h2CacheSize", -1);
     h2AutoServer = cfg.getBoolean("cache", null, "h2AutoServer", false);
+    pruneOnStartup = cfg.getBoolean("cachePruning", null, "pruneOnStartup", true);
     caches = new ArrayList<>();
+    schedule =
+        ScheduleConfig.createSchedule(cfg, "cachePruning")
+            .orElse(Schedule.createOrFail(Duration.ofDays(1).toMillis(), "01:00"));
+    logger.atInfo().log("Scheduling cache pruning with schedule %s", schedule);
     this.cacheMap = cacheMap;
     this.isOfflineReindex =
         isFirstInsertForEntry != null && isFirstInsertForEntry.equals(IsFirstInsertForEntry.YES);
@@ -92,16 +102,7 @@
               Executors.newFixedThreadPool(
                   1, new ThreadFactoryBuilder().setNameFormat("DiskCache-Store-%d").build()));
 
-      cleanup =
-          isOfflineReindex
-              ? null
-              : new LoggingContextAwareScheduledExecutorService(
-                  Executors.newScheduledThreadPool(
-                      1,
-                      new ThreadFactoryBuilder()
-                          .setNameFormat("DiskCache-Prune-%d")
-                          .setDaemon(true)
-                          .build()));
+      cleanup = isOfflineReindex ? null : queue.createQueue(1, "DiskCache-Prune", true);
     } else {
       executor = null;
       cleanup = null;
@@ -114,9 +115,19 @@
       for (H2CacheImpl<?, ?> cache : caches) {
         executor.execute(cache::start);
         if (cleanup != null) {
+          if (pruneOnStartup) {
+            @SuppressWarnings("unused")
+            Future<?> possiblyIgnoredError =
+                cleanup.schedule(() -> cache.prune(), 30, TimeUnit.SECONDS);
+          }
+
           @SuppressWarnings("unused")
           Future<?> possiblyIgnoredError =
-              cleanup.schedule(() -> cache.prune(cleanup), 30, TimeUnit.SECONDS);
+              cleanup.scheduleAtFixedRate(
+                  () -> cache.prune(),
+                  schedule.initialDelay(),
+                  schedule.interval(),
+                  TimeUnit.MILLISECONDS);
         }
       }
     }
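The fixed-rate scheduling above relies on ScheduleConfig to translate the configured
startTime/interval into an initial delay and a period in milliseconds. A minimal sketch of
what the default schedule (startTime 01:00, interval 1d) amounts to, using only java.time;
this is illustrative only, not Gerrit's ScheduleConfig implementation:

  import java.time.Duration;
  import java.time.LocalTime;
  import java.time.ZonedDateTime;

  class PruneScheduleSketch {
    // Delay from "now" until the next occurrence of startTime (e.g. 01:00).
    static long initialDelayMillis(ZonedDateTime now, LocalTime startTime) {
      ZonedDateTime next = now.with(startTime);
      if (!next.isAfter(now)) {
        next = next.plusDays(1); // today's start time has already passed
      }
      return Duration.between(now, next).toMillis();
    }

    // With interval = 1d the task then repeats every 24 hours.
    static long intervalMillis() {
      return Duration.ofDays(1).toMillis();
    }
  }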
diff --git a/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java b/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
index 27a09ed..ad46483 100644
--- a/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
+++ b/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
@@ -47,7 +47,6 @@
 import java.time.Duration;
 import java.time.Instant;
 import java.util.ArrayList;
-import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -56,9 +55,6 @@
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executor;
-import java.util.concurrent.Future;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -92,6 +88,7 @@
   private final SqlStore<K, V> store;
   private final TypeLiteral<K> keyType;
   private final Cache<K, ValueHolder<V>> mem;
+  private final String cacheName;
 
   H2CacheImpl(
       Executor executor,
@@ -102,6 +99,7 @@
     this.store = store;
     this.keyType = keyType;
     this.mem = mem;
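+    // The cache name is the last path segment of the H2 store URL
+    // (e.g. a URL ending in ".../diff_summary" yields "diff_summary").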
+    this.cacheName = store.url.substring(store.url.lastIndexOf('/') + 1);
   }
 
   @Nullable
@@ -230,20 +228,10 @@
     store.close();
   }
 
-  void prune(ScheduledExecutorService service) {
+  void prune() {
+    logger.atInfo().log("Pruning cache %s...", cacheName);
     store.prune(mem);
-
-    Calendar cal = Calendar.getInstance();
-    cal.set(Calendar.HOUR_OF_DAY, 01);
-    cal.set(Calendar.MINUTE, 0);
-    cal.set(Calendar.SECOND, 0);
-    cal.set(Calendar.MILLISECOND, 0);
-    cal.add(Calendar.DAY_OF_MONTH, 1);
-
-    long delay = cal.getTimeInMillis() - TimeUtil.nowMs();
-    @SuppressWarnings("unused")
-    Future<?> possiblyIgnoredError =
-        service.schedule(() -> prune(service), delay, TimeUnit.MILLISECONDS);
+    logger.atInfo().log("Finished pruning cache %s...", cacheName);
   }
 
   static class ValueHolder<V> {
diff --git a/java/com/google/gerrit/sshd/InvalidKeyAlgorithmException.java b/java/com/google/gerrit/sshd/InvalidKeyAlgorithmException.java
new file mode 100644
index 0000000..5f09658
--- /dev/null
+++ b/java/com/google/gerrit/sshd/InvalidKeyAlgorithmException.java
@@ -0,0 +1,44 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.sshd;
+
+import java.security.PublicKey;
+import java.security.spec.InvalidKeySpecException;
+
+public class InvalidKeyAlgorithmException extends InvalidKeySpecException {
+  private final String invalidKeyAlgo;
+  private final String expectedKeyAlgo;
+  private final PublicKey publicKey;
+
+  public InvalidKeyAlgorithmException(
+      String invalidKeyAlgo, String expectedKeyAlgo, PublicKey publicKey) {
+    super("Key algorithm mismatch: expected " + expectedKeyAlgo + " but got " + invalidKeyAlgo);
+    this.invalidKeyAlgo = invalidKeyAlgo;
+    this.expectedKeyAlgo = expectedKeyAlgo;
+    this.publicKey = publicKey;
+  }
+
+  public String getInvalidKeyAlgo() {
+    return invalidKeyAlgo;
+  }
+
+  public String getExpectedKeyAlgo() {
+    return expectedKeyAlgo;
+  }
+
+  public PublicKey getPublicKey() {
+    return publicKey;
+  }
+}
diff --git a/java/com/google/gerrit/sshd/SshKeyCacheImpl.java b/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
index 628a050..ff452a6 100644
--- a/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
+++ b/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
@@ -19,6 +19,7 @@
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.flogger.FluentLogger;
+import com.google.gerrit.exceptions.InvalidSshKeyException;
 import com.google.gerrit.server.account.AccountSshKey;
 import com.google.gerrit.server.account.VersionedAuthorizedKeys;
 import com.google.gerrit.server.account.externalids.ExternalId;
@@ -144,6 +145,15 @@
         // to do with the key object, and instead we must abort this load.
         //
         throw e;
+      } catch (InvalidKeyAlgorithmException e) {
+        logger.atWarning().withCause(e).log(
+            "SSH key %d of account %s has an invalid algorithm %s: fixing the algorithm to %s",
+            k.seq(), k.accountId(), e.getInvalidKeyAlgo(), e.getExpectedKeyAlgo());
+        if (fixKeyAlgorithm(k, e.getExpectedKeyAlgo())) {
+          kl.add(new SshKeyCacheEntry(k.accountId(), e.getPublicKey()));
+        } else {
+          markInvalid(k);
+        }
       } catch (Exception e) {
         markInvalid(k);
       }
@@ -158,5 +168,20 @@
             "Failed to mark SSH key %d of account %s invalid", k.seq(), k.accountId());
       }
     }
+
+    private boolean fixKeyAlgorithm(AccountSshKey k, String keyAlgo) {
+      try {
+        logger.atInfo().log(
+            "Fixing SSH key %d of account %s algorithm to %s", k.seq(), k.accountId(), keyAlgo);
+        authorizedKeys.deleteKey(k.accountId(), k.seq());
+        String sshKey = k.sshPublicKey();
+        authorizedKeys.addKey(k.accountId(), keyAlgo + sshKey.substring(sshKey.indexOf(' ')));
+        return true;
+      } catch (IOException | ConfigInvalidException | InvalidSshKeyException e) {
+        logger.atSevere().withCause(e).log(
+            "Failed to fix SSH key %d of account %s with algo %s", k.seq(), k.accountId(), keyAlgo);
+        return false;
+      }
+    }
   }
 }
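The fix-up in fixKeyAlgorithm only rewrites the algorithm prefix of the stored OpenSSH key
line before re-adding the key. A short sketch with assumed example values (the substring
logic mirrors the call to authorizedKeys.addKey above):

  class FixKeyAlgoSketch {
    // Replace only the leading algorithm token; keep everything from the first
    // space onwards (the base64 blob and the optional comment).
    static String fix(String stored, String expectedAlgo) {
      return expectedAlgo + stored.substring(stored.indexOf(' '));
    }
  }

  // fix("invalid-algo AAAAB3Nza... user@host", "ssh-rsa")
  //   -> "ssh-rsa AAAAB3Nza... user@host"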
diff --git a/java/com/google/gerrit/sshd/SshUtil.java b/java/com/google/gerrit/sshd/SshUtil.java
index abbd81d..29d0e90 100644
--- a/java/com/google/gerrit/sshd/SshUtil.java
+++ b/java/com/google/gerrit/sshd/SshUtil.java
@@ -57,7 +57,12 @@
         throw new InvalidKeySpecException("No key string");
       }
       final byte[] bin = BaseEncoding.base64().decode(s);
-      return new ByteArrayBuffer(bin).getRawPublicKey();
+      String publicKeyAlgo = new ByteArrayBuffer(bin).getString();
+      PublicKey publicKey = new ByteArrayBuffer(bin).getRawPublicKey();
+      if (!key.algorithm().equals(publicKeyAlgo)) {
+        throw new InvalidKeyAlgorithmException(key.algorithm(), publicKeyAlgo, publicKey);
+      }
+      return publicKey;
     } catch (RuntimeException | SshException e) {
       throw new InvalidKeySpecException("Cannot parse key", e);
     }
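SshUtil.parse can detect the mismatch because an OpenSSH public key line
("<algo> <base64-blob> [comment]") repeats the algorithm name inside the base64 blob as a
length-prefixed string (RFC 4253 encoding). A standalone sketch of reading that embedded
name with plain JDK classes; illustrative only, Gerrit uses MINA's ByteArrayBuffer for this:

  import java.nio.ByteBuffer;
  import java.nio.charset.StandardCharsets;
  import java.util.Base64;

  class SshBlobAlgoSketch {
    // Returns the algorithm name encoded at the start of the key blob, e.g. "ssh-rsa".
    static String algoInsideBlob(String base64Blob) {
      ByteBuffer buf = ByteBuffer.wrap(Base64.getDecoder().decode(base64Blob));
      int len = buf.getInt(); // 4-byte big-endian length prefix
      byte[] name = new byte[len];
      buf.get(name);
      return new String(name, StandardCharsets.US_ASCII);
    }
  }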
diff --git a/javatests/com/google/gerrit/acceptance/api/accounts/AccountIT.java b/javatests/com/google/gerrit/acceptance/api/accounts/AccountIT.java
index 8626a32..6f3f8d7 100644
--- a/javatests/com/google/gerrit/acceptance/api/accounts/AccountIT.java
+++ b/javatests/com/google/gerrit/acceptance/api/accounts/AccountIT.java
@@ -15,6 +15,7 @@
 package com.google.gerrit.acceptance.api.accounts;
 
 import static com.google.common.collect.ImmutableList.toImmutableList;
+import static com.google.common.collect.ImmutableMap.toImmutableMap;
 import static com.google.common.collect.ImmutableSet.toImmutableSet;
 import static com.google.common.truth.Truth.assertThat;
 import static com.google.common.truth.Truth.assertWithMessage;
@@ -329,7 +330,7 @@
 
   @Test
   public void createByAccountCreator() throws Exception {
-    RefUpdateCounter refUpdateCounter = new RefUpdateCounter(server.isRefSequenceSupported());
+    RefUpdateCounter refUpdateCounter = createRefUpdateCounter();
     try (Registration registration = extensionRegistry.newRegistration().add(refUpdateCounter)) {
       Account.Id accountId = createByAccountCreator(1);
       refUpdateCounter.assertRefUpdateFor(
@@ -811,7 +812,7 @@
   @Test
   public void starUnstarChange() throws Exception {
     AccountIndexedCounter accountIndexedCounter = getAccountIndexedCounter();
-    RefUpdateCounter refUpdateCounter = new RefUpdateCounter(server.isRefSequenceSupported());
+    RefUpdateCounter refUpdateCounter = createRefUpdateCounter();
     try (Registration registration =
         extensionRegistry.newRegistration().add(accountIndexedCounter).add(refUpdateCounter)) {
       PushOneCommit.Result r = createChange();
@@ -2754,11 +2755,7 @@
         getExternalIdFactory()
             .createWithEmail(externalIdKeyFactory.parse(extId2String), user.id(), "2@foo.com");
 
-    ObjectId revBefore;
-    try (Repository repo = repoManager.openRepository(allUsers)) {
-      revBefore = repo.exactRef(RefNames.REFS_EXTERNAL_IDS).getObjectId();
-    }
-
+    int initialCommits = countExternalIdsCommits();
     AccountsUpdate.UpdateArguments ua1 =
         new AccountsUpdate.UpdateArguments(
             "Add External ID", admin.id(), (a, u) -> u.addExternalId(extId1));
@@ -2782,10 +2779,17 @@
         .contains(extId2String);
 
     // Ensure that we only applied one single commit.
-    try (Repository repo = repoManager.openRepository(allUsers);
-        RevWalk rw = new RevWalk(repo)) {
-      RevCommit after = rw.parseCommit(repo.exactRef(RefNames.REFS_EXTERNAL_IDS).getObjectId());
-      assertThat(after.getParent(0).toObjectId()).isEqualTo(revBefore);
+    int afterUpdateCommits = countExternalIdsCommits();
+    assertThat(afterUpdateCommits).isEqualTo(initialCommits + 1);
+  }
+
+  @UsedAt(UsedAt.Project.GOOGLE)
+  protected int countExternalIdsCommits() throws Exception {
+    try (Repository allUsersRepo = repoManager.openRepository(allUsers);
+        Git git = new Git(allUsersRepo)) {
+      ObjectId refsMetaExternalIdsHead =
+          allUsersRepo.exactRef(RefNames.REFS_EXTERNAL_IDS).getObjectId();
+      return Iterables.size(git.log().add(refsMetaExternalIdsHead).call());
     }
   }
 
@@ -3039,28 +3043,27 @@
         getExternalIdFactory()
             .createWithEmail(
                 SCHEME_MAILTO, "secondary@non.google", admin.id(), "secondary@non.google");
-    AtomicBoolean bgIndicator = new AtomicBoolean(false);
+    ExternalId oldExternalId =
+        getExternalIdsReader().get(createEmailExternalId(admin.id(), admin.email()).key()).get();
     accountsUpdateProvider
         .get()
         .update(
             "Replace External ID",
             admin.id(),
             (a, u) -> {
-              if (bgIndicator.getAndSet(true)) {
-                // In the Google architecture, this runnable might be called multiple times. Only
-                // do the replacement once.
-                return;
-              }
-              u.replaceExternalId(
-                  getExternalIdsReader()
-                      .get(createEmailExternalId(admin.id(), admin.email()).key())
-                      .get(),
-                  externalId);
+              u.replaceExternalId(oldExternalId, externalId);
             });
     assertExternalIds(admin.id(), ImmutableSet.of("mailto:secondary@non.google", "username:admin"));
 
     AccountState updatedState = accountCache.get(admin.id()).get();
-    assertThat(preUpdateState.account().metaId()).isNotEqualTo(updatedState.account().metaId());
+    assertThat(accountCache.get(admin.id()).get()).isNotSameInstanceAs(preUpdateState);
+    if (preUpdateState.account().metaId() == null) {
+      // When the test is executed on Google infrastructure, metaId should either always be set
+      // or always be null.
+      assertThat(updatedState.account().metaId()).isNull();
+    } else {
+      assertThat(preUpdateState.account().metaId()).isNotEqualTo(updatedState.account().metaId());
+    }
   }
 
   @Test
@@ -3328,23 +3331,12 @@
     requestScopeOperations.setApiUser(deleted.id());
 
     gApi.accounts().self().starChange(triplet);
-    try (Repository repo = repoManager.openRepository(allUsers)) {
-      assertThat(
-              repo.getRefDatabase()
-                  .getRefsByPrefix(RefNames.refsStarredChangesPrefix(r.getChange().getId())))
-          .hasSize(1);
+    assertThat(getStarredChangesCount(r.getChange().getId())).isEqualTo(1);
 
-      gApi.accounts().self().delete();
-    }
+    gApi.accounts().self().delete();
 
     // Reopen the repo to refresh RefDb
-    try (Repository repo = repoManager.openRepository(allUsers)) {
-      assertThat(
-              repo.getRefDatabase()
-                  .getRefsByPrefix(RefNames.refsStarredChangesPrefix(r.getChange().getId())))
-          .isEmpty();
-    }
-
+    assertThat(getStarredChangesCount(r.getChange().getId())).isEqualTo(0);
     // Clean up the test framework
     accountCreator.evict(deleted.id());
   }
@@ -3386,27 +3378,35 @@
     requestScopeOperations.setApiUser(deleted.id());
 
     createDraft(r, PushOneCommit.FILE_NAME, "draft");
-    try (Repository repo = repoManager.openRepository(allUsers)) {
-      assertThat(
-              repo.getRefDatabase()
-                  .getRefsByPrefix(RefNames.refsDraftCommentsPrefix(r.getChange().getId())))
-          .hasSize(1);
+    assertThat(getUsersWithDraftsCount(r.getChange().getId())).isEqualTo(1);
+    gApi.accounts().self().delete();
 
-      gApi.accounts().self().delete();
-    }
-
-    // Reopen the repo to refresh RefDb
-    try (Repository repo = repoManager.openRepository(allUsers)) {
-      assertThat(
-              repo.getRefDatabase()
-                  .getRefsByPrefix(RefNames.refsDraftCommentsPrefix(r.getChange().getId())))
-          .isEmpty();
-    }
+    assertThat(getUsersWithDraftsCount(r.getChange().getId())).isEqualTo(0);
 
     // Clean up the test framework
     accountCreator.evict(deleted.id());
   }
 
+  @UsedAt(UsedAt.Project.GOOGLE)
+  protected int getUsersWithDraftsCount(Change.Id changeId) throws Exception {
+    // getStarredChangesCount and getUsersWithDraftsCount must remain two distinct methods,
+    // because at Google they can query data from different storage (i.e. not from NoteDb).
+    return getRefCount(RefNames.refsDraftCommentsPrefix(changeId));
+  }
+
+  @UsedAt(UsedAt.Project.GOOGLE)
+  protected int getStarredChangesCount(Change.Id changeId) throws Exception {
+    // getStarredChangesCount and getUsersWithDraftsCount must remain two distinct methods,
+    // because at Google they can query data from different storage (i.e. not from NoteDb).
+    return getRefCount(RefNames.refsStarredChangesPrefix(changeId));
+  }
+
+  private int getRefCount(String refPrefix) throws Exception {
+    try (Repository repo = repoManager.openRepository(allUsers)) {
+      return repo.getRefDatabase().getRefsByPrefix(refPrefix).size();
+    }
+  }
+
   @Test
   @SuppressWarnings("unused")
   public void deleteAccount_deletesReviewedFlags() throws Exception {
@@ -3773,13 +3773,13 @@
   }
 
   @UsedAt(UsedAt.Project.GOOGLE)
-  public static class RefUpdateCounter implements GitReferenceUpdatedListener {
-    private final boolean refSequenceSupported;
-    private final AtomicLongMap<String> countsByProjectRefs = AtomicLongMap.create();
+  protected RefUpdateCounter createRefUpdateCounter() {
+    return new RefUpdateCounter();
+  }
 
-    public RefUpdateCounter(boolean refSequenceSupported) {
-      this.refSequenceSupported = refSequenceSupported;
-    }
+  @UsedAt(UsedAt.Project.GOOGLE)
+  public static class RefUpdateCounter implements GitReferenceUpdatedListener {
+    private final AtomicLongMap<String> countsByProjectRefs = AtomicLongMap.create();
 
     @UsedAt(UsedAt.Project.GOOGLE)
     public static String projectRef(Project.NameKey project, String ref) {
@@ -3810,14 +3810,16 @@
 
     protected void assertRefUpdateFor(Map<String, Long> expectedProjectRefUpdateCounts) {
       ImmutableMap<String, Long> exprectedFiltered =
-          refSequenceSupported
-              ? ImmutableMap.copyOf(expectedProjectRefUpdateCounts)
-              : ImmutableMap.copyOf(
-                  expectedProjectRefUpdateCounts.entrySet().stream()
-                      .filter(entry -> !entry.getKey().contains(":refs/sequences/"))
-                      .collect(toList()));
+          expectedProjectRefUpdateCounts.entrySet().stream()
+              .filter(entry -> isRefSupported(entry.getKey()))
+              .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
       assertThat(countsByProjectRefs.asMap()).containsExactlyEntriesIn(exprectedFiltered);
       clear();
     }
+
+    @UsedAt(UsedAt.Project.GOOGLE)
+    protected boolean isRefSupported(String expectedRefEntryKey) {
+      return true;
+    }
   }
 }
diff --git a/javatests/com/google/gerrit/sshd/BUILD b/javatests/com/google/gerrit/sshd/BUILD
index 3e11ff2..44b9c62 100644
--- a/javatests/com/google/gerrit/sshd/BUILD
+++ b/javatests/com/google/gerrit/sshd/BUILD
@@ -4,8 +4,11 @@
     name = "sshd_tests",
     srcs = glob(["**/*.java"]),
     deps = [
+        "//java/com/google/gerrit/entities",
         "//java/com/google/gerrit/extensions:api",
+        "//java/com/google/gerrit/server",
         "//java/com/google/gerrit/sshd",
+        "//java/com/google/gerrit/testing:gerrit-test-util",
         "//lib/mina:sshd",
         "//lib/truth",
     ],
diff --git a/javatests/com/google/gerrit/sshd/SshUtilTest.java b/javatests/com/google/gerrit/sshd/SshUtilTest.java
new file mode 100644
index 0000000..1585bc3
--- /dev/null
+++ b/javatests/com/google/gerrit/sshd/SshUtilTest.java
@@ -0,0 +1,49 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.sshd;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.gerrit.testing.GerritJUnit.assertThrows;
+
+import com.google.gerrit.entities.Account;
+import com.google.gerrit.server.account.AccountSshKey;
+import java.security.spec.InvalidKeySpecException;
+import org.junit.Test;
+
+public class SshUtilTest {
+  private static final Account.Id TEST_ACCOUNT_ID = Account.id(1);
+  private static final int TEST_SSHKEY_SEQUENCE = 1;
+  private static final String INVALID_ALGO = "invalid-algo";
+  private static final String VALID_OPENSSH_RSA_KEY =
+      "AAAAB3NzaC1yc2EAAAABIwAAAIEA0R66EoZ7hFp81w9sAJqu34UFyE+w36H/mobUqnT5Lns7PcTOJh3sgMJAlswX2lFAWqvF2gd2PRMpMhbfEU4iq2SfY8x+RDCJ4ZQWESln/587T41BlQjOXzu3W1bqgmtHnRCte3DjyWDvM/fucnUMSwOgP+FVEZCLTrk3thLMWsU=";
+  private static final String VALID_SSH_RSA_ALGO = "ssh-rsa";
+
+  @Test
+  public void shouldFailParsingOpenSshKeyWithInvalidAlgo() {
+    String sshKeyWithInvalidAlgo = String.format("%s %s", INVALID_ALGO, VALID_OPENSSH_RSA_KEY);
+    AccountSshKey sshKey =
+        AccountSshKey.create(TEST_ACCOUNT_ID, TEST_SSHKEY_SEQUENCE, sshKeyWithInvalidAlgo);
+    assertThrows(InvalidKeySpecException.class, () -> SshUtil.parse(sshKey));
+  }
+
+  @Test
+  public void shouldParseSshKeyWithAlgoMatchingKey() throws Exception {
+    String sshKeyWithValidKeyAlgo =
+        String.format("%s %s", VALID_SSH_RSA_ALGO, VALID_OPENSSH_RSA_KEY);
+    AccountSshKey sshKey =
+        AccountSshKey.create(TEST_ACCOUNT_ID, TEST_SSHKEY_SEQUENCE, sshKeyWithValidKeyAlgo);
+    assertThat(SshUtil.parse(sshKey)).isNotNull();
+  }
+}
diff --git a/polygerrit-ui/app/api/diff.ts b/polygerrit-ui/app/api/diff.ts
index 092375a..24110ed 100644
--- a/polygerrit-ui/app/api/diff.ts
+++ b/polygerrit-ui/app/api/diff.ts
@@ -255,11 +255,6 @@
   show_newline_warning_left?: boolean;
   show_newline_warning_right?: boolean;
   use_new_image_diff_ui?: boolean;
-  /**
-   * Temporary flag for switching over to a simplified version of the diff
-   * processor.
-   */
-  use_simplified_processor?: boolean;
 }
 
 /**
diff --git a/polygerrit-ui/app/api/plugin.ts b/polygerrit-ui/app/api/plugin.ts
index 684429a..8630f75 100644
--- a/polygerrit-ui/app/api/plugin.ts
+++ b/polygerrit-ui/app/api/plugin.ts
@@ -34,6 +34,7 @@
   POST_REVERT = 'postrevert',
   ADMIN_MENU_LINKS = 'admin-menu-links',
   SHOW_DIFF = 'showdiff',
+  REPLY_SENT = 'replysent',
 }
 
 export declare interface PluginApi {
diff --git a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.ts b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.ts
index 5b2f951..2914a03 100644
--- a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.ts
+++ b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.ts
@@ -2067,7 +2067,7 @@
           fire(this, 'hide-alert', {});
         });
     }
-    this.change = newChange;
+    this.getChangeModel().updateStateChange(newChange);
   }
 
   // Private but used in tests.
diff --git a/polygerrit-ui/app/elements/change/gr-reply-dialog/gr-reply-dialog.ts b/polygerrit-ui/app/elements/change/gr-reply-dialog/gr-reply-dialog.ts
index d6b110e..1a18e9c 100644
--- a/polygerrit-ui/app/elements/change/gr-reply-dialog/gr-reply-dialog.ts
+++ b/polygerrit-ui/app/elements/change/gr-reply-dialog/gr-reply-dialog.ts
@@ -98,7 +98,10 @@
 } from '../../../utils/attention-set-util';
 import {RestApiService} from '../../../services/gr-rest-api/gr-rest-api';
 import {resolve} from '../../../models/dependency';
-import {changeModelToken} from '../../../models/change/change-model';
+import {
+  changeModelToken,
+  updateRevisionsWithCommitShas,
+} from '../../../models/change/change-model';
 import {LabelNameToValuesMap, PatchSetNumber} from '../../../api/rest-api';
 import {css, html, PropertyValues, LitElement, nothing} from 'lit';
 import {sharedStyles} from '../../../styles/shared-styles';
@@ -1459,8 +1462,10 @@
     return this.saveReview(reviewInput, errFn)
       .then(result => {
         this.getChangeModel().updateStateChange(
-          GrReviewerUpdatesParser.parse(
-            result?.change_info as ChangeViewChangeInfo
+          updateRevisionsWithCommitShas(
+            GrReviewerUpdatesParser.parse(
+              result?.change_info as ChangeViewChangeInfo
+            )
           )
         );
 
@@ -1468,6 +1473,7 @@
         this.includeComments = true;
         fireNoBubble(this, 'send', {});
         fireIronAnnounce(this, 'Reply sent');
+        this.getPluginLoader().jsApiService.handleReplySent();
         return;
       })
       .then(result => result)
diff --git a/polygerrit-ui/app/elements/diff/gr-diff-host/gr-diff-host.ts b/polygerrit-ui/app/elements/diff/gr-diff-host/gr-diff-host.ts
index 6770d1c..00f7639 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff-host/gr-diff-host.ts
+++ b/polygerrit-ui/app/elements/diff/gr-diff-host/gr-diff-host.ts
@@ -326,12 +326,6 @@
       resolve(this, highlightServiceToken),
       () => getAppContext().reportingService
     );
-    this.renderPrefs = {
-      ...this.renderPrefs,
-      use_simplified_processor: this.flags.isEnabled(
-        KnownExperimentId.SIMPLIFIED_DIFF_PROCESSOR
-      ),
-    };
     this.addEventListener(
       // These are named inconsistently for a reason:
       // The create-comment event is fired to indicate that we should
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-interface-element.ts b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-interface-element.ts
index 6b9c684..b09502a 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-interface-element.ts
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-interface-element.ts
@@ -57,11 +57,7 @@
       try {
         return callback(change, revision) === false;
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('canSubmitChange callback error'),
-          err
-        );
+        this.reportError(err, EventType.SUBMIT_CHANGE);
       }
       return false;
     });
@@ -116,11 +112,18 @@
       try {
         cb(change, revision, info, baseRevision ?? PARENT);
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('showChange callback error'),
-          err
-        );
+        this.reportError(err, EventType.SHOW_CHANGE);
+      }
+    }
+  }
+
+  async handleReplySent() {
+    await this.waitForPluginsToLoad();
+    for (const cb of this._getEventCallbacks(EventType.REPLY_SENT)) {
+      try {
+        cb();
+      } catch (err: unknown) {
+        this.reportError(err, EventType.REPLY_SENT);
       }
     }
   }
@@ -134,11 +137,7 @@
       try {
         cb(detail.revisionActions, detail.change);
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('showRevisionActions callback error'),
-          err
-        );
+        this.reportError(err, EventType.SHOW_REVISION_ACTIONS);
       }
     }
   }
@@ -148,11 +147,7 @@
       try {
         cb(change, msg);
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('commitMessage callback error'),
-          err
-        );
+        this.reportError(err, EventType.COMMIT_MSG_EDIT);
       }
     }
   }
@@ -163,11 +158,7 @@
       try {
         cb(detail.change);
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('labelChange callback error'),
-          err
-        );
+        this.reportError(err, EventType.LABEL_CHANGE);
       }
     }
   }
@@ -177,11 +168,7 @@
       try {
         revertMsg = cb(change, revertMsg, origMsg) as string;
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('modifyRevertMsg callback error'),
-          err
-        );
+        this.reportError(err, EventType.REVERT);
       }
     }
     return revertMsg;
@@ -200,11 +187,7 @@
           origMsg
         ) as string;
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('modifyRevertSubmissionMsg callback error'),
-          err
-        );
+        this.reportError(err, EventType.REVERT_SUBMISSION);
       }
     }
     return revertSubmissionMsg;
@@ -230,11 +213,7 @@
           review = {labels: r as LabelNameToValueMap};
         }
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('getReviewPostRevert callback error'),
-          err
-        );
+        this.reportError(err, EventType.POST_REVERT);
       }
     }
     return review;
@@ -246,15 +225,19 @@
       try {
         cb(detail.change, detail.patchRange, detail.fileRange);
       } catch (err: unknown) {
-        this.reporting.error(
-          'GrJsApiInterface',
-          new Error('showDiff callback error'),
-          err
-        );
+        this.reportError(err, EventType.SHOW_DIFF);
       }
     }
   }
 
+  reportError(err: unknown, type: EventType) {
+    this.reporting.error(
+      'GrJsApiInterface',
+      new Error(`plugin event callback error for type "${type}"`),
+      err
+    );
+  }
+
   _getEventCallbacks(type: EventType) {
     return eventCallbacks[type] || [];
   }
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-types.ts b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-types.ts
index 6c180d7..dafa434 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-types.ts
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-js-api-types.ts
@@ -45,7 +45,7 @@
     revertSubmissionMsg: string,
     origMsg: string
   ): string;
-  handleShowChange(detail: ShowChangeDetail): void;
+  handleShowChange(detail: ShowChangeDetail): Promise<void>;
   handleShowRevisionActions(detail: ShowRevisionActionsDetail): void;
   handleLabelChange(detail: {change?: ParsedChangeInfo}): void;
   modifyRevertMsg(
@@ -59,4 +59,5 @@
   canSubmitChange(change: ChangeInfo, revision?: RevisionInfo | null): boolean;
   getReviewPostRevert(change?: ChangeInfo): ReviewInput;
   handleShowDiff(detail: ShowDiffDetail): void;
+  handleReplySent(): Promise<void>;
 }
diff --git a/polygerrit-ui/app/embed/diff/gr-diff-model/gr-diff-model.ts b/polygerrit-ui/app/embed/diff/gr-diff-model/gr-diff-model.ts
index 42fa984..88451f6 100644
--- a/polygerrit-ui/app/embed/diff/gr-diff-model/gr-diff-model.ts
+++ b/polygerrit-ui/app/embed/diff/gr-diff-model/gr-diff-model.ts
@@ -3,8 +3,8 @@
  * Copyright 2023 Google LLC
  * SPDX-License-Identifier: Apache-2.0
  */
-import {Observable, combineLatest, from, of} from 'rxjs';
-import {debounceTime, filter, switchMap, withLatestFrom} from 'rxjs/operators';
+import {Observable, combineLatest} from 'rxjs';
+import {debounceTime, filter, map, withLatestFrom} from 'rxjs/operators';
 import {
   CreateCommentEventDetail,
   DiffInfo,
@@ -36,10 +36,6 @@
   GrDiffProcessor,
   ProcessingOptions,
 } from '../gr-diff-processor/gr-diff-processor';
-import {
-  GrDiffProcessorSimplified,
-  ProcessingOptions as ProcessingOptionsSimplified,
-} from '../gr-diff-processor/gr-diff-processor-simplified';
 import {GrDiffGroup, GrDiffGroupType} from '../gr-diff/gr-diff-group';
 import {assert} from '../../../utils/common-util';
 import {countLines, isImageDiff} from '../../../utils/diff-util';
@@ -240,8 +236,8 @@
       .pipe(
         withLatestFrom(this.keyLocations$),
         debounceTime(1),
-        switchMap(([[diff, context, renderPrefs], keyLocations]) => {
-          const options: ProcessingOptions | ProcessingOptionsSimplified = {
+        map(([[diff, context, renderPrefs], keyLocations]) => {
+          const options: ProcessingOptions = {
             context,
             keyLocations,
             isBinary: !!(isImageDiff(diff) || diff.binary),
@@ -250,15 +246,8 @@
             options.asyncThreshold = renderPrefs.num_lines_rendered_at_once;
           }
 
-          // TODO: When switching to the simplified processor unconditionally,
-          // then we can use map() instead of switchMap().
-          if (renderPrefs?.use_simplified_processor) {
-            const processor = new GrDiffProcessorSimplified(options);
-            return of(processor.process(diff.content));
-          } else {
-            const processor = new GrDiffProcessor(options);
-            return from(processor.process(diff.content));
-          }
+          const processor = new GrDiffProcessor(options);
+          return processor.process(diff.content);
         })
       )
       .subscribe(groups => {
diff --git a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified.ts b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified.ts
deleted file mode 100644
index a306de8..0000000
--- a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified.ts
+++ /dev/null
@@ -1,550 +0,0 @@
-/**
- * @license
- * Copyright 2016 Google LLC
- * SPDX-License-Identifier: Apache-2.0
- */
-import {GrDiffLine, Highlights} from '../gr-diff/gr-diff-line';
-import {
-  GrDiffGroup,
-  GrDiffGroupType,
-  hideInContextControl,
-} from '../gr-diff/gr-diff-group';
-import {DiffContent} from '../../../types/diff';
-import {Side} from '../../../constants/constants';
-import {getStringLength} from '../gr-diff-highlight/gr-annotation';
-import {GrDiffLineType, LineNumber} from '../../../api/diff';
-import {FULL_CONTEXT, KeyLocations} from '../gr-diff/gr-diff-utils';
-
-// visible for testing
-export interface State {
-  lineNums: {
-    left: number;
-    right: number;
-  };
-  chunkIndex: number;
-}
-
-interface ChunkEnd {
-  offset: number;
-  keyLocation: boolean;
-}
-
-/** Interface for listening to the output of the processor. */
-export interface GroupConsumer {
-  addGroup(group: GrDiffGroup): void;
-  clearGroups(): void;
-}
-
-/** Interface for listening to the output of the processor. */
-export interface ProcessingOptions {
-  context: number;
-  keyLocations?: KeyLocations;
-  asyncThreshold?: number;
-  isBinary?: boolean;
-}
-
-/**
- * Converts the API's `DiffContent`s  to `GrDiffGroup`s for rendering.
- *
- * Glossary:
- * - "chunk": A single `DiffContent` as returned by the API.
- * - "group": A single `GrDiffGroup` as used for rendering.
- * - "common" chunk/group: A chunk/group that should be considered unchanged
- *   for diffing purposes. This can mean its either actually unchanged, or it
- *   has only whitespace changes.
- * - "key location": A line number and side of the diff that should not be
- *   collapsed e.g. because a comment is attached to it, or because it was
- *   provided in the URL and thus should be visible
- * - "uncollapsible" chunk/group: A chunk/group that is either not "common",
- *   or cannot be collapsed because it contains a key location
- *
- * Here a a number of tasks this processor performs:
- *  - splitting large chunks to allow more granular async rendering
- *  - adding a group for the "File" pseudo line that file-level comments can
- *    be attached to
- *  - replacing common parts of the diff that are outside the user's
- *    context setting and do not have comments with a group representing the
- *    "expand context" widget. This may require splitting a chunk/group so
- *    that the part that is within the context or has comments is shown, while
- *    the rest is not.
- */
-export class GrDiffProcessorSimplified {
-  // visible for testing
-  context: number;
-
-  // visible for testing
-  keyLocations: KeyLocations;
-
-  private isBinary = false;
-
-  private groups: GrDiffGroup[] = [];
-
-  constructor(options: ProcessingOptions) {
-    this.context = options.context;
-    this.keyLocations = options.keyLocations ?? {left: {}, right: {}};
-    this.isBinary = options.isBinary ?? false;
-  }
-
-  /**
-   * Process the diff chunks into GrDiffGroups.
-   *
-   * @return an array of GrDiffGroups
-   */
-  process(chunks: DiffContent[]): GrDiffGroup[] {
-    this.groups = [];
-    this.groups.push(this.makeGroup('LOST'));
-    this.groups.push(this.makeGroup('FILE'));
-
-    this.processChunks(chunks);
-    return this.groups;
-  }
-
-  processChunks(chunks: DiffContent[]) {
-    if (this.isBinary) return;
-
-    const state = {
-      lineNums: {left: 0, right: 0},
-      chunkIndex: 0,
-    };
-    chunks = this.splitCommonChunksWithKeyLocations(chunks);
-
-    while (state.chunkIndex < chunks.length) {
-      const stateUpdate = this.processNext(state, chunks);
-      for (const group of stateUpdate.groups) {
-        this.groups.push(group);
-      }
-      state.lineNums.left += stateUpdate.lineDelta.left;
-      state.lineNums.right += stateUpdate.lineDelta.right;
-      state.chunkIndex = stateUpdate.newChunkIndex;
-    }
-  }
-
-  /**
-   * Process the next uncollapsible chunk, or the next collapsible chunks.
-   */
-  // visible for testing
-  processNext(state: State, chunks: DiffContent[]) {
-    const firstUncollapsibleChunkIndex = this.firstUncollapsibleChunkIndex(
-      chunks,
-      state.chunkIndex
-    );
-    if (firstUncollapsibleChunkIndex === state.chunkIndex) {
-      const chunk = chunks[state.chunkIndex];
-      return {
-        lineDelta: {
-          left: this.linesLeft(chunk).length,
-          right: this.linesRight(chunk).length,
-        },
-        groups: [
-          this.chunkToGroup(
-            chunk,
-            state.lineNums.left + 1,
-            state.lineNums.right + 1
-          ),
-        ],
-        newChunkIndex: state.chunkIndex + 1,
-      };
-    }
-
-    return this.processCollapsibleChunks(
-      state,
-      chunks,
-      firstUncollapsibleChunkIndex
-    );
-  }
-
-  private linesLeft(chunk: DiffContent) {
-    return chunk.ab || chunk.a || [];
-  }
-
-  private linesRight(chunk: DiffContent) {
-    return chunk.ab || chunk.b || [];
-  }
-
-  private firstUncollapsibleChunkIndex(chunks: DiffContent[], offset: number) {
-    let chunkIndex = offset;
-    while (
-      chunkIndex < chunks.length &&
-      this.isCollapsibleChunk(chunks[chunkIndex])
-    ) {
-      chunkIndex++;
-    }
-    return chunkIndex;
-  }
-
-  private isCollapsibleChunk(chunk: DiffContent) {
-    return (chunk.ab || chunk.common || chunk.skip) && !chunk.keyLocation;
-  }
-
-  /**
-   * Process a stretch of collapsible chunks.
-   *
-   * Outputs up to three groups:
-   * 1) Visible context before the hidden common code, unless it's the
-   * very beginning of the file.
-   * 2) Context hidden behind a context bar, unless empty.
-   * 3) Visible context after the hidden common code, unless it's the very
-   * end of the file.
-   */
-  private processCollapsibleChunks(
-    state: State,
-    chunks: DiffContent[],
-    firstUncollapsibleChunkIndex: number
-  ) {
-    const collapsibleChunks = chunks.slice(
-      state.chunkIndex,
-      firstUncollapsibleChunkIndex
-    );
-    const lineCount = collapsibleChunks.reduce(
-      (sum, chunk) => sum + this.commonChunkLength(chunk),
-      0
-    );
-
-    let groups = this.chunksToGroups(
-      collapsibleChunks,
-      state.lineNums.left + 1,
-      state.lineNums.right + 1
-    );
-
-    const hasSkippedGroup = !!groups.find(g => g.skip);
-    if (this.context !== FULL_CONTEXT || hasSkippedGroup) {
-      const contextNumLines = this.context > 0 ? this.context : 0;
-      const hiddenStart = state.chunkIndex === 0 ? 0 : contextNumLines;
-      const hiddenEnd =
-        lineCount -
-        (firstUncollapsibleChunkIndex === chunks.length ? 0 : this.context);
-      groups = hideInContextControl(groups, hiddenStart, hiddenEnd);
-    }
-
-    return {
-      lineDelta: {
-        left: lineCount,
-        right: lineCount,
-      },
-      groups,
-      newChunkIndex: firstUncollapsibleChunkIndex,
-    };
-  }
-
-  private commonChunkLength(chunk: DiffContent) {
-    if (chunk.skip) {
-      return chunk.skip;
-    }
-    console.assert(!!chunk.ab || !!chunk.common);
-
-    console.assert(
-      !chunk.a || (!!chunk.b && chunk.a.length === chunk.b.length),
-      'common chunk needs same number of a and b lines: ',
-      chunk
-    );
-    return this.linesLeft(chunk).length;
-  }
-
-  private chunksToGroups(
-    chunks: DiffContent[],
-    offsetLeft: number,
-    offsetRight: number
-  ): GrDiffGroup[] {
-    return chunks.map(chunk => {
-      const group = this.chunkToGroup(chunk, offsetLeft, offsetRight);
-      const chunkLength = this.commonChunkLength(chunk);
-      offsetLeft += chunkLength;
-      offsetRight += chunkLength;
-      return group;
-    });
-  }
-
-  private chunkToGroup(
-    chunk: DiffContent,
-    offsetLeft: number,
-    offsetRight: number
-  ): GrDiffGroup {
-    const type =
-      chunk.ab || chunk.skip ? GrDiffGroupType.BOTH : GrDiffGroupType.DELTA;
-    const lines = this.linesFromChunk(chunk, offsetLeft, offsetRight);
-    const options = {
-      moveDetails: chunk.move_details,
-      dueToRebase: !!chunk.due_to_rebase,
-      ignoredWhitespaceOnly: !!chunk.common,
-      keyLocation: !!chunk.keyLocation,
-    };
-    if (chunk.skip !== undefined) {
-      return new GrDiffGroup({
-        type,
-        skip: chunk.skip,
-        offsetLeft,
-        offsetRight,
-        ...options,
-      });
-    } else {
-      return new GrDiffGroup({
-        type,
-        lines,
-        ...options,
-      });
-    }
-  }
-
-  private linesFromChunk(
-    chunk: DiffContent,
-    offsetLeft: number,
-    offsetRight: number
-  ) {
-    if (chunk.ab) {
-      return chunk.ab.map((row, i) =>
-        this.lineFromRow(GrDiffLineType.BOTH, offsetLeft, offsetRight, row, i)
-      );
-    }
-    let lines: GrDiffLine[] = [];
-    if (chunk.a) {
-      // Avoiding a.push(...b) because that causes callstack overflows for
-      // large b, which can occur when large files are added removed.
-      lines = lines.concat(
-        this.linesFromRows(
-          GrDiffLineType.REMOVE,
-          chunk.a,
-          offsetLeft,
-          chunk.edit_a
-        )
-      );
-    }
-    if (chunk.b) {
-      // Avoiding a.push(...b) because that causes callstack overflows for
-      // large b, which can occur when large files are added removed.
-      lines = lines.concat(
-        this.linesFromRows(
-          GrDiffLineType.ADD,
-          chunk.b,
-          offsetRight,
-          chunk.edit_b
-        )
-      );
-    }
-    return lines;
-  }
-
-  // visible for testing
-  linesFromRows(
-    lineType: GrDiffLineType,
-    rows: string[],
-    offset: number,
-    intralineInfos?: number[][]
-  ): GrDiffLine[] {
-    const grDiffHighlights = intralineInfos
-      ? this.convertIntralineInfos(rows, intralineInfos)
-      : undefined;
-    return rows.map((row, i) =>
-      this.lineFromRow(lineType, offset, offset, row, i, grDiffHighlights)
-    );
-  }
-
-  private lineFromRow(
-    type: GrDiffLineType,
-    offsetLeft: number,
-    offsetRight: number,
-    row: string,
-    i: number,
-    highlights?: Highlights[]
-  ): GrDiffLine {
-    const line = new GrDiffLine(type);
-    line.text = row;
-    if (type !== GrDiffLineType.ADD) line.beforeNumber = offsetLeft + i;
-    if (type !== GrDiffLineType.REMOVE) line.afterNumber = offsetRight + i;
-    if (highlights) {
-      line.hasIntralineInfo = true;
-      line.highlights = highlights.filter(hl => hl.contentIndex === i);
-    } else {
-      line.hasIntralineInfo = false;
-    }
-    return line;
-  }
-
-  private makeGroup(number: LineNumber) {
-    const line = new GrDiffLine(GrDiffLineType.BOTH);
-    line.beforeNumber = number;
-    line.afterNumber = number;
-    return new GrDiffGroup({type: GrDiffGroupType.BOTH, lines: [line]});
-  }
-
-  /**
-   * In order to show key locations, such as comments, out of the bounds of
-   * the selected context, treat them as separate chunks within the model so
-   * that the content (and context surrounding it) renders correctly.
-   *
-   * @param chunks DiffContents as returned from server.
-   * @return Finer grained DiffContents.
-   */
-  // visible for testing
-  splitCommonChunksWithKeyLocations(chunks: DiffContent[]): DiffContent[] {
-    const result = [];
-    let leftLineNum = 1;
-    let rightLineNum = 1;
-
-    for (const chunk of chunks) {
-      // If it isn't a common chunk, append it as-is and update line numbers.
-      if (!chunk.ab && !chunk.skip && !chunk.common) {
-        if (chunk.a) {
-          leftLineNum += chunk.a.length;
-        }
-        if (chunk.b) {
-          rightLineNum += chunk.b.length;
-        }
-        result.push(chunk);
-        continue;
-      }
-
-      if (chunk.common && chunk.a!.length !== chunk.b!.length) {
-        throw new Error(
-          'DiffContent with common=true must always have equal length'
-        );
-      }
-      const numLines = this.commonChunkLength(chunk);
-      const chunkEnds = this.findChunkEndsAtKeyLocations(
-        numLines,
-        leftLineNum,
-        rightLineNum
-      );
-      leftLineNum += numLines;
-      rightLineNum += numLines;
-
-      if (chunk.skip) {
-        result.push({
-          ...chunk,
-          skip: chunk.skip,
-          keyLocation: false,
-        });
-      } else if (chunk.ab) {
-        result.push(
-          ...this.splitAtChunkEnds(chunk.ab, chunkEnds).map(
-            ({lines, keyLocation}) => {
-              return {
-                ...chunk,
-                ab: lines,
-                keyLocation,
-              };
-            }
-          )
-        );
-      } else if (chunk.common) {
-        const aChunks = this.splitAtChunkEnds(chunk.a!, chunkEnds);
-        const bChunks = this.splitAtChunkEnds(chunk.b!, chunkEnds);
-        result.push(
-          ...aChunks.map(({lines, keyLocation}, i) => {
-            return {
-              ...chunk,
-              a: lines,
-              b: bChunks[i].lines,
-              keyLocation,
-            };
-          })
-        );
-      }
-    }
-
-    return result;
-  }
-
-  /**
-   * @return Offsets of the new chunk ends, including whether it's a key
-   * location.
-   */
-  private findChunkEndsAtKeyLocations(
-    numLines: number,
-    leftOffset: number,
-    rightOffset: number
-  ): ChunkEnd[] {
-    const result = [];
-    let lastChunkEnd = 0;
-    for (let i = 0; i < numLines; i++) {
-      // If this line should not be collapsed.
-      if (
-        this.keyLocations[Side.LEFT][leftOffset + i] ||
-        this.keyLocations[Side.RIGHT][rightOffset + i]
-      ) {
-        // If any lines have been accumulated into the chunk leading up to
-        // this non-collapse line, then add them as a chunk and start a new
-        // one.
-        if (i > lastChunkEnd) {
-          result.push({offset: i, keyLocation: false});
-          lastChunkEnd = i;
-        }
-
-        // Add the non-collapse line as its own chunk.
-        result.push({offset: i + 1, keyLocation: true});
-      }
-    }
-
-    if (numLines > lastChunkEnd) {
-      result.push({offset: numLines, keyLocation: false});
-    }
-
-    return result;
-  }
-
-  private splitAtChunkEnds(lines: string[], chunkEnds: ChunkEnd[]) {
-    const result = [];
-    let lastChunkEndOffset = 0;
-    for (const {offset, keyLocation} of chunkEnds) {
-      if (lastChunkEndOffset === offset) continue;
-      result.push({
-        lines: lines.slice(lastChunkEndOffset, offset),
-        keyLocation,
-      });
-      lastChunkEndOffset = offset;
-    }
-    return result;
-  }
-
-  /**
-   * Converts `IntralineInfo`s return by the API to `GrLineHighlights` used
-   * for rendering.
-   */
-  // visible for testing
-  convertIntralineInfos(
-    rows: string[],
-    intralineInfos: number[][]
-  ): Highlights[] {
-    // +1 to account for the \n that is not part of the rows passed here
-    const lineLengths = rows.map(r => getStringLength(r) + 1);
-
-    let rowIndex = 0;
-    let idx = 0;
-    const normalized = [];
-    for (const [skipLength, markLength] of intralineInfos) {
-      let lineLength = lineLengths[rowIndex];
-      let j = 0;
-      while (j < skipLength) {
-        if (idx === lineLength) {
-          idx = 0;
-          lineLength = lineLengths[++rowIndex];
-          continue;
-        }
-        idx++;
-        j++;
-      }
-      let lineHighlight: Highlights = {
-        contentIndex: rowIndex,
-        startIndex: idx,
-      };
-
-      j = 0;
-      while (lineLength && j < markLength) {
-        if (idx === lineLength) {
-          idx = 0;
-          lineLength = lineLengths[++rowIndex];
-          normalized.push(lineHighlight);
-          lineHighlight = {
-            contentIndex: rowIndex,
-            startIndex: idx,
-          };
-          continue;
-        }
-        idx++;
-        j++;
-      }
-      lineHighlight.endIndex = idx;
-      normalized.push(lineHighlight);
-    }
-    return normalized;
-  }
-}
diff --git a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified_test.ts b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified_test.ts
deleted file mode 100644
index abd224f..0000000
--- a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor-simplified_test.ts
+++ /dev/null
@@ -1,987 +0,0 @@
-/**
- * @license
- * Copyright 2016 Google LLC
- * SPDX-License-Identifier: Apache-2.0
- */
-import '../../../test/common-test-setup';
-import './gr-diff-processor';
-import {GrDiffLine} from '../gr-diff/gr-diff-line';
-import {GrDiffGroup, GrDiffGroupType} from '../gr-diff/gr-diff-group';
-import {
-  GrDiffProcessorSimplified,
-  ProcessingOptions,
-  State,
-} from './gr-diff-processor-simplified';
-import {DiffContent} from '../../../types/diff';
-import {assert} from '@open-wc/testing';
-import {FILE, GrDiffLineType} from '../../../api/diff';
-import {FULL_CONTEXT} from '../gr-diff/gr-diff-utils';
-
-suite('gr-diff-processor tests', () => {
-  const loremIpsum =
-    'Lorem ipsum dolor sit amet, ei nonumes vituperata ius. ' +
-    'Duo  animal omnesque fabellas et. Id has phaedrum dignissim ' +
-    'deterruisset, pro ei petentium comprehensam, ut vis solum dicta. ' +
-    'Eos cu aliquam labores qualisque, usu postea inermis te, et solum ' +
-    'fugit assum per.';
-
-  let processor: GrDiffProcessorSimplified;
-  let options: ProcessingOptions = {
-    context: 4,
-  };
-
-  setup(() => {});
-
-  suite('not logged in', () => {
-    setup(() => {
-      options = {context: 4};
-      processor = new GrDiffProcessorSimplified(options);
-    });
-
-    test('process loaded content', async () => {
-      const content: DiffContent[] = [
-        {
-          ab: ['<!DOCTYPE html>', '<meta charset="utf-8">'],
-        },
-        {
-          a: ['  Welcome ', '  to the wooorld of tomorrow!'],
-          b: ['  Hello, world!'],
-        },
-        {
-          ab: [
-            'Leela: This is the only place the ship can’t hear us, so ',
-            'everyone pretend to shower.',
-            'Fry: Same as every day. Got it.',
-          ],
-        },
-      ];
-
-      const groups = await processor.process(content);
-      groups.shift(); // remove portedThreadsWithoutRangeGroup
-      assert.equal(groups.length, 4);
-
-      let group = groups[0];
-      assert.equal(group.type, GrDiffGroupType.BOTH);
-      assert.equal(group.lines.length, 1);
-      assert.equal(group.lines[0].text, '');
-      assert.equal(group.lines[0].beforeNumber, FILE);
-      assert.equal(group.lines[0].afterNumber, FILE);
-
-      group = groups[1];
-      assert.equal(group.type, GrDiffGroupType.BOTH);
-      assert.equal(group.lines.length, 2);
-
-      function beforeNumberFn(l: GrDiffLine) {
-        return l.beforeNumber;
-      }
-      function afterNumberFn(l: GrDiffLine) {
-        return l.afterNumber;
-      }
-      function textFn(l: GrDiffLine) {
-        return l.text;
-      }
-
-      assert.deepEqual(group.lines.map(beforeNumberFn), [1, 2]);
-      assert.deepEqual(group.lines.map(afterNumberFn), [1, 2]);
-      assert.deepEqual(group.lines.map(textFn), [
-        '<!DOCTYPE html>',
-        '<meta charset="utf-8">',
-      ]);
-
-      group = groups[2];
-      assert.equal(group.type, GrDiffGroupType.DELTA);
-      assert.equal(group.lines.length, 3);
-      assert.equal(group.adds.length, 1);
-      assert.equal(group.removes.length, 2);
-      assert.deepEqual(group.removes.map(beforeNumberFn), [3, 4]);
-      assert.deepEqual(group.adds.map(afterNumberFn), [3]);
-      assert.deepEqual(group.removes.map(textFn), [
-        '  Welcome ',
-        '  to the wooorld of tomorrow!',
-      ]);
-      assert.deepEqual(group.adds.map(textFn), ['  Hello, world!']);
-
-      group = groups[3];
-      assert.equal(group.type, GrDiffGroupType.BOTH);
-      assert.equal(group.lines.length, 3);
-      assert.deepEqual(group.lines.map(beforeNumberFn), [5, 6, 7]);
-      assert.deepEqual(group.lines.map(afterNumberFn), [4, 5, 6]);
-      assert.deepEqual(group.lines.map(textFn), [
-        'Leela: This is the only place the ship can’t hear us, so ',
-        'everyone pretend to shower.',
-        'Fry: Same as every day. Got it.',
-      ]);
-    });
-
-    test('first group is for file', async () => {
-      const content = [{b: ['foo']}];
-
-      const groups = await processor.process(content);
-      groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-      assert.equal(groups[0].type, GrDiffGroupType.BOTH);
-      assert.equal(groups[0].lines.length, 1);
-      assert.equal(groups[0].lines[0].text, '');
-      assert.equal(groups[0].lines[0].beforeNumber, FILE);
-      assert.equal(groups[0].lines[0].afterNumber, FILE);
-    });
-
-    suite('context groups', async () => {
-      test('at the beginning, larger than context', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {
-            ab: Array.from<string>({length: 100}).fill(
-              'all work and no play make jack a dull boy'
-            ),
-          },
-          {a: ['all work and no play make andybons a dull boy']},
-        ];
-
-        const groups = await processor.process(content);
-        // group[0] is the LOST group
-        // group[1] is the FILE group
-
-        assert.equal(groups[2].type, GrDiffGroupType.CONTEXT_CONTROL);
-        assert.instanceOf(groups[2].contextGroups[0], GrDiffGroup);
-        assert.equal(groups[2].contextGroups[0].lines.length, 90);
-        for (const l of groups[2].contextGroups[0].lines) {
-          assert.equal(l.text, 'all work and no play make jack a dull boy');
-        }
-
-        assert.equal(groups[3].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[3].lines.length, 10);
-        for (const l of groups[3].lines) {
-          assert.equal(l.text, 'all work and no play make jack a dull boy');
-        }
-      });
-
-      test('at the beginning with skip chunks', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {
-            ab: Array.from<string>({length: 20}).fill(
-              'all work and no play make jack a dull boy'
-            ),
-          },
-          {skip: 43900},
-          {ab: Array.from<string>({length: 30}).fill('some other content')},
-          {a: ['some other content']},
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-
-        const commonGroup = groups[1];
-
-        // Hidden context before
-        assert.equal(commonGroup.type, GrDiffGroupType.CONTEXT_CONTROL);
-        assert.instanceOf(commonGroup.contextGroups[0], GrDiffGroup);
-        assert.equal(commonGroup.contextGroups[0].lines.length, 20);
-        for (const l of commonGroup.contextGroups[0].lines) {
-          assert.equal(l.text, 'all work and no play make jack a dull boy');
-        }
-
-        // Skipped group
-        const skipGroup = commonGroup.contextGroups[1];
-        assert.equal(skipGroup.skip, 43900);
-        const expectedRange = {
-          left: {start_line: 21, end_line: 43920},
-          right: {start_line: 21, end_line: 43920},
-        };
-        assert.deepEqual(skipGroup.lineRange, expectedRange);
-
-        // Hidden context after
-        assert.equal(commonGroup.contextGroups[2].lines.length, 20);
-        for (const l of commonGroup.contextGroups[2].lines) {
-          assert.equal(l.text, 'some other content');
-        }
-
-        // Displayed lines
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 10);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'some other content');
-        }
-      });
-
-      test('at the beginning, smaller than context', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {
-            ab: Array.from<string>({length: 5}).fill(
-              'all work and no play make jack a dull boy'
-            ),
-          },
-          {a: ['all work and no play make andybons a dull boy']},
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-
-        assert.equal(groups[1].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[1].lines.length, 5);
-        for (const l of groups[1].lines) {
-          assert.equal(l.text, 'all work and no play make jack a dull boy');
-        }
-      });
-
-      test('at the end, larger than context', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {a: ['all work and no play make andybons a dull boy']},
-          {
-            ab: Array.from<string>({length: 100}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-        // group[1] is the "a" group
-
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 10);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[3].type, GrDiffGroupType.CONTEXT_CONTROL);
-        assert.instanceOf(groups[3].contextGroups[0], GrDiffGroup);
-        assert.equal(groups[3].contextGroups[0].lines.length, 90);
-        for (const l of groups[3].contextGroups[0].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-      });
-
-      test('at the end, smaller than context', async () => {
-        options.context = 10;
-        const content = [
-          {a: ['all work and no play make andybons a dull boy']},
-          {
-            ab: Array.from<string>({length: 5}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-        // group[1] is the "a" group
-
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 5);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-      });
-
-      test('for interleaved ab and common: true chunks', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {a: ['all work and no play make andybons a dull boy']},
-          {
-            ab: Array.from<string>({length: 3}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-          {
-            a: Array.from<string>({length: 3}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-            b: Array.from<string>({length: 3}).fill(
-              '  all work and no play make jill a dull girl'
-            ),
-            common: true,
-          },
-          {
-            ab: Array.from<string>({length: 3}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-          {
-            a: Array.from<string>({length: 3}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-            b: Array.from<string>({length: 3}).fill(
-              '  all work and no play make jill a dull girl'
-            ),
-            common: true,
-          },
-          {
-            ab: Array.from<string>({length: 3}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-        // group[1] is the "a" group
-
-        // The first three interleaved chunks are completely shown because
-        // they are part of the context (3 * 3 <= 10)
-
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 3);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[3].type, GrDiffGroupType.DELTA);
-        assert.equal(groups[3].lines.length, 6);
-        assert.equal(groups[3].adds.length, 3);
-        assert.equal(groups[3].removes.length, 3);
-        for (const l of groups[3].removes) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-        for (const l of groups[3].adds) {
-          assert.equal(l.text, '  all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[4].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[4].lines.length, 3);
-        for (const l of groups[4].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-
-        // The next chunk is partially shown, so it results in two groups
-
-        assert.equal(groups[5].type, GrDiffGroupType.DELTA);
-        assert.equal(groups[5].lines.length, 2);
-        assert.equal(groups[5].adds.length, 1);
-        assert.equal(groups[5].removes.length, 1);
-        for (const l of groups[5].removes) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-        for (const l of groups[5].adds) {
-          assert.equal(l.text, '  all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[6].type, GrDiffGroupType.CONTEXT_CONTROL);
-        assert.equal(groups[6].contextGroups.length, 2);
-
-        assert.equal(groups[6].contextGroups[0].lines.length, 4);
-        assert.equal(groups[6].contextGroups[0].removes.length, 2);
-        assert.equal(groups[6].contextGroups[0].adds.length, 2);
-        for (const l of groups[6].contextGroups[0].removes) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-        for (const l of groups[6].contextGroups[0].adds) {
-          assert.equal(l.text, '  all work and no play make jill a dull girl');
-        }
-
-        // The final chunk is completely hidden
-        assert.equal(groups[6].contextGroups[1].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[6].contextGroups[1].lines.length, 3);
-        for (const l of groups[6].contextGroups[1].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-      });
-
-      test('in the middle, larger than context', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {a: ['all work and no play make andybons a dull boy']},
-          {
-            ab: Array.from<string>({length: 100}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-          {a: ['all work and no play make andybons a dull boy']},
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-        // group[1] is the "a" group
-
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 10);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[3].type, GrDiffGroupType.CONTEXT_CONTROL);
-        assert.instanceOf(groups[3].contextGroups[0], GrDiffGroup);
-        assert.equal(groups[3].contextGroups[0].lines.length, 80);
-        for (const l of groups[3].contextGroups[0].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-
-        assert.equal(groups[4].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[4].lines.length, 10);
-        for (const l of groups[4].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-      });
-
-      test('in the middle, smaller than context', async () => {
-        options.context = 10;
-        processor = new GrDiffProcessorSimplified(options);
-        const content = [
-          {a: ['all work and no play make andybons a dull boy']},
-          {
-            ab: Array.from<string>({length: 5}).fill(
-              'all work and no play make jill a dull girl'
-            ),
-          },
-          {a: ['all work and no play make andybons a dull boy']},
-        ];
-
-        const groups = await processor.process(content);
-        groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-        // group[0] is the file group
-        // group[1] is the "a" group
-
-        assert.equal(groups[2].type, GrDiffGroupType.BOTH);
-        assert.equal(groups[2].lines.length, 5);
-        for (const l of groups[2].lines) {
-          assert.equal(l.text, 'all work and no play make jill a dull girl');
-        }
-      });
-    });
-
-    test('in the middle with skip chunks', async () => {
-      options.context = 10;
-      processor = new GrDiffProcessorSimplified(options);
-      const content = [
-        {a: ['all work and no play make andybons a dull boy']},
-        {
-          ab: Array.from<string>({length: 20}).fill(
-            'all work and no play make jill a dull girl'
-          ),
-        },
-        {skip: 60},
-        {
-          ab: Array.from<string>({length: 20}).fill(
-            'all work and no play make jill a dull girl'
-          ),
-        },
-        {a: ['all work and no play make andybons a dull boy']},
-      ];
-
-      const groups = await processor.process(content);
-      groups.shift(); // remove portedThreadsWithoutRangeGroup
-
-      // group[0] is the file group
-      // group[1] is the chunk with a
-      // group[2] is the displayed part of ab before
-
-      const commonGroup = groups[3];
-
-      // Hidden context before
-      assert.equal(commonGroup.type, GrDiffGroupType.CONTEXT_CONTROL);
-      assert.instanceOf(commonGroup.contextGroups[0], GrDiffGroup);
-      assert.equal(commonGroup.contextGroups[0].lines.length, 10);
-      for (const l of commonGroup.contextGroups[0].lines) {
-        assert.equal(l.text, 'all work and no play make jill a dull girl');
-      }
-
-      // Skipped group
-      const skipGroup = commonGroup.contextGroups[1];
-      assert.equal(skipGroup.skip, 60);
-      const expectedRange = {
-        left: {start_line: 22, end_line: 81},
-        right: {start_line: 21, end_line: 80},
-      };
-      assert.deepEqual(skipGroup.lineRange, expectedRange);
-
-      // Hidden context after
-      assert.equal(commonGroup.contextGroups[2].lines.length, 10);
-      for (const l of commonGroup.contextGroups[2].lines) {
-        assert.equal(l.text, 'all work and no play make jill a dull girl');
-      }
-      // group[4] is the displayed part of the second ab
-    });
-
-    test('works with skip === 0', async () => {
-      options.context = 3;
-      processor = new GrDiffProcessorSimplified(options);
-      const content = [
-        {
-          skip: 0,
-        },
-        {
-          b: [
-            '/**',
-            ' * @license',
-            ' * Copyright 2015 Google LLC',
-            ' * SPDX-License-Identifier: Apache-2.0',
-            ' */',
-            "import '../../../test/common-test-setup';",
-          ],
-        },
-      ];
-      await processor.process(content);
-    });
-
-    test('break up common diff chunks', () => {
-      options.keyLocations = {
-        left: {1: true},
-        right: {10: true},
-      };
-      processor = new GrDiffProcessorSimplified(options);
-
-      const content = [
-        {
-          ab: [
-            'copy',
-            '',
-            'asdf',
-            'qwer',
-            'zxcv',
-            '',
-            'http',
-            '',
-            'vbnm',
-            'dfgh',
-            'yuio',
-            'sdfg',
-            '1234',
-          ],
-        },
-      ];
-      const result = processor.splitCommonChunksWithKeyLocations(content);
-      assert.deepEqual(result, [
-        {
-          ab: ['copy'],
-          keyLocation: true,
-        },
-        {
-          ab: ['', 'asdf', 'qwer', 'zxcv', '', 'http', '', 'vbnm'],
-          keyLocation: false,
-        },
-        {
-          ab: ['dfgh'],
-          keyLocation: true,
-        },
-        {
-          ab: ['yuio', 'sdfg', '1234'],
-          keyLocation: false,
-        },
-      ]);
-    });
-
-    test('does not break-down common chunks w/ context', () => {
-      const ab = Array(75)
-        .fill(0)
-        .map(() => `${Math.random()}`);
-      const content = [{ab}];
-      processor.context = 4;
-      const result = processor.splitCommonChunksWithKeyLocations(content);
-      assert.equal(result.length, 1);
-      assert.deepEqual(result[0].ab, content[0].ab);
-      assert.isFalse(result[0].keyLocation);
-    });
-
-    test('intraline normalization', () => {
-      // The content and highlights are in the format returned by the Gerrit
-      // REST API.
-      let content = [
-        '      <section class="summary">',
-        '        <gr-formatted-text content="' +
-          '[[_computeCurrentRevisionMessage(change)]]"></gr-formatted-text>',
-        '      </section>',
-      ];
-      let highlights = [
-        [31, 34],
-        [42, 26],
-      ];
-
-      let results = processor.convertIntralineInfos(content, highlights);
-      assert.deepEqual(results, [
-        {
-          contentIndex: 0,
-          startIndex: 31,
-        },
-        {
-          contentIndex: 1,
-          startIndex: 0,
-          endIndex: 33,
-        },
-        {
-          contentIndex: 1,
-          endIndex: 101,
-          startIndex: 75,
-        },
-      ]);
-      const lines = processor.linesFromRows(
-        GrDiffLineType.BOTH,
-        content,
-        0,
-        highlights
-      );
-      assert.equal(lines.length, 3);
-      assert.isTrue(lines[0].hasIntralineInfo);
-      assert.equal(lines[0].highlights.length, 1);
-      assert.isTrue(lines[1].hasIntralineInfo);
-      assert.equal(lines[1].highlights.length, 2);
-      assert.isTrue(lines[2].hasIntralineInfo);
-      assert.equal(lines[2].highlights.length, 0);
-
-      content = [
-        '        this._path = value.path;',
-        '',
-        '        // When navigating away from the page, there is a ' +
-          'possibility that the',
-        '        // patch number is no longer a part of the URL ' +
-          '(say when navigating to',
-        '        // the top-level change info view) and therefore ' +
-          'undefined in `params`.',
-        '        if (!this._patchRange.patchNum) {',
-      ];
-      highlights = [
-        [14, 17],
-        [11, 70],
-        [12, 67],
-        [12, 67],
-        [14, 29],
-      ];
-      results = processor.convertIntralineInfos(content, highlights);
-      assert.deepEqual(results, [
-        {
-          contentIndex: 0,
-          startIndex: 14,
-          endIndex: 31,
-        },
-        {
-          contentIndex: 2,
-          startIndex: 8,
-          endIndex: 78,
-        },
-        {
-          contentIndex: 3,
-          startIndex: 11,
-          endIndex: 78,
-        },
-        {
-          contentIndex: 4,
-          startIndex: 11,
-          endIndex: 78,
-        },
-        {
-          contentIndex: 5,
-          startIndex: 12,
-          endIndex: 41,
-        },
-      ]);
-
-      content = ['🙈 a', '🙉 b', '🙊 c'];
-      highlights = [[2, 7]];
-      results = processor.convertIntralineInfos(content, highlights);
-      assert.deepEqual(results, [
-        {
-          contentIndex: 0,
-          startIndex: 2,
-        },
-        {
-          contentIndex: 1,
-          startIndex: 0,
-        },
-        {
-          contentIndex: 2,
-          startIndex: 0,
-          endIndex: 1,
-        },
-      ]);
-    });
-
-    test('image diffs', async () => {
-      const content = Array(200).fill({ab: ['', '']});
-      options.isBinary = true;
-      processor = new GrDiffProcessorSimplified(options);
-      const groups = await processor.process(content);
-      assert.equal(groups.length, 2);
-
-      // Image diffs don't process content, just the 'FILE' line.
-      assert.equal(groups[0].lines.length, 1);
-    });
-
-    suite('processNext', () => {
-      let rows: string[];
-
-      setup(() => {
-        rows = loremIpsum.split(' ');
-      });
-
-      test('FULL_CONTEXT', () => {
-        processor.context = FULL_CONTEXT;
-        const state: State = {
-          lineNums: {left: 10, right: 100},
-          chunkIndex: 1,
-        };
-        const chunks = [{a: ['foo']}, {ab: rows}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-
-        // Results in one, uncollapsed group with all rows.
-        assert.equal(result.groups.length, 1);
-        assert.equal(result.groups[0].type, GrDiffGroupType.BOTH);
-        assert.equal(result.groups[0].lines.length, rows.length);
-
-        // Line numbers are set correctly.
-        assert.equal(
-          result.groups[0].lines[0].beforeNumber,
-          state.lineNums.left + 1
-        );
-        assert.equal(
-          result.groups[0].lines[0].afterNumber,
-          state.lineNums.right + 1
-        );
-
-        assert.equal(
-          result.groups[0].lines[rows.length - 1].beforeNumber,
-          state.lineNums.left + rows.length
-        );
-        assert.equal(
-          result.groups[0].lines[rows.length - 1].afterNumber,
-          state.lineNums.right + rows.length
-        );
-      });
-
-      test('FULL_CONTEXT with skip chunks still get collapsed', () => {
-        processor.context = FULL_CONTEXT;
-        const lineNums = {left: 10, right: 100};
-        const state = {
-          lineNums,
-          chunkIndex: 1,
-        };
-        const skip = 10000;
-        const chunks = [{a: ['foo']}, {skip}, {ab: rows}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-        // Results in a single collapsed context-control group (skip + ab).
-        assert.equal(result.groups.length, 1);
-        assert.equal(result.groups[0].type, GrDiffGroupType.CONTEXT_CONTROL);
-
-        // Skip and ab group are hidden in the same context control
-        assert.equal(result.groups[0].contextGroups.length, 2);
-        const [skippedGroup, abGroup] = result.groups[0].contextGroups;
-
-        // Line numbers are set correctly.
-        assert.deepEqual(skippedGroup.lineRange, {
-          left: {
-            start_line: lineNums.left + 1,
-            end_line: lineNums.left + skip,
-          },
-          right: {
-            start_line: lineNums.right + 1,
-            end_line: lineNums.right + skip,
-          },
-        });
-
-        assert.deepEqual(abGroup.lineRange, {
-          left: {
-            start_line: lineNums.left + skip + 1,
-            end_line: lineNums.left + skip + rows.length,
-          },
-          right: {
-            start_line: lineNums.right + skip + 1,
-            end_line: lineNums.right + skip + rows.length,
-          },
-        });
-      });
-
-      test('with context', () => {
-        processor.context = 10;
-        const state = {
-          lineNums: {left: 10, right: 100},
-          chunkIndex: 1,
-        };
-        const chunks = [{a: ['foo']}, {ab: rows}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-        const expectedCollapseSize = rows.length - 2 * processor.context;
-
-        assert.equal(result.groups.length, 3, 'Results in three groups');
-
-        // The first and last are uncollapsed context, whereas the middle has
-        // a single context-control line.
-        assert.equal(result.groups[0].lines.length, processor.context);
-        assert.equal(result.groups[2].lines.length, processor.context);
-
-        // The collapsed group has the hidden lines as its context group.
-        assert.equal(
-          result.groups[1].contextGroups[0].lines.length,
-          expectedCollapseSize
-        );
-      });
-
-      test('first', () => {
-        processor.context = 10;
-        const state = {
-          lineNums: {left: 10, right: 100},
-          chunkIndex: 0,
-        };
-        const chunks = [{ab: rows}, {a: ['foo']}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-        const expectedCollapseSize = rows.length - processor.context;
-
-        assert.equal(result.groups.length, 2, 'Results in two groups');
-
-        // Only the first group is collapsed.
-        assert.equal(result.groups[1].lines.length, processor.context);
-
-        // The collapsed group has the hidden lines as its context group.
-        assert.equal(
-          result.groups[0].contextGroups[0].lines.length,
-          expectedCollapseSize
-        );
-      });
-
-      test('few-rows', () => {
-        // Only ten rows.
-        rows = rows.slice(0, 10);
-        processor.context = 10;
-        const state = {
-          lineNums: {left: 10, right: 100},
-          chunkIndex: 0,
-        };
-        const chunks = [{ab: rows}, {a: ['foo']}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-
-        // Results in one uncollapsed group with all rows.
-        assert.equal(result.groups.length, 1, 'Results in one group');
-        assert.equal(result.groups[0].lines.length, rows.length);
-      });
-
-      test('no single line collapse', () => {
-        rows = rows.slice(0, 7);
-        processor.context = 3;
-        const state = {
-          lineNums: {left: 10, right: 100},
-          chunkIndex: 1,
-        };
-        const chunks = [{a: ['foo']}, {ab: rows}, {a: ['bar']}];
-        const result = processor.processNext(state, chunks);
-
-        // Results in one uncollapsed group with all rows.
-        assert.equal(result.groups.length, 1, 'Results in one group');
-        assert.equal(result.groups[0].lines.length, rows.length);
-      });
-
-      suite('with key location', () => {
-        let state: State;
-        let chunks: DiffContent[];
-
-        setup(() => {
-          state = {
-            lineNums: {left: 10, right: 100},
-            chunkIndex: 0,
-          };
-          processor.context = 10;
-          chunks = [{ab: rows}, {ab: ['foo'], keyLocation: true}, {ab: rows}];
-        });
-
-        test('context before', () => {
-          state.chunkIndex = 0;
-          const result = processor.processNext(state, chunks);
-
-          // The first chunk is split into two groups:
-          // 1) A context-control, hiding everything but the context before
-          //    the key location.
-          // 2) The context before the key location.
-          // The key location is not processed in this call to processNext
-          assert.equal(result.groups.length, 2);
-          // The collapsed group has the hidden lines as its context group.
-          assert.equal(
-            result.groups[0].contextGroups[0].lines.length,
-            rows.length - processor.context
-          );
-          assert.equal(result.groups[1].lines.length, processor.context);
-        });
-
-        test('key location itself', () => {
-          state.chunkIndex = 1;
-          const result = processor.processNext(state, chunks);
-
-          // The second chunk results in a single group, that is just the
-          // line with the key location
-          assert.equal(result.groups.length, 1);
-          assert.equal(result.groups[0].lines.length, 1);
-          assert.equal(result.lineDelta.left, 1);
-          assert.equal(result.lineDelta.right, 1);
-        });
-
-        test('context after', () => {
-          state.chunkIndex = 2;
-          const result = processor.processNext(state, chunks);
-
-          // The last chunk is split into two groups:
-          // 1) The context after the key location.
-          // 2) A context-control, hiding everything but the context after the
-          //    key location.
-          assert.equal(result.groups.length, 2);
-          assert.equal(result.groups[0].lines.length, processor.context);
-          // The collapsed group has the hidden lines as its context group.
-          assert.equal(
-            result.groups[1].contextGroups[0].lines.length,
-            rows.length - processor.context
-          );
-        });
-      });
-    });
-
-    suite('gr-diff-processor helpers', () => {
-      let rows: string[];
-
-      setup(() => {
-        rows = loremIpsum.split(' ');
-      });
-
-      test('linesFromRows', () => {
-        const startLineNum = 10;
-        let result = processor.linesFromRows(
-          GrDiffLineType.ADD,
-          rows,
-          startLineNum + 1
-        );
-
-        assert.equal(result.length, rows.length);
-        assert.equal(result[0].type, GrDiffLineType.ADD);
-        assert.notOk(result[0].hasIntralineInfo);
-        assert.equal(result[0].afterNumber, startLineNum + 1);
-        assert.notOk(result[0].beforeNumber);
-        assert.equal(
-          result[result.length - 1].afterNumber,
-          startLineNum + rows.length
-        );
-        assert.notOk(result[result.length - 1].beforeNumber);
-
-        result = processor.linesFromRows(
-          GrDiffLineType.REMOVE,
-          rows,
-          startLineNum + 1
-        );
-
-        assert.equal(result.length, rows.length);
-        assert.equal(result[0].type, GrDiffLineType.REMOVE);
-        assert.notOk(result[0].hasIntralineInfo);
-        assert.equal(result[0].beforeNumber, startLineNum + 1);
-        assert.notOk(result[0].afterNumber);
-        assert.equal(
-          result[result.length - 1].beforeNumber,
-          startLineNum + rows.length
-        );
-        assert.notOk(result[result.length - 1].afterNumber);
-      });
-    });
-  });
-});
diff --git a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor.ts b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor.ts
index 5db6db9..6b7b45f 100644
--- a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor.ts
+++ b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor.ts
@@ -11,8 +11,6 @@
 } from '../gr-diff/gr-diff-group';
 import {DiffContent} from '../../../types/diff';
 import {Side} from '../../../constants/constants';
-import {debounce, DelayedTask} from '../../../utils/async-util';
-import {assert} from '../../../utils/common-util';
 import {getStringLength} from '../gr-diff-highlight/gr-annotation';
 import {GrDiffLineType, LineNumber} from '../../../api/diff';
 import {FULL_CONTEXT, KeyLocations} from '../gr-diff/gr-diff-utils';
@@ -31,19 +29,6 @@
   keyLocation: boolean;
 }
 
-/**
- * The maximum size for an addition or removal chunk before it is broken down
- * into a series of chunks that are this size at most.
- *
- * Note: The value of 120 is chosen so that it is larger than the default
- * asyncThreshold of 64, but feel free to tune this constant to your
- * performance needs.
- */
-function calcMaxGroupSize(asyncThreshold?: number): number {
-  if (!asyncThreshold) return 120;
-  return asyncThreshold * 2;
-}
-
 /** Interface for listening to the output of the processor. */
 export interface GroupConsumer {
   addGroup(group: GrDiffGroup): void;
@@ -90,114 +75,48 @@
   // visible for testing
   keyLocations: KeyLocations;
 
-  private asyncThreshold: number;
+  private isBinary = false;
 
-  private isBinary: boolean;
-
-  // visible for testing
-  isScrolling?: boolean;
-
-  /** Just for making sure that process() is only called once. */
-  private isStarted = false;
-
-  /** Indicates that processing should be stopped. */
-  private isCancelled = false;
-
-  private resetIsScrollingTask?: DelayedTask;
-
-  private readonly groups: GrDiffGroup[] = [];
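+  // Note: no longer `readonly`, because the simplified process() below
+  // resets this array on every call.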
+  private groups: GrDiffGroup[] = [];
 
   constructor(options: ProcessingOptions) {
     this.context = options.context;
-    this.asyncThreshold = options.asyncThreshold ?? 64;
     this.keyLocations = options.keyLocations ?? {left: {}, right: {}};
     this.isBinary = options.isBinary ?? false;
   }
 
-  private readonly handleWindowScroll = () => {
-    this.isScrolling = true;
-    this.resetIsScrollingTask = debounce(
-      this.resetIsScrollingTask,
-      () => (this.isScrolling = false),
-      50
-    );
-  };
-
   /**
-   * Asynchronously process the diff chunks into groups. As it processes, it
-   * will splice groups into the `groups` property of the component.
+   * Process the diff chunks into GrDiffGroups.
    *
-   * @return A promise that resolves with an
-   * array of GrDiffGroups when the diff is completely processed.
+   * @return an array of GrDiffGroups
    */
-  async process(chunks: DiffContent[]): Promise<GrDiffGroup[]> {
-    assert(this.isStarted === false, 'diff processor cannot be started twice');
-
-    window.addEventListener('scroll', this.handleWindowScroll);
-
+  process(chunks: DiffContent[]): GrDiffGroup[] {
+    this.groups = [];
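+    // groups[0] is the LOST group (ported comment threads without a range),
+    // groups[1] is the FILE group.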
     this.groups.push(this.makeGroup('LOST'));
     this.groups.push(this.makeGroup('FILE'));
 
-    if (this.isBinary) return this.groups;
-    try {
-      await this.processChunks(chunks);
-    } finally {
-      this.finish();
-    }
+    this.processChunks(chunks);
     return this.groups;
   }
 
-  finish() {
-    window.removeEventListener('scroll', this.handleWindowScroll);
-  }
-
-  cancel() {
-    this.isCancelled = true;
-    this.finish();
-  }
-
-  async processChunks(chunks: DiffContent[]) {
-    let completed = () => {};
-    const promise = new Promise<void>(resolve => (completed = resolve));
+  processChunks(chunks: DiffContent[]) {
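+    // Binary diffs (e.g. image diffs) have no text chunks to process; only
+    // the LOST and FILE groups created in process() are returned for them.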
+    if (this.isBinary) return;
 
     const state = {
       lineNums: {left: 0, right: 0},
       chunkIndex: 0,
     };
-
-    chunks = this.splitLargeChunks(chunks);
     chunks = this.splitCommonChunksWithKeyLocations(chunks);
 
-    let currentBatch = 0;
-    const nextStep = () => {
-      if (this.isCancelled || state.chunkIndex >= chunks.length) {
-        completed();
-        return;
-      }
-      if (this.isScrolling) {
-        window.setTimeout(nextStep, 100);
-        return;
-      }
-
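+    // Unlike the original processor, the simplified processor walks all
+    // chunks synchronously rather than batching the work via setTimeout.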
+    while (state.chunkIndex < chunks.length) {
       const stateUpdate = this.processNext(state, chunks);
       for (const group of stateUpdate.groups) {
         this.groups.push(group);
-        currentBatch += group.lines.length;
       }
       state.lineNums.left += stateUpdate.lineDelta.left;
       state.lineNums.right += stateUpdate.lineDelta.right;
-
       state.chunkIndex = stateUpdate.newChunkIndex;
-      if (currentBatch >= this.asyncThreshold) {
-        currentBatch = 0;
-        window.setTimeout(nextStep, 1);
-      } else {
-        nextStep.call(this);
-      }
-    };
-
-    nextStep.call(this);
-    await promise;
+    }
   }
 
   /**
@@ -448,53 +367,6 @@
   }
 
   /**
-   * Split chunks into smaller chunks of the same kind.
-   *
-   * This is done to prevent doing too much work on the main thread in one
-   * uninterrupted rendering step, which would make the browser unresponsive.
-   *
-   * Note that in the case of unmodified chunks, we only split chunks if the
-   * context is set to file (because otherwise they are split up further down
-   * the processing into the visible and hidden context), and only split it
-   * into 2 chunks, one max sized one and the rest (for reasons that are
-   * unclear to me).
-   *
-   * @param chunks Chunks as returned from the server
-   * @return Finer grained chunks.
-   */
-  // visible for testing
-  splitLargeChunks(chunks: DiffContent[]): DiffContent[] {
-    const newChunks = [];
-
-    for (const chunk of chunks) {
-      if (!chunk.ab) {
-        for (const subChunk of this.breakdownChunk(chunk)) {
-          newChunks.push(subChunk);
-        }
-        continue;
-      }
-
-      // If the context is set to "whole file", then break down the shared
-      // chunks so they can be rendered incrementally. Note: this is not
-      // enabled for any other context preference because manipulating the
-      // chunks in this way violates assumptions by the context grouper logic.
-      const MAX_GROUP_SIZE = calcMaxGroupSize(this.asyncThreshold);
-      if (
-        this.context === FULL_CONTEXT &&
-        chunk.ab.length > MAX_GROUP_SIZE * 2
-      ) {
-        // Split large shared chunks in two, where the first is the maximum
-        // group size.
-        newChunks.push({ab: chunk.ab.slice(0, MAX_GROUP_SIZE)});
-        newChunks.push({ab: chunk.ab.slice(MAX_GROUP_SIZE)});
-      } else {
-        newChunks.push(chunk);
-      }
-    }
-    return newChunks;
-  }
-
-  /**
    * In order to show key locations, such as comments, out of the bounds of
    * the selected context, treat them as separate chunks within the model so
    * that the content (and context surrounding it) renders correctly.
@@ -675,60 +547,4 @@
     }
     return normalized;
   }
-
-  /**
-   * If a group is an addition or a removal, break it down into smaller groups
-   * of that type using the MAX_GROUP_SIZE. If the group is a shared chunk
-   * or a delta it is returned as the single element of the result array.
-   */
-  // visible for testing
-  breakdownChunk(chunk: DiffContent): DiffContent[] {
-    let key: 'a' | 'b' | 'ab' | null = null;
-    const {a, b, ab, move_details} = chunk;
-    if (a?.length && !b?.length) {
-      key = 'a';
-    } else if (b?.length && !a?.length) {
-      key = 'b';
-    } else if (ab?.length) {
-      key = 'ab';
-    }
-
-    // Move chunks should not be divided because of move label
-    // positioned in the top of the chunk
-    if (!key || move_details) {
-      return [chunk];
-    }
-
-    const MAX_GROUP_SIZE = calcMaxGroupSize(this.asyncThreshold);
-    return this.breakdown(chunk[key]!, MAX_GROUP_SIZE).map(subChunkLines => {
-      const subChunk: DiffContent = {};
-      subChunk[key!] = subChunkLines;
-      if (chunk.due_to_rebase) {
-        subChunk.due_to_rebase = true;
-      }
-      if (chunk.move_details) {
-        subChunk.move_details = chunk.move_details;
-      }
-      return subChunk;
-    });
-  }
-
-  /**
-   * Given an array and a size, return an array of arrays where no inner array
-   * is larger than that size, preserving the original order.
-   */
-  // visible for testing
-  breakdown<T>(array: T[], size: number): T[][] {
-    if (!array.length) {
-      return [];
-    }
-    if (array.length < size) {
-      return [array];
-    }
-
-    const head = array.slice(0, array.length - size);
-    const tail = array.slice(array.length - size);
-
-    return this.breakdown(head, size).concat([tail]);
-  }
 }
diff --git a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor_test.ts b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor_test.ts
index 3485fe4..f6b9737 100644
--- a/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor_test.ts
+++ b/polygerrit-ui/app/embed/diff/gr-diff-processor/gr-diff-processor_test.ts
@@ -578,71 +578,6 @@
       ]);
     });
 
-    test('breaks down shared chunks w/ whole-file', () => {
-      const maxGroupSize = 128;
-      const size = maxGroupSize * 2 + 5;
-      const ab = Array(size)
-        .fill(0)
-        .map(() => `${Math.random()}`);
-      const content = [{ab}];
-      processor.context = FULL_CONTEXT;
-      const result = processor.splitLargeChunks(content);
-      assert.equal(result.length, 2);
-      assert.deepEqual(result[0].ab, content[0].ab.slice(0, maxGroupSize));
-      assert.deepEqual(result[1].ab, content[0].ab.slice(maxGroupSize));
-    });
-
-    test('breaks down added chunks', () => {
-      const maxGroupSize = 128;
-      const size = maxGroupSize * 2 + 5;
-      const content = Array(size)
-        .fill(0)
-        .map(() => `${Math.random()}`);
-      processor.context = 5;
-      const splitContent = processor
-        .splitLargeChunks([{a: [], b: content}])
-        .map(r => r.b);
-      assert.equal(splitContent.length, 3);
-      assert.deepEqual(splitContent[0], content.slice(0, 5));
-      assert.deepEqual(splitContent[1], content.slice(5, maxGroupSize + 5));
-      assert.deepEqual(splitContent[2], content.slice(maxGroupSize + 5));
-    });
-
-    test('breaks down removed chunks', () => {
-      const maxGroupSize = 128;
-      const size = maxGroupSize * 2 + 5;
-      const content = Array(size)
-        .fill(0)
-        .map(() => `${Math.random()}`);
-      processor.context = 5;
-      const splitContent = processor
-        .splitLargeChunks([{a: content, b: []}])
-        .map(r => r.a);
-      assert.equal(splitContent.length, 3);
-      assert.deepEqual(splitContent[0], content.slice(0, 5));
-      assert.deepEqual(splitContent[1], content.slice(5, maxGroupSize + 5));
-      assert.deepEqual(splitContent[2], content.slice(maxGroupSize + 5));
-    });
-
-    test('does not break down moved chunks', () => {
-      const size = 120 * 2 + 5;
-      const content = Array(size)
-        .fill(0)
-        .map(() => `${Math.random()}`);
-      processor.context = 5;
-      const splitContent = processor
-        .splitLargeChunks([
-          {
-            a: content,
-            b: [],
-            move_details: {changed: false, range: {start: 1, end: 1}},
-          },
-        ])
-        .map(r => r.a);
-      assert.equal(splitContent.length, 1);
-      assert.deepEqual(splitContent[0], content);
-    });
-
     test('does not break-down common chunks w/ context', () => {
       const ab = Array(75)
         .fill(0)
@@ -767,15 +702,6 @@
       ]);
     });
 
-    test('isScrolling paused', async () => {
-      const content = Array(200).fill({ab: ['', '']});
-      processor.isScrolling = true;
-      const promise = processor.process(content);
-      processor.isScrolling = false;
-      const groups = await promise;
-      assert.isAtLeast(groups.length, 3);
-    });
-
     test('image diffs', async () => {
       const content = Array(200).fill({ab: ['', '']});
       options.isBinary = true;
@@ -1053,61 +979,5 @@
         assert.notOk(result[result.length - 1].afterNumber);
       });
     });
-
-    suite('breakdown*', () => {
-      test('breakdownChunk breaks down additions', () => {
-        const breakdownSpy = sinon.spy(processor, 'breakdown');
-        const chunk = {b: ['blah', 'blah', 'blah']};
-        const result = processor.breakdownChunk(chunk);
-        assert.deepEqual(result, [chunk]);
-        assert.isTrue(breakdownSpy.called);
-      });
-
-      test('breakdownChunk keeps due_to_rebase for broken down additions', () => {
-        sinon.spy(processor, 'breakdown');
-        const chunk = {b: ['blah', 'blah', 'blah'], due_to_rebase: true};
-        const result = processor.breakdownChunk(chunk);
-        for (const subResult of result) {
-          assert.isTrue(subResult.due_to_rebase);
-        }
-      });
-
-      test('breakdown common case', () => {
-        const array = 'Lorem ipsum dolor sit amet, suspendisse inceptos'.split(
-          ' '
-        );
-        const size = 3;
-
-        const result = processor.breakdown(array, size);
-
-        for (const subResult of result) {
-          assert.isAtMost(subResult.length, size);
-        }
-        const flattened = result.reduce((a, b) => a.concat(b), []);
-        assert.deepEqual(flattened, array);
-      });
-
-      test('breakdown smaller than size', () => {
-        const array = 'Lorem ipsum dolor sit amet, suspendisse inceptos'.split(
-          ' '
-        );
-        const size = 10;
-        const expected = [array];
-
-        const result = processor.breakdown(array, size);
-
-        assert.deepEqual(result, expected);
-      });
-
-      test('breakdown empty', () => {
-        const array: string[] = [];
-        const size = 10;
-        const expected: string[][] = [];
-
-        const result = processor.breakdown(array, size);
-
-        assert.deepEqual(result, expected);
-      });
-    });
   });
 });
diff --git a/polygerrit-ui/app/services/flags/flags.ts b/polygerrit-ui/app/services/flags/flags.ts
index 4a1fc29..b59d7a8b3 100644
--- a/polygerrit-ui/app/services/flags/flags.ts
+++ b/polygerrit-ui/app/services/flags/flags.ts
@@ -22,5 +22,4 @@
   ML_SUGGESTED_EDIT = 'UiFeature__ml_suggested_edit',
   ML_SUGGESTED_EDIT_V2 = 'UiFeature__ml_suggested_edit_v2',
   REVISION_PARENTS_DATA = 'UiFeature__revision_parents_data',
-  SIMPLIFIED_DIFF_PROCESSOR = 'UiFeature__simplified_diff_processor',
 }