Do large chunk and comment splitting separately

Split _splitCommonGroupsWithComments into two separate methods,
_splitLargeChunks and _splitUnchangedChunksWithComments, to make the
code easier to understand and change. Also fix some parameter types
and names.

Change-Id: I24a333c0fb71089edc84a8f0d4ad45f8e9baf328
diff --git a/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor.js b/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor.js
index 7c6fbc6..a1af8e1 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor.js
+++ b/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor.js
@@ -151,7 +151,8 @@
               sectionIndex: 0,
             };
 
-            content = this._splitCommonGroupsWithComments(content);
+            content = this._splitLargeChunks(content);
+            content = this._splitUnchangedChunksWithComments(content);
 
             let currentBatch = 0;
             const nextStep = () => {
@@ -384,60 +385,78 @@
       return new GrDiffGroup(GrDiffGroup.Type.BOTH, [line]);
     },
 
+
+    /**
+     * Split chunks into smaller chunks of the same kind.
+     *
+     * This is done to prevent doing too much work on the main thread in one
+     * uninterrupted rendering step, which would make the browser unresponsive.
+     *
+     * Note that in the case of unmodified chunks, we only split a chunk
+     * if the context is set to "whole file" (otherwise chunks are split
+     * further down in processing into visible and hidden context), and we
+     * only split it into two chunks: one of the maximum size and one with
+     * the remaining lines (for reasons that are unclear to me).
+     *
+     * @param {!Array<!Object>} chunks Chunks as returned from the server.
+     * @return {!Array<!Object>} Finer-grained chunks.
+     */
+    _splitLargeChunks(chunks) {
+      const newChunks = [];
+
+      for (const chunk of chunks) {
+        if (!chunk.ab) {
+          for (const group of this._breakdownGroup(chunk)) {
+            newChunks.push(group);
+          }
+          continue;
+        }
+
+        // If the context is set to "whole file", then break down the shared
+        // chunks so they can be rendered incrementally. Note: this is not
+        // enabled for any other context preference because manipulating the
+        // chunks in this way violates assumptions by the context grouper logic.
+        if (this.context === -1 && chunk.ab.length > MAX_GROUP_SIZE * 2) {
+          // Split large shared groups in two, where the first is the maximum
+          // group size.
+          newChunks.push({ab: chunk.ab.slice(0, MAX_GROUP_SIZE)});
+          newChunks.push({ab: chunk.ab.slice(MAX_GROUP_SIZE)});
+        } else {
+          newChunks.push(chunk);
+        }
+      }
+      return newChunks;
+    },
+
     /**
      * In order to show comments out of the bounds of the selected context,
      * treat them as separate chunks within the model so that the content (and
      * context surrounding it) renders correctly.
-     * @param {?} content The diff content object. (has to be iterable)
-     * @return {!Object} A new diff content object with regions split up.
+     * @param {!Array<!Object>} chunks DiffContents as returned from the server.
+     * @return {!Array<!Object>} Finer-grained DiffContents.
      */
-    _splitCommonGroupsWithComments(content) {
+    _splitUnchangedChunksWithComments(chunks) {
       const result = [];
       let leftLineNum = 0;
       let rightLineNum = 0;
 
-      // If the context is set to "whole file", then break down the shared
-      // chunks so they can be rendered incrementally. Note: this is not enabled
-      // for any other context preference because manipulating the chunks in
-      // this way violates assumptions by the context grouper logic.
-      if (this.context === -1) {
-        const newContent = [];
-        for (const group of content) {
-          if (group.ab && group.ab.length > MAX_GROUP_SIZE * 2) {
-            // Split large shared groups in two, where the first is the maximum
-            // group size.
-            newContent.push({ab: group.ab.slice(0, MAX_GROUP_SIZE)});
-            newContent.push({ab: group.ab.slice(MAX_GROUP_SIZE)});
-          } else {
-            newContent.push(group);
+      for (const chunk of chunks) {
+        // If it isn't a common chunk, append it as-is and update line numbers.
+        if (!chunk.ab) {
+          if (chunk.a) {
+            leftLineNum += chunk.a.length;
           }
-        }
-        content = newContent;
-      }
-
-      // For each section in the diff.
-      for (let i = 0; i < content.length; i++) {
-        // If it isn't a common group, append it as-is and update line numbers.
-        if (!content[i].ab) {
-          if (content[i].a) {
-            leftLineNum += content[i].a.length;
+          if (chunk.b) {
+            rightLineNum += chunk.b.length;
           }
-          if (content[i].b) {
-            rightLineNum += content[i].b.length;
-          }
-
-          for (const group of this._breakdownGroup(content[i])) {
-            result.push(group);
-          }
-
+          result.push(chunk);
           continue;
         }
 
-        const chunk = content[i].ab;
         let currentChunk = {ab: []};
 
         // For each line in the common group.
-        for (const subChunk of chunk) {
+        for (const line of chunk.ab) {
           leftLineNum++;
           rightLineNum++;
 
@@ -453,10 +472,10 @@
             }
 
             // Add the non-collapse line as its own chunk.
-            result.push({ab: [subChunk]});
+            result.push({ab: [line]});
           } else {
             // Append the current line to the current chunk.
-            currentChunk.ab.push(subChunk);
+            currentChunk.ab.push(line);
           }
         }
 
diff --git a/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor_test.html b/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor_test.html
index 7ccd9f8..186a49e 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor_test.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff-processor/gr-diff-processor_test.html
@@ -280,7 +280,6 @@
           left: {1: true},
           right: {10: true},
         };
-        const lineNums = {left: 0, right: 0};
 
         const content = [
           {
@@ -304,7 +303,7 @@
           },
         ];
         const result =
-            element._splitCommonGroupsWithComments(content, lineNums);
+            element._splitUnchangedChunksWithComments(content);
         assert.deepEqual(result, [
           {
             ab: ['Copyright (C) 2015 The Android Open Source Project'],
@@ -337,28 +336,25 @@
         ]);
       });
 
-      test('breaks-down shared chunks w/ whole-file', () => {
+      test('breaks down shared chunks w/ whole-file', () => {
         const size = 120 * 2 + 5;
-        const lineNums = {left: 0, right: 0};
         const content = [{
           ab: _.times(size, () => { return `${Math.random()}`; }),
         }];
         element.context = -1;
-        const result =
-            element._splitCommonGroupsWithComments(content, lineNums);
+        const result = element._splitLargeChunks(content);
         assert.equal(result.length, 2);
         assert.deepEqual(result[0].ab, content[0].ab.slice(0, 120));
         assert.deepEqual(result[1].ab, content[0].ab.slice(120));
       });
 
       test('does not break-down shared chunks w/ context', () => {
-        const lineNums = {left: 0, right: 0};
         const content = [{
           ab: _.times(75, () => { return `${Math.random()}`; }),
         }];
         element.context = 4;
         const result =
-            element._splitCommonGroupsWithComments(content, lineNums);
+            element._splitUnchangedChunksWithComments(content);
         assert.deepEqual(result, content);
       });