Merge branch 'master' into stable-7.0

* master:
  Lib: Fix ssh value for gpg.format throwing an IllegalArgumentException
  DfsPackFile: Abstract the loading of pack indexes
  PackExtBlockCacheTable: spread extensions over multiple dfs tables
  PackObjectSizeIndex: Read all bytes and use the byte[] directly
  DfsPackFile: Do not set local reverse index ref from cache callback
  Add 4.33 target platform for Eclipse 2024-09
  DfsBlockCacheTable: extract stats get* methods to interface
  Add worktrees read support
  DfsBlockCacheConfig: support configurations for dfs cache tables per extensions
  ssh: Remove .orig file
  DfsPackFile: Enable/disable object size index via DfsReaderOptions

Change-Id: Ie123851a784e8a0f7197543898c0c8fb920e4f31
diff --git a/org.eclipse.jgit.junit.ssh/META-INF/MANIFEST.MF.orig b/org.eclipse.jgit.junit.ssh/META-INF/MANIFEST.MF.orig
deleted file mode 100644
index e835828..0000000
--- a/org.eclipse.jgit.junit.ssh/META-INF/MANIFEST.MF.orig
+++ /dev/null
@@ -1,104 +0,0 @@
-Manifest-Version: 1.0
-Bundle-ManifestVersion: 2
-Bundle-Name: %Bundle-Name
-Automatic-Module-Name: org.eclipse.jgit.junit.ssh
-Bundle-SymbolicName: org.eclipse.jgit.junit.ssh
-Bundle-Version: 7.0.0.qualifier
-Bundle-Localization: OSGI-INF/l10n/plugin
-Bundle-Vendor: %Bundle-Vendor
-Bundle-ActivationPolicy: lazy
-<<<<<<< HEAD
-Bundle-RequiredExecutionEnvironment: JavaSE-17
-Import-Package: org.apache.sshd.common;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.config.keys;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.file.virtualfs;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.helpers;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.io;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.kex;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.keyprovider;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.session;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.signature;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.buffer;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.logging;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.security;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.threads;version="[2.12.0,2.13.0)",
- org.apache.sshd.core;version="[2.12.0,2.13.0)",
- org.apache.sshd.server;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.gss;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.keyboard;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.password;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.command;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.session;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.shell;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.subsystem;version="[2.12.0,2.13.0)",
- org.apache.sshd.sftp;version="[2.12.0,2.13.0)",
- org.apache.sshd.sftp.server;version="[2.12.0,2.13.0)",
-||||||| parent of 9724f9467f (Update org.apache.sshd to 2.13.1)
-Bundle-RequiredExecutionEnvironment: JavaSE-11
-Import-Package: org.apache.sshd.common;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.config.keys;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.file.virtualfs;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.helpers;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.io;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.kex;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.keyprovider;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.session;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.signature;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.buffer;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.logging;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.security;version="[2.12.0,2.13.0)",
- org.apache.sshd.common.util.threads;version="[2.12.0,2.13.0)",
- org.apache.sshd.core;version="[2.12.0,2.13.0)",
- org.apache.sshd.server;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.gss;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.keyboard;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.auth.password;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.command;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.session;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.shell;version="[2.12.0,2.13.0)",
- org.apache.sshd.server.subsystem;version="[2.12.0,2.13.0)",
- org.apache.sshd.sftp;version="[2.12.0,2.13.0)",
- org.apache.sshd.sftp.server;version="[2.12.0,2.13.0)",
-=======
-Bundle-RequiredExecutionEnvironment: JavaSE-11
-Import-Package: org.apache.sshd.common;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.config.keys;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.file.virtualfs;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.helpers;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.io;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.kex;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.keyprovider;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.session;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.signature;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.util.buffer;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.util.logging;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.util.security;version="[2.13.0,2.14.0)",
- org.apache.sshd.common.util.threads;version="[2.13.0,2.14.0)",
- org.apache.sshd.core;version="[2.13.0,2.14.0)",
- org.apache.sshd.server;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.auth;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.auth.gss;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.auth.keyboard;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.auth.password;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.command;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.session;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.shell;version="[2.13.0,2.14.0)",
- org.apache.sshd.server.subsystem;version="[2.13.0,2.14.0)",
- org.apache.sshd.sftp;version="[2.13.0,2.14.0)",
- org.apache.sshd.sftp.server;version="[2.13.0,2.14.0)",
->>>>>>> 9724f9467f (Update org.apache.sshd to 2.13.1)
- org.eclipse.jgit.annotations;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.api;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.api.errors;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.errors;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.junit;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.lib;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.revwalk;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.transport;version="[7.0.0,7.1.0)",
- org.eclipse.jgit.util;version="[7.0.0,7.1.0)",
- org.junit;version="[4.13,5.0.0)",
- org.junit.experimental.theories;version="[4.13,5.0.0)",
- org.slf4j;version="[1.7.0,3.0.0)"
-Export-Package: org.eclipse.jgit.junit.ssh;version="7.0.0"
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.target b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.target
new file mode 100644
index 0000000..fac43dc
--- /dev/null
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.target
@@ -0,0 +1,290 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?pde?>
+<!-- generated with https://github.com/eclipse-cbi/targetplatform-dsl -->
+<target name="jgit-4.33" sequenceNumber="1721077009">
+  <locations>
+    <location includeMode="slicer" includeAllPlatforms="false" includeSource="true" includeConfigurePhase="true" type="InstallableUnit">
+      <unit id="com.jcraft.jsch" version="0.1.55.v20230916-1400"/>
+      <unit id="com.jcraft.jsch.source" version="0.1.55.v20230916-1400"/>
+      <unit id="com.jcraft.jzlib" version="1.1.3.v20230916-1400"/>
+      <unit id="com.jcraft.jzlib.source" version="1.1.3.v20230916-1400"/>
+      <unit id="net.i2p.crypto.eddsa" version="0.3.0"/>
+      <unit id="net.i2p.crypto.eddsa.source" version="0.3.0"/>
+      <unit id="org.apache.ant" version="1.10.14.v20230922-1200"/>
+      <unit id="org.apache.ant.source" version="1.10.14.v20230922-1200"/>
+      <unit id="org.apache.httpcomponents.httpclient" version="4.5.14"/>
+      <unit id="org.apache.httpcomponents.httpclient.source" version="4.5.14"/>
+      <unit id="org.apache.httpcomponents.httpcore" version="4.4.16"/>
+      <unit id="org.apache.httpcomponents.httpcore.source" version="4.4.16"/>
+      <unit id="org.hamcrest.core" version="1.3.0.v20230809-1000"/>
+      <unit id="org.hamcrest.core.source" version="1.3.0.v20230809-1000"/>
+      <unit id="org.hamcrest.library" version="1.3.0.v20230809-1000"/>
+      <unit id="org.hamcrest.library.source" version="1.3.0.v20230809-1000"/>
+      <unit id="org.junit" version="4.13.2.v20230809-1000"/>
+      <unit id="org.junit.source" version="4.13.2.v20230809-1000"/>
+      <unit id="org.objenesis" version="3.4.0"/>
+      <unit id="org.objenesis.source" version="3.4.0"/>
+      <unit id="org.osgi.service.cm" version="1.6.1.202109301733"/>
+      <unit id="org.osgi.service.cm.source" version="1.6.1.202109301733"/>
+      <repository location="https://download.eclipse.org/tools/orbit/simrel/orbit-aggregation/2024-09"/>
+    </location>
+    <location includeMode="slicer" includeAllPlatforms="false" includeSource="true" includeConfigurePhase="true" type="InstallableUnit">
+      <unit id="org.eclipse.osgi" version="0.0.0"/>
+      <repository location="https://download.eclipse.org/staging/2024-09/"/>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="xz">
+      <dependencies>
+        <dependency>
+          <groupId>org.tukaani</groupId>
+          <artifactId>xz</artifactId>
+          <version>1.9</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="slf4j">
+      <dependencies>
+        <dependency>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+          <version>1.7.36</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-simple</artifactId>
+          <version>1.7.36</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="sshd">
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.sshd</groupId>
+          <artifactId>sshd-osgi</artifactId>
+          <version>2.13.1</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.sshd</groupId>
+          <artifactId>sshd-sftp</artifactId>
+          <version>2.13.1</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="mockito">
+      <dependencies>
+        <dependency>
+          <groupId>org.mockito</groupId>
+          <artifactId>mockito-core</artifactId>
+          <version>5.12.0</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="jna">
+      <dependencies>
+        <dependency>
+          <groupId>net.java.dev.jna</groupId>
+          <artifactId>jna</artifactId>
+          <version>5.14.0</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>net.java.dev.jna</groupId>
+          <artifactId>jna-platform</artifactId>
+          <version>5.14.0</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="jetty">
+      <dependencies>
+        <dependency>
+          <groupId>org.eclipse.jetty.ee10</groupId>
+          <artifactId>jetty-ee10-servlet</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-http</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-io</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-security</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-server</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-session</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util-ajax</artifactId>
+          <version>12.0.10</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>jakarta.servlet</groupId>
+          <artifactId>jakarta.servlet-api</artifactId>
+          <version>6.0.0</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="javaewah">
+      <dependencies>
+        <dependency>
+          <groupId>com.googlecode.javaewah</groupId>
+          <artifactId>JavaEWAH</artifactId>
+          <version>1.2.3</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="hamcrest">
+      <dependencies>
+        <dependency>
+          <groupId>org.hamcrest</groupId>
+          <artifactId>hamcrest</artifactId>
+          <version>2.2</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="gson">
+      <dependencies>
+        <dependency>
+          <groupId>com.google.code.gson</groupId>
+          <artifactId>gson</artifactId>
+          <version>2.11.0</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="bytebuddy">
+      <dependencies>
+        <dependency>
+          <groupId>net.bytebuddy</groupId>
+          <artifactId>byte-buddy</artifactId>
+          <version>1.14.17</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>net.bytebuddy</groupId>
+          <artifactId>byte-buddy-agent</artifactId>
+          <version>1.14.17</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="bouncycastle">
+      <dependencies>
+        <dependency>
+          <groupId>org.bouncycastle</groupId>
+          <artifactId>bcpg-jdk18on</artifactId>
+          <version>1.78.1</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.bouncycastle</groupId>
+          <artifactId>bcprov-jdk18on</artifactId>
+          <version>1.78.1</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.bouncycastle</groupId>
+          <artifactId>bcpkix-jdk18on</artifactId>
+          <version>1.78.1</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.bouncycastle</groupId>
+          <artifactId>bcutil-jdk18on</artifactId>
+          <version>1.78.1</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="assertj">
+      <dependencies>
+        <dependency>
+          <groupId>org.assertj</groupId>
+          <artifactId>assertj-core</artifactId>
+          <version>3.26.0</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="args4j">
+      <dependencies>
+        <dependency>
+          <groupId>args4j</groupId>
+          <artifactId>args4j</artifactId>
+          <version>2.37</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+    <location includeDependencyDepth="none" includeDependencyScopes="compile" includeSource="true" missingManifest="error" type="Maven" label="apache">
+      <dependencies>
+        <dependency>
+          <groupId>commons-codec</groupId>
+          <artifactId>commons-codec</artifactId>
+          <version>1.17.0</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-compress</artifactId>
+          <version>1.26.2</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-lang3</artifactId>
+          <version>3.14.0</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>commons-io</groupId>
+          <artifactId>commons-io</artifactId>
+          <version>2.16.1</version>
+          <type>jar</type>
+        </dependency>
+        <dependency>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+          <version>1.3.2</version>
+          <type>jar</type>
+        </dependency>
+      </dependencies>
+    </location>
+  </locations>
+</target>
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.tpd b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.tpd
new file mode 100644
index 0000000..d01a8a9
--- /dev/null
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.33.tpd
@@ -0,0 +1,8 @@
+target "jgit-4.33" with source configurePhase
+
+include "orbit/orbit-4.33.tpd"
+include "maven/dependencies.tpd"
+
+location "https://download.eclipse.org/staging/2024-09/" {
+	org.eclipse.osgi lazy
+}
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.31.tpd b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.31.tpd
index 0554a85..9d00cb4 100644
--- a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.31.tpd
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.31.tpd
@@ -1,4 +1,4 @@
-target "orbit-4.30" with source configurePhase
+target "orbit-4.31" with source configurePhase
 // see https://download.eclipse.org/tools/orbit/downloads/
 
 location "https://download.eclipse.org/tools/orbit/simrel/orbit-aggregation/2023-12" {
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.33.tpd b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.33.tpd
new file mode 100644
index 0000000..8dca4cb
--- /dev/null
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/orbit/orbit-4.33.tpd
@@ -0,0 +1,27 @@
+target "orbit-4.33" with source configurePhase
+// see https://download.eclipse.org/tools/orbit/downloads/
+
+location "https://download.eclipse.org/tools/orbit/simrel/orbit-aggregation/2024-09" {
+	com.jcraft.jsch [0.1.55.v20230916-1400,0.1.55.v20230916-1400]
+	com.jcraft.jsch.source [0.1.55.v20230916-1400,0.1.55.v20230916-1400]
+	com.jcraft.jzlib [1.1.3.v20230916-1400,1.1.3.v20230916-1400]
+	com.jcraft.jzlib.source [1.1.3.v20230916-1400,1.1.3.v20230916-1400]
+	net.i2p.crypto.eddsa [0.3.0,0.3.0]
+	net.i2p.crypto.eddsa.source [0.3.0,0.3.0]
+	org.apache.ant [1.10.14.v20230922-1200,1.10.14.v20230922-1200]
+	org.apache.ant.source [1.10.14.v20230922-1200,1.10.14.v20230922-1200]
+	org.apache.httpcomponents.httpclient [4.5.14,4.5.14]
+	org.apache.httpcomponents.httpclient.source [4.5.14,4.5.14]
+	org.apache.httpcomponents.httpcore [4.4.16,4.4.16]
+	org.apache.httpcomponents.httpcore.source [4.4.16,4.4.16]
+	org.hamcrest.core [1.3.0.v20230809-1000,1.3.0.v20230809-1000]
+	org.hamcrest.core.source [1.3.0.v20230809-1000,1.3.0.v20230809-1000]
+	org.hamcrest.library [1.3.0.v20230809-1000,1.3.0.v20230809-1000]
+	org.hamcrest.library.source [1.3.0.v20230809-1000,1.3.0.v20230809-1000]
+	org.junit [4.13.2.v20230809-1000,4.13.2.v20230809-1000]
+	org.junit.source [4.13.2.v20230809-1000,4.13.2.v20230809-1000]
+	org.objenesis [3.4,3.4]
+	org.objenesis.source [3.4,3.4]
+	org.osgi.service.cm [1.6.1.202109301733,1.6.1.202109301733]
+	org.osgi.service.cm.source [1.6.1.202109301733,1.6.1.202109301733]
+}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
index 52f40c2..f5de704 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
@@ -94,7 +94,7 @@ private void list() throws IOException, ConfigInvalidException {
 		if (global || isListAll())
 			list(SystemReader.getInstance().openUserConfig(null, fs));
 		if (local || isListAll())
-			list(new FileBasedConfig(fs.resolve(getRepository().getDirectory(),
+			list(new FileBasedConfig(fs.resolve(getRepository().getCommonDirectory(),
 					Constants.CONFIG), fs));
 	}
 
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
index b937b1f..4c971ff 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/EolRepositoryTest.java
@@ -559,7 +559,7 @@ private void setupGitAndDoHardReset(AutoCRLF autoCRLF, EOL eol,
 
 		}
 		if (infoAttributesContent != null) {
-			File f = new File(db.getDirectory(), Constants.INFO_ATTRIBUTES);
+			File f = new File(db.getCommonDirectory(), Constants.INFO_ATTRIBUTES);
 			write(f, infoAttributesContent);
 		}
 		config.save();
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java
new file mode 100644
index 0000000..3b60e1b
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LinkedWorktreeTest.java
@@ -0,0 +1,192 @@
+/*
+ * Copyright (C) 2024, Broadcom and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * https://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+package org.eclipse.jgit.api;
+
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Collection;
+import java.util.Iterator;
+
+import org.eclipse.jgit.internal.storage.file.FileRepository;
+import org.eclipse.jgit.junit.JGitTestUtil;
+import org.eclipse.jgit.junit.RepositoryTestCase;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.util.FS;
+import org.eclipse.jgit.util.FS.ExecutionResult;
+import org.eclipse.jgit.util.RawParseUtils;
+import org.eclipse.jgit.util.TemporaryBuffer;
+import org.junit.Test;
+
+public class LinkedWorktreeTest extends RepositoryTestCase {
+
+	@Override
+	public void setUp() throws Exception {
+		super.setUp();
+
+		try (Git git = new Git(db)) {
+			git.commit().setMessage("Initial commit").call();
+		}
+	}
+
+	@Test
+	public void testWeCanReadFromLinkedWorktreeFromBare() throws Exception {
+		FS fs = db.getFS();
+		File directory = trash.getParentFile();
+		String dbDirName = db.getWorkTree().getName();
+		cloneBare(fs, directory, dbDirName, "bare");
+		File bareDirectory = new File(directory, "bare");
+		worktreeAddExisting(fs, bareDirectory, "master");
+
+		File worktreesDir = new File(bareDirectory, "worktrees");
+		File masterWorktreesDir = new File(worktreesDir, "master");
+
+		FileRepository repository = new FileRepository(masterWorktreesDir);
+		try (Git git = new Git(repository)) {
+			ObjectId objectId = repository.resolve(HEAD);
+			assertNotNull(objectId);
+
+			Iterator<RevCommit> log = git.log().all().call().iterator();
+			assertTrue(log.hasNext());
+			assertTrue("Initial commit".equals(log.next().getShortMessage()));
+
+			// we have reflog entry
+			// depending on git version we either have one or
+			// two entries where extra is zeroid entry with
+			// same message or no message
+			Collection<ReflogEntry> reflog = git.reflog().call();
+			assertNotNull(reflog);
+			assertTrue(reflog.size() > 0);
+			ReflogEntry[] reflogs = reflog.toArray(new ReflogEntry[0]);
+			assertEquals(reflogs[reflogs.length - 1].getComment(),
+					"reset: moving to HEAD");
+
+			// index works with file changes
+			File masterDir = new File(directory, "master");
+			File testFile = new File(masterDir, "test");
+
+			Status status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 0);
+			assertTrue(status.getUntracked().size() == 0);
+
+			JGitTestUtil.write(testFile, "test");
+			status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 0);
+			assertTrue(status.getUntracked().size() == 1);
+
+			git.add().addFilepattern("test").call();
+			status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 1);
+			assertTrue(status.getUntracked().size() == 0);
+		}
+	}
+
+	@Test
+	public void testWeCanReadFromLinkedWorktreeFromNonBare() throws Exception {
+		FS fs = db.getFS();
+		worktreeAddNew(fs, db.getWorkTree(), "wt");
+
+		File worktreesDir = new File(db.getDirectory(), "worktrees");
+		File masterWorktreesDir = new File(worktreesDir, "wt");
+
+		FileRepository repository = new FileRepository(masterWorktreesDir);
+		try (Git git = new Git(repository)) {
+			ObjectId objectId = repository.resolve(HEAD);
+			assertNotNull(objectId);
+
+			Iterator<RevCommit> log = git.log().all().call().iterator();
+			assertTrue(log.hasNext());
+			assertTrue("Initial commit".equals(log.next().getShortMessage()));
+
+			// we have reflog entry
+			Collection<ReflogEntry> reflog = git.reflog().call();
+			assertNotNull(reflog);
+			assertTrue(reflog.size() > 0);
+			ReflogEntry[] reflogs = reflog.toArray(new ReflogEntry[0]);
+			assertEquals(reflogs[reflogs.length - 1].getComment(),
+					"reset: moving to HEAD");
+
+			// index works with file changes
+			File directory = trash.getParentFile();
+			File wtDir = new File(directory, "wt");
+			File testFile = new File(wtDir, "test");
+
+			Status status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 0);
+			assertTrue(status.getUntracked().size() == 0);
+
+			JGitTestUtil.write(testFile, "test");
+			status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 0);
+			assertTrue(status.getUntracked().size() == 1);
+
+			git.add().addFilepattern("test").call();
+			status = git.status().call();
+			assertTrue(status.getUncommittedChanges().size() == 1);
+			assertTrue(status.getUntracked().size() == 0);
+		}
+
+	}
+
+	private static void cloneBare(FS fs, File directory, String from, String to) throws IOException, InterruptedException {
+		ProcessBuilder builder = fs.runInShell("git",
+				new String[] { "clone", "--bare", from, to });
+		builder.directory(directory);
+		builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+		StringBuilder input = new StringBuilder();
+		ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+				input.toString().getBytes(StandardCharsets.UTF_8)));
+		String stdOut = toString(result.getStdout());
+		String errorOut = toString(result.getStderr());
+		assertNotNull(stdOut);
+		assertNotNull(errorOut);
+	}
+
+	private static void worktreeAddExisting(FS fs, File directory, String name) throws IOException, InterruptedException {
+		ProcessBuilder builder = fs.runInShell("git",
+				new String[] { "worktree", "add", "../" + name, name });
+		builder.directory(directory);
+		builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+		StringBuilder input = new StringBuilder();
+		ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+				input.toString().getBytes(StandardCharsets.UTF_8)));
+		String stdOut = toString(result.getStdout());
+		String errorOut = toString(result.getStderr());
+		assertNotNull(stdOut);
+		assertNotNull(errorOut);
+	}
+
+	private static void worktreeAddNew(FS fs, File directory, String name) throws IOException, InterruptedException {
+		ProcessBuilder builder = fs.runInShell("git",
+				new String[] { "worktree", "add", "-b", name, "../" + name, "master"});
+		builder.directory(directory);
+		builder.environment().put("HOME", fs.userHome().getAbsolutePath());
+		StringBuilder input = new StringBuilder();
+		ExecutionResult result = fs.execute(builder, new ByteArrayInputStream(
+				input.toString().getBytes(StandardCharsets.UTF_8)));
+		String stdOut = toString(result.getStdout());
+		String errorOut = toString(result.getStderr());
+		assertNotNull(stdOut);
+		assertNotNull(errorOut);
+	}
+
+	private static String toString(TemporaryBuffer b) throws IOException {
+		return RawParseUtils.decode(b.toByteArray());
+	}
+
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
index 7fb98ec..c41dd81 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
@@ -584,7 +584,7 @@ private void setupRepo(
 
 		}
 		if (infoAttributesContent != null) {
-			File f = new File(db.getDirectory(), Constants.INFO_ATTRIBUTES);
+			File f = new File(db.getCommonDirectory(), Constants.INFO_ATTRIBUTES);
 			write(f, infoAttributesContent);
 		}
 		config.save();
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
index 2df0ba1..6ca0ff6 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
@@ -38,11 +38,27 @@
 
 package org.eclipse.jgit.internal.storage.dfs;
 
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_CACHE_PREFIX;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_LIMIT;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_SIZE;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_CONCURRENCY_LEVEL;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_PACK_EXTENSIONS;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_RATIO;
 import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.junit.Assert.assertThrows;
 
+import java.util.List;
+import java.util.stream.Collectors;
+
 import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.eclipse.jgit.lib.Config;
 import org.junit.Test;
 
 public class DfsBlockCacheConfigTest {
@@ -80,4 +96,147 @@ public void validBlockSize() {
 
 		assertThat(config.getBlockSize(), is(65536));
 	}
+
+	@Test
+	public void fromConfigs() {
+		Config config = new Config();
+		config.setLong(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_BLOCK_LIMIT, 50 * 1024);
+		config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_BLOCK_SIZE, 1024);
+		config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_CONCURRENCY_LEVEL, 3);
+		config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_STREAM_RATIO, "0.5");
+
+		DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig()
+				.fromConfig(config);
+		assertThat(cacheConfig.getBlockLimit(), is(50L * 1024L));
+		assertThat(cacheConfig.getBlockSize(), is(1024));
+		assertThat(cacheConfig.getConcurrencyLevel(), is(3));
+		assertThat(cacheConfig.getStreamRatio(), closeTo(0.5, 0.0001));
+	}
+
+	@Test
+	public void fromConfig_blockLimitNotAMultipleOfBlockSize_throws() {
+		Config config = new Config();
+		config.setLong(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_BLOCK_LIMIT, 1025);
+		config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_BLOCK_SIZE, 1024);
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	@Test
+	public void fromConfig_streamRatioInvalidFormat_throws() {
+		Config config = new Config();
+		config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION,
+				CONFIG_KEY_STREAM_RATIO, "0.a5");
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	@Test
+	public void fromConfig_generatesDfsBlockCachePackExtConfigs() {
+		Config config = new Config();
+		addPackExtConfigEntry(config, "pack", List.of(PackExt.PACK),
+				/* blockLimit= */ 20 * 512, /* blockSize= */ 512);
+
+		addPackExtConfigEntry(config, "bitmap", List.of(PackExt.BITMAP_INDEX),
+				/* blockLimit= */ 25 * 1024, /* blockSize= */ 1024);
+
+		addPackExtConfigEntry(config, "index",
+				List.of(PackExt.INDEX, PackExt.OBJECT_SIZE_INDEX,
+						PackExt.REVERSE_INDEX),
+				/* blockLimit= */ 30 * 1024, /* blockSize= */ 1024);
+
+		DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig()
+				.fromConfig(config);
+		var configs = cacheConfig.getPackExtCacheConfigurations();
+		assertThat(configs, hasSize(3));
+		var packConfig = getConfigForExt(configs, PackExt.PACK);
+		assertThat(packConfig.getBlockLimit(), is(20L * 512L));
+		assertThat(packConfig.getBlockSize(), is(512));
+
+		var bitmapConfig = getConfigForExt(configs, PackExt.BITMAP_INDEX);
+		assertThat(bitmapConfig.getBlockLimit(), is(25L * 1024L));
+		assertThat(bitmapConfig.getBlockSize(), is(1024));
+
+		var indexConfig = getConfigForExt(configs, PackExt.INDEX);
+		assertThat(indexConfig.getBlockLimit(), is(30L * 1024L));
+		assertThat(indexConfig.getBlockSize(), is(1024));
+		assertThat(getConfigForExt(configs, PackExt.OBJECT_SIZE_INDEX),
+				is(indexConfig));
+		assertThat(getConfigForExt(configs, PackExt.REVERSE_INDEX),
+				is(indexConfig));
+	}
+
+	@Test
+	public void fromConfigs_dfsBlockCachePackExtConfigWithDuplicateExtensions_throws() {
+		Config config = new Config();
+		config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+				CONFIG_KEY_PACK_EXTENSIONS, PackExt.PACK.name());
+
+		config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack2",
+				CONFIG_KEY_PACK_EXTENSIONS, PackExt.PACK.name());
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	@Test
+	public void fromConfigs_dfsBlockCachePackExtConfigWithEmptyExtensions_throws() {
+		Config config = new Config();
+		config.setString(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+				CONFIG_KEY_PACK_EXTENSIONS, "");
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	@Test
+	public void fromConfigs_dfsBlockCachePackExtConfigWithNoExtensions_throws() {
+		Config config = new Config();
+		config.setInt(CONFIG_CORE_SECTION, CONFIG_DFS_CACHE_PREFIX + "pack1",
+				CONFIG_KEY_BLOCK_SIZE, 0);
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	@Test
+	public void fromConfigs_dfsBlockCachePackExtConfigWithUnknownExtensions_throws() {
+		Config config = new Config();
+		config.setString(CONFIG_CORE_SECTION,
+				CONFIG_DFS_CACHE_PREFIX + "unknownExt",
+				CONFIG_KEY_PACK_EXTENSIONS, "NotAKnownExt");
+
+		assertThrows(IllegalArgumentException.class,
+				() -> new DfsBlockCacheConfig().fromConfig(config));
+	}
+
+	private static void addPackExtConfigEntry(Config config, String configName,
+			List<PackExt> packExts, long blockLimit, int blockSize) {
+		String packExtConfigName = CONFIG_DFS_CACHE_PREFIX + configName;
+		config.setString(CONFIG_CORE_SECTION, packExtConfigName,
+				CONFIG_KEY_PACK_EXTENSIONS, packExts.stream().map(PackExt::name)
+						.collect(Collectors.joining(" ")));
+		config.setLong(CONFIG_CORE_SECTION, packExtConfigName,
+				CONFIG_KEY_BLOCK_LIMIT, blockLimit);
+		config.setInt(CONFIG_CORE_SECTION, packExtConfigName,
+				CONFIG_KEY_BLOCK_SIZE, blockSize);
+	}
+
+	private static DfsBlockCacheConfig getConfigForExt(
+			List<DfsBlockCachePackExtConfig> configs, PackExt packExt) {
+		for (DfsBlockCachePackExtConfig config : configs) {
+			if (config.getPackExts().contains(packExt)) {
+				return config.getPackExtCacheConfiguration();
+			}
+		}
+		return null;
+	}
 }
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
index e193de9..2be11d3 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsGarbageCollectorTest.java
@@ -18,6 +18,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.concurrent.TimeUnit;
+
 import org.eclipse.jgit.internal.storage.commitgraph.CommitGraph;
 import org.eclipse.jgit.internal.storage.commitgraph.CommitGraphWriter;
 import org.eclipse.jgit.internal.storage.dfs.DfsObjDatabase.PackSource;
@@ -1171,6 +1172,7 @@ public void objectSizeIdx_reachableBlob_bigEnough_indexed() throws Exception {
 
 		gcWithObjectSizeIndex(10);
 
+		odb.getReaderOptions().setUseObjectSizeIndex(true);
 		DfsReader reader = odb.newReader();
 		DfsPackFile gcPack = findFirstBySource(odb.getPacks(), GC);
 		assertTrue(gcPack.hasObjectSizeIndex(reader));
@@ -1191,6 +1193,7 @@ public void objectSizeIdx_reachableBlob_tooSmall_notIndexed() throws Exception {
 
 		gcWithObjectSizeIndex(10);
 
+		odb.getReaderOptions().setUseObjectSizeIndex(true);
 		DfsReader reader = odb.newReader();
 		DfsPackFile gcPack = findFirstBySource(odb.getPacks(), GC);
 		assertTrue(gcPack.hasObjectSizeIndex(reader));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
index b84a0b0..0b558ed 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsInserterTest.java
@@ -295,6 +295,7 @@ public void testObjectSizePopulated() throws IOException {
 	public void testObjectSizeIndexOnInsert() throws IOException {
 		db.getConfig().setInt(CONFIG_PACK_SECTION, null,
 				CONFIG_KEY_MIN_BYTES_OBJ_SIZE_INDEX, 0);
+		db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
 
 		byte[] contents = Constants.encode("foo");
 		ObjectId fooId;
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
index d21e51f..bc851f8 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackFileTest.java
@@ -126,6 +126,7 @@ public void testLoadObjectSizeIndex() throws IOException {
 		setObjectSizeIndexMinBytes(0);
 		ObjectId blobId = setupPack(512, 800);
 
+		db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
 		DfsReader reader = db.getObjectDatabase().newReader();
 		DfsPackFile pack = db.getObjectDatabase().getPacks()[0];
 		assertTrue(pack.hasObjectSizeIndex(reader));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
index 130af27..c1cd231 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsPackParserTest.java
@@ -61,6 +61,7 @@ public void parse_writeObjSizeIdx() throws IOException {
 			ins.flush();
 		}
 
+		repo.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
 		DfsReader reader = repo.getObjectDatabase().newReader();
 		PackList packList = repo.getObjectDatabase().getPackList();
 		assertEquals(1, packList.packs.length);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
index 254184e..a0c2289 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsReaderTest.java
@@ -37,6 +37,8 @@ public class DfsReaderTest {
 	@Before
 	public void setUp() {
 		db = new InMemoryRepository(new DfsRepositoryDescription("test"));
+		// These tests assume the object size index is enabled.
+		db.getObjectDatabase().getReaderOptions().setUseObjectSizeIndex(true);
 	}
 
 	@Test
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java
new file mode 100644
index 0000000..d506bfb
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTableTest.java
@@ -0,0 +1,588 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.when;
+
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.Ref;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.RefLoader;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheTable.DfsBlockCacheStats;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+public class PackExtBlockCacheTableTest {
+	@Test
+	public void fromBlockCacheConfigs_createsDfsPackExtBlockCacheTables() {
+		DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig();
+		cacheConfig.setPackExtCacheConfigurations(
+				List.of(new DfsBlockCachePackExtConfig(EnumSet.of(PackExt.PACK),
+						new DfsBlockCacheConfig())));
+		assertNotNull(
+				PackExtBlockCacheTable.fromBlockCacheConfigs(cacheConfig));
+	}
+
+	@Test
+	public void fromBlockCacheConfigs_noPackExtConfigurationGiven_packExtCacheConfigurationsIsEmpty_throws() {
+		DfsBlockCacheConfig cacheConfig = new DfsBlockCacheConfig();
+		cacheConfig.setPackExtCacheConfigurations(List.of());
+		assertThrows(IllegalArgumentException.class,
+				() -> PackExtBlockCacheTable
+						.fromBlockCacheConfigs(cacheConfig));
+	}
+
+	@Test
+	public void hasBlock0_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey streamKey = new TestKey(PackExt.BITMAP_INDEX);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.hasBlock0(any(DfsStreamKey.class)))
+				.thenReturn(true);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertTrue(tables.hasBlock0(streamKey));
+	}
+
+	@Test
+	public void hasBlock0_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey streamKey = new TestKey(PackExt.PACK);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.hasBlock0(any(DfsStreamKey.class)))
+				.thenReturn(true);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertTrue(tables.hasBlock0(streamKey));
+	}
+
+	@Test
+	public void getOrLoad_packExtMapsToCacheTable_callsBitmapIndexCacheTable()
+			throws Exception {
+		BlockBasedFile blockBasedFile = new BlockBasedFile(null,
+				mock(DfsPackDescription.class), PackExt.BITMAP_INDEX) {
+		};
+		DfsBlock dfsBlock = mock(DfsBlock.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.getOrLoad(any(BlockBasedFile.class),
+				anyLong(), any(DfsReader.class),
+				any(DfsBlockCache.ReadableChannelSupplier.class)))
+				.thenReturn(mock(DfsBlock.class));
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.getOrLoad(any(BlockBasedFile.class),
+				anyLong(), any(DfsReader.class),
+				any(DfsBlockCache.ReadableChannelSupplier.class)))
+				.thenReturn(dfsBlock);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(
+				tables.getOrLoad(blockBasedFile, 0, mock(DfsReader.class),
+						mock(DfsBlockCache.ReadableChannelSupplier.class)),
+				sameInstance(dfsBlock));
+	}
+
+	@Test
+	public void getOrLoad_packExtDoesNotMapToCacheTable_callsDefaultCache()
+			throws Exception {
+		BlockBasedFile blockBasedFile = new BlockBasedFile(null,
+				mock(DfsPackDescription.class), PackExt.PACK) {
+		};
+		DfsBlock dfsBlock = mock(DfsBlock.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.getOrLoad(any(BlockBasedFile.class),
+				anyLong(), any(DfsReader.class),
+				any(DfsBlockCache.ReadableChannelSupplier.class)))
+				.thenReturn(dfsBlock);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.getOrLoad(any(BlockBasedFile.class),
+				anyLong(), any(DfsReader.class),
+				any(DfsBlockCache.ReadableChannelSupplier.class)))
+				.thenReturn(mock(DfsBlock.class));
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(
+				tables.getOrLoad(blockBasedFile, 0, mock(DfsReader.class),
+						mock(DfsBlockCache.ReadableChannelSupplier.class)),
+				sameInstance(dfsBlock));
+	}
+
+	@Test
+	public void getOrLoadRef_packExtMapsToCacheTable_callsBitmapIndexCacheTable()
+			throws Exception {
+		Ref<Integer> ref = mock(Ref.class);
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+				anyLong(), any(RefLoader.class))).thenReturn(mock(Ref.class));
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+				anyLong(), any(RefLoader.class))).thenReturn(ref);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.getOrLoadRef(dfsStreamKey, 0, mock(RefLoader.class)),
+				sameInstance(ref));
+	}
+
+	@Test
+	public void getOrLoadRef_packExtDoesNotMapToCacheTable_callsDefaultCache()
+			throws Exception {
+		Ref<Integer> ref = mock(Ref.class);
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+				anyLong(), any(RefLoader.class))).thenReturn(ref);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.getOrLoadRef(any(DfsStreamKey.class),
+				anyLong(), any(RefLoader.class))).thenReturn(mock(Ref.class));
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.getOrLoadRef(dfsStreamKey, 0, mock(RefLoader.class)),
+				sameInstance(ref));
+	}
+
+	@Test
+	public void putDfsBlock_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+		DfsBlock dfsBlock = new DfsBlock(dfsStreamKey, 0, new byte[0]);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		tables.put(dfsBlock);
+		Mockito.verify(bitmapIndexCacheTable, times(1)).put(dfsBlock);
+	}
+
+	@Test
+	public void putDfsBlock_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+		DfsBlock dfsBlock = new DfsBlock(dfsStreamKey, 0, new byte[0]);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		tables.put(dfsBlock);
+		Mockito.verify(defaultBlockCacheTable, times(1)).put(dfsBlock);
+	}
+
+	@Test
+	public void putDfsStreamKey_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.put(any(DfsStreamKey.class), anyLong(),
+				anyLong(), anyInt())).thenReturn(mock(Ref.class));
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.put(any(DfsStreamKey.class), anyLong(),
+				anyLong(), anyInt())).thenReturn(ref);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.put(dfsStreamKey, 0, 0, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void putDfsStreamKey_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.put(any(DfsStreamKey.class), anyLong(),
+				anyLong(), anyInt())).thenReturn(ref);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.put(any(DfsStreamKey.class), anyLong(),
+				anyLong(), anyInt())).thenReturn(mock(Ref.class));
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.put(dfsStreamKey, 0, 0, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void putRef_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+				anyInt())).thenReturn(mock(Ref.class));
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+				anyInt())).thenReturn(ref);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.putRef(dfsStreamKey, 0, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void putRef_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+				anyInt())).thenReturn(ref);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.putRef(any(DfsStreamKey.class), anyLong(),
+				anyInt())).thenReturn(mock(Ref.class));
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.putRef(dfsStreamKey, 0, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void contains_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey streamKey = new TestKey(PackExt.BITMAP_INDEX);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.contains(any(DfsStreamKey.class), anyLong()))
+				.thenReturn(true);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertTrue(tables.contains(streamKey, 0));
+	}
+
+	@Test
+	public void contains_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey streamKey = new TestKey(PackExt.PACK);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.contains(any(DfsStreamKey.class),
+				anyLong())).thenReturn(true);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertTrue(tables.contains(streamKey, 0));
+	}
+
+	@Test
+	public void get_packExtMapsToCacheTable_callsBitmapIndexCacheTable() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.BITMAP_INDEX);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.get(any(DfsStreamKey.class), anyLong()))
+				.thenReturn(mock(Ref.class));
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.get(any(DfsStreamKey.class), anyLong()))
+				.thenReturn(ref);
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.get(dfsStreamKey, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void get_packExtDoesNotMapToCacheTable_callsDefaultCache() {
+		DfsStreamKey dfsStreamKey = new TestKey(PackExt.PACK);
+		Ref<Integer> ref = mock(Ref.class);
+		DfsBlockCacheTable defaultBlockCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(defaultBlockCacheTable.get(any(DfsStreamKey.class), anyLong()))
+				.thenReturn(ref);
+		DfsBlockCacheTable bitmapIndexCacheTable = mock(
+				DfsBlockCacheTable.class);
+		when(bitmapIndexCacheTable.get(any(DfsStreamKey.class), anyLong()))
+				.thenReturn(mock(Ref.class));
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable.fromCacheTables(
+				defaultBlockCacheTable,
+				Map.of(PackExt.BITMAP_INDEX, bitmapIndexCacheTable));
+
+		assertThat(tables.get(dfsStreamKey, 0), sameInstance(ref));
+	}
+
+	@Test
+	public void getBlockCacheStats_getCurrentSize_consolidatesAllTableCurrentSizes() {
+		long[] currentSizes = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		packStats.addToLiveBytes(new TestKey(PackExt.PACK), 5);
+		currentSizes[PackExt.PACK.getPosition()] = 5;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		bitmapStats.addToLiveBytes(new TestKey(PackExt.BITMAP_INDEX), 6);
+		currentSizes[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		indexStats.addToLiveBytes(new TestKey(PackExt.INDEX), 7);
+		currentSizes[PackExt.INDEX.getPosition()] = 7;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getCurrentSize(),
+				currentSizes);
+	}
+
+	@Test
+	public void getBlockCacheStats_GetHitCount_consolidatesAllTableHitCounts() {
+		long[] hitCounts = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		incrementCounter(5,
+				() -> packStats.incrementHit(new TestKey(PackExt.PACK)));
+		hitCounts[PackExt.PACK.getPosition()] = 5;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		incrementCounter(6, () -> bitmapStats
+				.incrementHit(new TestKey(PackExt.BITMAP_INDEX)));
+		hitCounts[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		incrementCounter(7,
+				() -> indexStats.incrementHit(new TestKey(PackExt.INDEX)));
+		hitCounts[PackExt.INDEX.getPosition()] = 7;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getHitCount(), hitCounts);
+	}
+
+	@Test
+	public void getBlockCacheStats_getMissCount_consolidatesAllTableMissCounts() {
+		long[] missCounts = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		incrementCounter(5,
+				() -> packStats.incrementMiss(new TestKey(PackExt.PACK)));
+		missCounts[PackExt.PACK.getPosition()] = 5;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		incrementCounter(6, () -> bitmapStats
+				.incrementMiss(new TestKey(PackExt.BITMAP_INDEX)));
+		missCounts[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		incrementCounter(7,
+				() -> indexStats.incrementMiss(new TestKey(PackExt.INDEX)));
+		missCounts[PackExt.INDEX.getPosition()] = 7;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getMissCount(),
+				missCounts);
+	}
+
+	@Test
+	public void getBlockCacheStats_getTotalRequestCount_consolidatesAllTableTotalRequestCounts() {
+		long[] totalRequestCounts = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		incrementCounter(5, () -> {
+			packStats.incrementHit(new TestKey(PackExt.PACK));
+			packStats.incrementMiss(new TestKey(PackExt.PACK));
+		});
+		totalRequestCounts[PackExt.PACK.getPosition()] = 10;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		incrementCounter(6, () -> {
+			bitmapStats.incrementHit(new TestKey(PackExt.BITMAP_INDEX));
+			bitmapStats.incrementMiss(new TestKey(PackExt.BITMAP_INDEX));
+		});
+		totalRequestCounts[PackExt.BITMAP_INDEX.getPosition()] = 12;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		incrementCounter(7, () -> {
+			indexStats.incrementHit(new TestKey(PackExt.INDEX));
+			indexStats.incrementMiss(new TestKey(PackExt.INDEX));
+		});
+		totalRequestCounts[PackExt.INDEX.getPosition()] = 14;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getTotalRequestCount(),
+				totalRequestCounts);
+	}
+
+	@Test
+	public void getBlockCacheStats_getHitRatio_consolidatesAllTableHitRatios() {
+		long[] hitRatios = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		incrementCounter(5,
+				() -> packStats.incrementHit(new TestKey(PackExt.PACK)));
+		hitRatios[PackExt.PACK.getPosition()] = 100;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		incrementCounter(6, () -> {
+			bitmapStats.incrementHit(new TestKey(PackExt.BITMAP_INDEX));
+			bitmapStats.incrementMiss(new TestKey(PackExt.BITMAP_INDEX));
+		});
+		hitRatios[PackExt.BITMAP_INDEX.getPosition()] = 50;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		incrementCounter(7,
+				() -> indexStats.incrementMiss(new TestKey(PackExt.INDEX)));
+		hitRatios[PackExt.INDEX.getPosition()] = 0;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getHitRatio(), hitRatios);
+	}
+
+	@Test
+	public void getBlockCacheStats_getEvictions_consolidatesAllTableEvictions() {
+		long[] evictions = createEmptyStatsArray();
+
+		DfsBlockCacheStats packStats = new DfsBlockCacheStats();
+		incrementCounter(5,
+				() -> packStats.incrementEvict(new TestKey(PackExt.PACK)));
+		evictions[PackExt.PACK.getPosition()] = 5;
+
+		DfsBlockCacheStats bitmapStats = new DfsBlockCacheStats();
+		incrementCounter(6, () -> bitmapStats
+				.incrementEvict(new TestKey(PackExt.BITMAP_INDEX)));
+		evictions[PackExt.BITMAP_INDEX.getPosition()] = 6;
+
+		DfsBlockCacheStats indexStats = new DfsBlockCacheStats();
+		incrementCounter(7,
+				() -> indexStats.incrementEvict(new TestKey(PackExt.INDEX)));
+		evictions[PackExt.INDEX.getPosition()] = 7;
+
+		PackExtBlockCacheTable tables = PackExtBlockCacheTable
+				.fromCacheTables(cacheTableWithStats(packStats),
+						Map.of(PackExt.BITMAP_INDEX,
+								cacheTableWithStats(bitmapStats), PackExt.INDEX,
+								cacheTableWithStats(indexStats)));
+
+		assertArrayEquals(tables.getBlockCacheStats().getEvictions(),
+				evictions);
+	}
+
+	private static void incrementCounter(int amount, Runnable fn) {
+		for (int i = 0; i < amount; i++) {
+			fn.run();
+		}
+	}
+
+	private static long[] createEmptyStatsArray() {
+		return new long[PackExt.values().length];
+	}
+
+	private static DfsBlockCacheTable cacheTableWithStats(
+			DfsBlockCacheStats dfsBlockCacheStats) {
+		DfsBlockCacheTable cacheTable = mock(DfsBlockCacheTable.class);
+		when(cacheTable.getBlockCacheStats()).thenReturn(dfsBlockCacheStats);
+		return cacheTable;
+	}
+
+	private static class TestKey extends DfsStreamKey {
+		TestKey(PackExt packExt) {
+			super(0, packExt);
+		}
+
+		@Override
+		public boolean equals(Object o) {
+			return false;
+		}
+	}
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
index daf4382..1af42cb 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
@@ -171,7 +171,7 @@ public void packedRefsFileIsSorted() throws IOException {
 			assertEquals(c2.getResult(), ReceiveCommand.Result.OK);
 		}
 
-		File packed = new File(diskRepo.getDirectory(), "packed-refs");
+		File packed = new File(diskRepo.getCommonDirectory(), "packed-refs");
 		String packedStr = new String(Files.readAllBytes(packed.toPath()),
 				UTF_8);
 
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
index 8baa3cc..c572955 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcPackRefsTest.java
@@ -58,7 +58,7 @@ public void emptyRefDirectoryDeleted() throws Exception {
 		String ref = "dir/ref";
 		tr.branch(ref).commit().create();
 		String name = repo.findRef(ref).getName();
-		Path dir = repo.getDirectory().toPath().resolve(name).getParent();
+		Path dir = repo.getCommonDirectory().toPath().resolve(name).getParent();
 		assertNotNull(dir);
 		gc.packRefs();
 		assertFalse(Files.exists(dir));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
index e6c1ee5..29f180d 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcReflogTest.java
@@ -30,7 +30,7 @@ public void testPruneNone() throws Exception {
 		BranchBuilder bb = tr.branch("refs/heads/master");
 		bb.commit().add("A", "A").add("B", "B").create();
 		bb.commit().add("A", "A2").add("B", "B2").create();
-		new File(repo.getDirectory(), Constants.LOGS + "/refs/heads/master")
+		new File(repo.getCommonDirectory(), Constants.LOGS + "/refs/heads/master")
 				.delete();
 		stats = gc.getStatistics();
 		assertEquals(8, stats.numberOfLooseObjects);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
index 2bafde6..baa0182 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
@@ -90,25 +90,26 @@ public void refDirectorySetup() throws Exception {
 	@Test
 	public void testCreate() throws IOException {
 		// setUp above created the directory. We just have to test it.
-		File d = diskRepo.getDirectory();
+		File gitDir = diskRepo.getDirectory();
+		File commonDir = diskRepo.getCommonDirectory();
 		assertSame(diskRepo, refdir.getRepository());
 
-		assertTrue(new File(d, "refs").isDirectory());
-		assertTrue(new File(d, "logs").isDirectory());
-		assertTrue(new File(d, "logs/refs").isDirectory());
-		assertFalse(new File(d, "packed-refs").exists());
+		assertTrue(new File(commonDir, "refs").isDirectory());
+		assertTrue(new File(commonDir, "logs").isDirectory());
+		assertTrue(new File(commonDir, "logs/refs").isDirectory());
+		assertFalse(new File(commonDir, "packed-refs").exists());
 
-		assertTrue(new File(d, "refs/heads").isDirectory());
-		assertTrue(new File(d, "refs/tags").isDirectory());
-		assertEquals(2, new File(d, "refs").list().length);
-		assertEquals(0, new File(d, "refs/heads").list().length);
-		assertEquals(0, new File(d, "refs/tags").list().length);
+		assertTrue(new File(commonDir, "refs/heads").isDirectory());
+		assertTrue(new File(commonDir, "refs/tags").isDirectory());
+		assertEquals(2, new File(commonDir, "refs").list().length);
+		assertEquals(0, new File(commonDir, "refs/heads").list().length);
+		assertEquals(0, new File(commonDir, "refs/tags").list().length);
 
-		assertTrue(new File(d, "logs/refs/heads").isDirectory());
-		assertFalse(new File(d, "logs/HEAD").exists());
-		assertEquals(0, new File(d, "logs/refs/heads").list().length);
+		assertTrue(new File(commonDir, "logs/refs/heads").isDirectory());
+		assertFalse(new File(gitDir, "logs/HEAD").exists());
+		assertEquals(0, new File(commonDir, "logs/refs/heads").list().length);
 
-		assertEquals("ref: refs/heads/master\n", read(new File(d, HEAD)));
+		assertEquals("ref: refs/heads/master\n", read(new File(gitDir, HEAD)));
 	}
 
 	@Test(expected = UnsupportedOperationException.class)
@@ -1382,7 +1383,7 @@ private void writeLooseRef(String name, String content) throws IOException {
 	}
 
 	private void deleteLooseRef(String name) {
-		File path = new File(diskRepo.getDirectory(), name);
+		File path = new File(diskRepo.getCommonDirectory(), name);
 		assertTrue("deleted " + name, path.delete());
 	}
 }
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
index dc0e749..eb521ff 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogReaderTest.java
@@ -238,7 +238,7 @@ public void testSpecificEntryNumber() throws Exception {
 
 	private void setupReflog(String logName, byte[] data)
 			throws FileNotFoundException, IOException {
-		File logfile = new File(db.getDirectory(), logName);
+		File logfile = new File(db.getCommonDirectory(), logName);
 		if (!logfile.getParentFile().mkdirs()
 				&& !logfile.getParentFile().isDirectory()) {
 			throw new IOException(
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
index 8d0e99d..8e9b7b8 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/ReflogWriterTest.java
@@ -48,7 +48,7 @@ public void shouldFilterLineFeedFromMessage() throws Exception {
 
 	private void readReflog(byte[] buffer)
 			throws FileNotFoundException, IOException {
-		File logfile = new File(db.getDirectory(), "logs/refs/heads/master");
+		File logfile = new File(db.getCommonDirectory(), "logs/refs/heads/master");
 		if (!logfile.getParentFile().mkdirs()
 				&& !logfile.getParentFile().isDirectory()) {
 			throw new IOException(
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
index 32f6766..5c2b190 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/GpgConfigTest.java
@@ -96,6 +96,16 @@ public void testGetKeyFormat_x509() throws Exception {
 	}
 
 	@Test
+	public void testGetKeyFormat_ssh() throws Exception {
+		Config c = parse("" //
+				+ "[gpg]\n" //
+				+ "  format = ssh\n" //
+		);
+
+		assertEquals(GpgConfig.GpgFormat.SSH, new GpgConfig(c).getKeyFormat());
+	}
+
+	@Test
 	public void testGetSigningKey() throws Exception {
 		Config c = parse("" //
 				+ "[user]\n" //
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 19c9008..c9f7336 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -285,6 +285,9 @@
 downloadCancelled=Download cancelled
 downloadCancelledDuringIndexing=Download cancelled during indexing
 duplicateAdvertisementsOf=duplicate advertisements of {0}
+duplicateCacheTablesGiven=Duplicate cache tables given
+duplicatePackExtensionsForCacheTables=Duplicate pack extension {0} in cache tables
+duplicatePackExtensionsSet=Attempting to configure duplicate pack extensions: {0}.{1}.{2} contains {3}
 duplicateRef=Duplicate ref: {0}
 duplicateRefAttribute=Duplicate ref attribute: {0}
 duplicateRemoteRefUpdateIsIllegal=Duplicate remote ref update is illegal. Affected remote name: {0}
@@ -539,6 +542,8 @@
 noMergeHeadSpecified=No merge head specified
 nonBareLinkFilesNotSupported=Link files are not supported with nonbare repos
 nonCommitToHeads=Cannot point a branch to a non-commit object
+noPackExtConfigurationGiven=No PackExt configuration given
+noPackExtGivenForConfiguration=No PackExt given for configuration
 noPathAttributesFound=No Attributes found for {0}.
 noSuchRef=no such ref
 noSuchRefKnown=no such ref: {0}
@@ -803,6 +808,7 @@
 tSizeMustBeGreaterOrEqual1=tSize must be >= 1
 unableToCheckConnectivity=Unable to check connectivity.
 unableToCreateNewObject=Unable to create new object: {0}
+unableToReadFullArray=Unable to read an array with {0} elements from the stream
 unableToReadFullInt=Unable to read a full int from the stream
 unableToReadPackfile=Unable to read packfile {0}
 unableToRemovePath=Unable to remove path ''{0}''
@@ -829,6 +835,7 @@
 unknownObjectInIndex=unknown object {0} found in index but not in pack file
 unknownObjectType=Unknown object type {0}.
 unknownObjectType2=unknown
+unknownPackExtension=Unknown pack extension: {0}.{1}.{2}={3}
 unknownPositionEncoding=Unknown position encoding %s
 unknownRefStorageFormat=Unknown ref storage format "{0}"
 unknownRepositoryFormat=Unknown repository format
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
index 8fb5d60..401f069 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleAddCommand.java
@@ -176,7 +176,7 @@ public Repository call() throws GitAPIException {
 		CloneCommand clone = Git.cloneRepository();
 		configure(clone);
 		clone.setDirectory(moduleDirectory);
-		clone.setGitDir(new File(new File(repo.getDirectory(),
+		clone.setGitDir(new File(new File(repo.getCommonDirectory(),
 				Constants.MODULES), path));
 		clone.setURI(resolvedUri);
 		if (monitor != null)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
index df73164..751dabc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleUpdateCommand.java
@@ -128,7 +128,7 @@ private Repository getOrCloneSubmodule(SubmoduleWalk generator, String url)
 			clone.setURI(url);
 			clone.setDirectory(generator.getDirectory());
 			clone.setGitDir(
-					new File(new File(repo.getDirectory(), Constants.MODULES),
+					new File(new File(repo.getCommonDirectory(), Constants.MODULES),
 							generator.getPath()));
 			if (monitor != null) {
 				clone.setProgressMonitor(monitor);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
index 6ae5153..fa0a82f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
@@ -1647,6 +1647,8 @@ private static void runExternalFilterCommand(Repository repo, String path,
 		filterProcessBuilder.directory(repo.getWorkTree());
 		filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
 				repo.getDirectory().getAbsolutePath());
+		filterProcessBuilder.environment().put(Constants.GIT_COMMON_DIR_KEY,
+				repo.getCommonDirectory().getAbsolutePath());
 		ExecutionResult result;
 		int rc;
 		try {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index 700b54a..8a5f2b2 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -315,6 +315,9 @@ public static JGitText get() {
 	/***/ public String downloadCancelled;
 	/***/ public String downloadCancelledDuringIndexing;
 	/***/ public String duplicateAdvertisementsOf;
+	/***/ public String duplicateCacheTablesGiven;
+	/***/ public String duplicatePackExtensionsForCacheTables;
+	/***/ public String duplicatePackExtensionsSet;
 	/***/ public String duplicateRef;
 	/***/ public String duplicateRefAttribute;
 	/***/ public String duplicateRemoteRefUpdateIsIllegal;
@@ -569,6 +572,8 @@ public static JGitText get() {
 	/***/ public String noMergeHeadSpecified;
 	/***/ public String nonBareLinkFilesNotSupported;
 	/***/ public String nonCommitToHeads;
+	/***/ public String noPackExtConfigurationGiven;
+	/***/ public String noPackExtGivenForConfiguration;
 	/***/ public String noPathAttributesFound;
 	/***/ public String noSuchRef;
 	/***/ public String noSuchRefKnown;
@@ -833,6 +838,7 @@ public static JGitText get() {
 	/***/ public String unableToCheckConnectivity;
 	/***/ public String unableToCreateNewObject;
 	/***/ public String unableToReadFullInt;
+	/***/ public String unableToReadFullArray;
 	/***/ public String unableToReadPackfile;
 	/***/ public String unableToRemovePath;
 	/***/ public String unableToWrite;
@@ -858,6 +864,7 @@ public static JGitText get() {
 	/***/ public String unknownObjectInIndex;
 	/***/ public String unknownObjectType;
 	/***/ public String unknownObjectType2;
+	/***/ public String unknownPackExtension;
 	/***/ public String unknownPositionEncoding;
 	/***/ public String unknownRefStorageFormat;
 	/***/ public String unknownRepositoryFormat;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
index d0907bc..ce71a71 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/ClockBlockCacheTable.java
@@ -135,7 +135,7 @@ final class ClockBlockCacheTable implements DfsBlockCacheTable {
 	}
 
 	@Override
-	public DfsBlockCacheStats getDfsBlockCacheStats() {
+	public BlockCacheStats getBlockCacheStats() {
 		return dfsBlockCacheStats;
 	}
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index 56719cf..3e1300c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -119,7 +119,7 @@ boolean shouldCopyThroughCache(long length) {
 	 * @return total number of bytes in the cache, per pack file extension.
 	 */
 	public long[] getCurrentSize() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats().getCurrentSize();
+		return dfsBlockCacheTable.getBlockCacheStats().getCurrentSize();
 	}
 
 	/**
@@ -138,7 +138,7 @@ public long getFillPercentage() {
 	 *         extension.
 	 */
 	public long[] getHitCount() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats().getHitCount();
+		return dfsBlockCacheTable.getBlockCacheStats().getHitCount();
 	}
 
 	/**
@@ -149,7 +149,7 @@ public long getFillPercentage() {
 	 *         extension.
 	 */
 	public long[] getMissCount() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats().getMissCount();
+		return dfsBlockCacheTable.getBlockCacheStats().getMissCount();
 	}
 
 	/**
@@ -158,7 +158,7 @@ public long getFillPercentage() {
 	 * @return total number of requests (hit + miss), per pack file extension.
 	 */
 	public long[] getTotalRequestCount() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats()
+		return dfsBlockCacheTable.getBlockCacheStats()
 				.getTotalRequestCount();
 	}
 
@@ -168,7 +168,7 @@ public long getFillPercentage() {
 	 * @return hit ratios
 	 */
 	public long[] getHitRatio() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats().getHitRatio();
+		return dfsBlockCacheTable.getBlockCacheStats().getHitRatio();
 	}
 
 	/**
@@ -179,7 +179,7 @@ public long getFillPercentage() {
 	 *         file extension.
 	 */
 	public long[] getEvictions() {
-		return dfsBlockCacheTable.getDfsBlockCacheStats().getEvictions();
+		return dfsBlockCacheTable.getBlockCacheStats().getEvictions();
 	}
 
 	/**
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
index 77273ce..20f4666 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
@@ -11,17 +11,25 @@
 package org.eclipse.jgit.internal.storage.dfs;
 
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_CACHE_PREFIX;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_DFS_SECTION;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_LIMIT;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BLOCK_SIZE;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_CONCURRENCY_LEVEL;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_PACK_EXTENSIONS;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_RATIO;
 
 import java.text.MessageFormat;
 import java.time.Duration;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.Consumer;
+import java.util.stream.Collectors;
 
 import org.eclipse.jgit.internal.JGitText;
 import org.eclipse.jgit.internal.storage.pack.PackExt;
@@ -42,15 +50,21 @@ public class DfsBlockCacheConfig {
 	public static final int DEFAULT_CACHE_HOT_MAX = 1;
 
 	private long blockLimit;
+
 	private int blockSize;
+
 	private double streamRatio;
+
 	private int concurrencyLevel;
 
 	private Consumer<Long> refLock;
+
 	private Map<PackExt, Integer> cacheHotMap;
 
 	private IndexEventConsumer indexEventConsumer;
 
+	private List<DfsBlockCachePackExtConfig> packExtCacheConfigurations;
+
 	/**
 	 * Create a default configuration.
 	 */
@@ -60,6 +74,7 @@ public DfsBlockCacheConfig() {
 		setStreamRatio(0.30);
 		setConcurrencyLevel(32);
 		cacheHotMap = Collections.emptyMap();
+		packExtCacheConfigurations = Collections.emptyList();
 	}
 
 	/**
@@ -77,10 +92,10 @@ public long getBlockLimit() {
 	 * Set maximum number bytes of heap memory to dedicate to caching pack file
 	 * data.
 	 * <p>
-	 * It is strongly recommended to set the block limit to be an integer multiple
-	 * of the block size. This constraint is not enforced by this method (since
-	 * it may be called before {@link #setBlockSize(int)}), but it is enforced by
-	 * {@link #fromConfig(Config)}.
+	 * It is strongly recommended to set the block limit to be an integer
+	 * multiple of the block size. This constraint is not enforced by this
+	 * method (since it may be called before {@link #setBlockSize(int)}), but it
+	 * is enforced by {@link #fromConfig(Config)}.
 	 *
 	 * @param newLimit
 	 *            maximum number bytes of heap memory to dedicate to caching
@@ -89,9 +104,9 @@ public long getBlockLimit() {
 	 */
 	public DfsBlockCacheConfig setBlockLimit(long newLimit) {
 		if (newLimit <= 0) {
-			throw new IllegalArgumentException(MessageFormat.format(
-					JGitText.get().blockLimitNotPositive,
-					Long.valueOf(newLimit)));
+			throw new IllegalArgumentException(
+					MessageFormat.format(JGitText.get().blockLimitNotPositive,
+							Long.valueOf(newLimit)));
 		}
 		blockLimit = newLimit;
 		return this;
@@ -240,61 +255,115 @@ public DfsBlockCacheConfig setIndexEventConsumer(
 	}
 
 	/**
+	 * Get the list of pack ext cache configs.
+	 *
+	 * @return the list of pack ext cache configs.
+	 */
+	List<DfsBlockCachePackExtConfig> getPackExtCacheConfigurations() {
+		return packExtCacheConfigurations;
+	}
+
+	/**
+	 * Set the list of pack ext cache configs.
+	 *
+	 * Made visible for testing.
+	 *
+	 * @param packExtCacheConfigurations
+	 *            the list of pack ext cache configs to set.
+	 * @return {@code this}
+	 */
+	DfsBlockCacheConfig setPackExtCacheConfigurations(
+			List<DfsBlockCachePackExtConfig> packExtCacheConfigurations) {
+		this.packExtCacheConfigurations = packExtCacheConfigurations;
+		return this;
+	}
+
+	/**
 	 * Update properties by setting fields from the configuration.
 	 * <p>
 	 * If a property is not defined in the configuration, then it is left
 	 * unmodified.
 	 * <p>
-	 * Enforces certain constraints on the combination of settings in the config,
-	 * for example that the block limit is a multiple of the block size.
+	 * Enforces certain constraints on the combination of settings in the
+	 * config, for example that the block limit is a multiple of the block size.
 	 *
 	 * @param rc
 	 *            configuration to read properties from.
 	 * @return {@code this}
 	 */
 	public DfsBlockCacheConfig fromConfig(Config rc) {
-		long cfgBlockLimit = rc.getLong(
-				CONFIG_CORE_SECTION,
-				CONFIG_DFS_SECTION,
-				CONFIG_KEY_BLOCK_LIMIT,
-				getBlockLimit());
-		int cfgBlockSize = rc.getInt(
-				CONFIG_CORE_SECTION,
-				CONFIG_DFS_SECTION,
-				CONFIG_KEY_BLOCK_SIZE,
+		fromConfig(CONFIG_CORE_SECTION, CONFIG_DFS_SECTION, rc);
+		loadPackExtConfigs(rc);
+		return this;
+	}
+
+	private void fromConfig(String section, String subSection, Config rc) {
+		long cfgBlockLimit = rc.getLong(section, subSection,
+				CONFIG_KEY_BLOCK_LIMIT, getBlockLimit());
+		int cfgBlockSize = rc.getInt(section, subSection, CONFIG_KEY_BLOCK_SIZE,
 				getBlockSize());
 		if (cfgBlockLimit % cfgBlockSize != 0) {
 			throw new IllegalArgumentException(MessageFormat.format(
 					JGitText.get().blockLimitNotMultipleOfBlockSize,
-					Long.valueOf(cfgBlockLimit),
-					Long.valueOf(cfgBlockSize)));
+					Long.valueOf(cfgBlockLimit), Long.valueOf(cfgBlockSize)));
 		}
 
 		setBlockLimit(cfgBlockLimit);
 		setBlockSize(cfgBlockSize);
 
-		setConcurrencyLevel(rc.getInt(
-				CONFIG_CORE_SECTION,
-				CONFIG_DFS_SECTION,
-				CONFIG_KEY_CONCURRENCY_LEVEL,
-				getConcurrencyLevel()));
+		setConcurrencyLevel(rc.getInt(section, subSection,
+				CONFIG_KEY_CONCURRENCY_LEVEL, getConcurrencyLevel()));
 
-		String v = rc.getString(
-				CONFIG_CORE_SECTION,
-				CONFIG_DFS_SECTION,
-				CONFIG_KEY_STREAM_RATIO);
+		String v = rc.getString(section, subSection, CONFIG_KEY_STREAM_RATIO);
 		if (v != null) {
 			try {
 				setStreamRatio(Double.parseDouble(v));
 			} catch (NumberFormatException e) {
 				throw new IllegalArgumentException(MessageFormat.format(
-						JGitText.get().enumValueNotSupported3,
-						CONFIG_CORE_SECTION,
-						CONFIG_DFS_SECTION,
-						CONFIG_KEY_STREAM_RATIO, v), e);
+						JGitText.get().enumValueNotSupported3, section,
+						subSection, CONFIG_KEY_STREAM_RATIO, v), e);
 			}
 		}
-		return this;
+	}
+
+	private void loadPackExtConfigs(Config config) {
+		List<String> subSections = config.getSubsections(CONFIG_CORE_SECTION)
+				.stream()
+				.filter(section -> section.startsWith(CONFIG_DFS_CACHE_PREFIX))
+				.collect(Collectors.toList());
+		if (subSections.size() == 0) {
+			return;
+		}
+		ArrayList<DfsBlockCachePackExtConfig> cacheConfigs = new ArrayList<>();
+		Set<PackExt> extensionsSeen = new HashSet<>();
+		for (String subSection : subSections) {
+			var cacheConfig = DfsBlockCachePackExtConfig.fromConfig(config,
+					CONFIG_CORE_SECTION, subSection);
+			Set<PackExt> packExtsDuplicates = intersection(extensionsSeen,
+					cacheConfig.packExts);
+			if (packExtsDuplicates.size() > 0) {
+				String duplicatePackExts = packExtsDuplicates.stream()
+						.map(PackExt::toString)
+						.collect(Collectors.joining(","));
+				throw new IllegalArgumentException(MessageFormat.format(
+						JGitText.get().duplicatePackExtensionsSet,
+						CONFIG_CORE_SECTION, subSection,
+						CONFIG_KEY_PACK_EXTENSIONS, duplicatePackExts));
+			}
+			extensionsSeen.addAll(cacheConfig.packExts);
+			cacheConfigs.add(cacheConfig);
+		}
+		packExtCacheConfigurations = cacheConfigs;
+	}
+
+	private static <T> Set<T> intersection(Set<T> first, Set<T> second) {
+		Set<T> ret = new HashSet<>();
+		for (T entry : second) {
+			if (first.contains(entry)) {
+				ret.add(entry);
+			}
+		}
+		return ret;
 	}
 
 	/** Consumer of DfsBlockCache loading and eviction events for indexes. */
@@ -346,4 +415,88 @@ default boolean shouldReportEvictedEvent() {
 			return false;
 		}
 	}
+
+	/**
+	 * A configuration for a single cache table storing 1 or more Pack
+	 * extensions.
+	 * <p>
+	 * The current pack ext cache tables implementation supports the same
+	 * parameters the ClockBlockCacheTable (current default implementation).
+	 * <p>
+	 * Configuration falls back to the defaults coded values defined in the
+	 * {@link DfsBlockCacheConfig} when not set on each cache table
+	 * configuration and NOT the values of the basic dfs section.
+	 * <p>
+	 * <code>
+	 *
+	 * Format:
+	 * [core "dfs.packCache"]
+	 *   packExtensions = "PACK"
+	 *   blockSize = 512
+	 *   blockLimit = 100
+	 *   concurrencyLevel = 5
+	 *
+	 * [core "dfs.multipleExtensionCache"]
+	 *   packExtensions = "INDEX REFTABLE BITMAP_INDEX"
+	 *   blockSize = 512
+	 *   blockLimit = 100
+	 *   concurrencyLevel = 5
+	 * </code>
+	 */
+	static class DfsBlockCachePackExtConfig {
+		// Set of pack extensions that will map to the cache instance.
+		private final EnumSet<PackExt> packExts;
+
+		// Configuration for the cache instance.
+		private final DfsBlockCacheConfig packExtCacheConfiguration;
+
+		/**
+		 * Made visible for testing.
+		 *
+		 * @param packExts
+		 *            Set of {@link PackExt}s associated to this cache config.
+		 * @param packExtCacheConfiguration
+		 *            {@link DfsBlockCacheConfig} for this cache config.
+		 */
+		DfsBlockCachePackExtConfig(EnumSet<PackExt> packExts,
+				DfsBlockCacheConfig packExtCacheConfiguration) {
+			this.packExts = packExts;
+			this.packExtCacheConfiguration = packExtCacheConfiguration;
+		}
+
+		Set<PackExt> getPackExts() {
+			return packExts;
+		}
+
+		DfsBlockCacheConfig getPackExtCacheConfiguration() {
+			return packExtCacheConfiguration;
+		}
+
+		private static DfsBlockCachePackExtConfig fromConfig(Config config,
+				String section, String subSection) {
+			String packExtensions = config.getString(section, subSection,
+					CONFIG_KEY_PACK_EXTENSIONS);
+			if (packExtensions == null) {
+				throw new IllegalArgumentException(
+						JGitText.get().noPackExtGivenForConfiguration);
+			}
+			String[] extensions = packExtensions.split(" ", -1);
+			Set<PackExt> packExts = new HashSet<>(extensions.length);
+			for (String extension : extensions) {
+				try {
+					packExts.add(PackExt.valueOf(extension));
+				} catch (IllegalArgumentException e) {
+					throw new IllegalArgumentException(MessageFormat.format(
+							JGitText.get().unknownPackExtension, section,
+							subSection, CONFIG_KEY_PACK_EXTENSIONS, extension),
+							e);
+				}
+			}
+
+			DfsBlockCacheConfig dfsBlockCacheConfig = new DfsBlockCacheConfig();
+			dfsBlockCacheConfig.fromConfig(section, subSection, config);
+			return new DfsBlockCachePackExtConfig(EnumSet.copyOf(packExts),
+					dfsBlockCacheConfig);
+		}
+	}
 }
\ No newline at end of file
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
index 701d1fd..309f2d1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheTable.java
@@ -129,18 +129,72 @@ <T> DfsBlockCache.Ref<T> getOrLoadRef(DfsStreamKey key, long position,
 	<T> T get(DfsStreamKey key, long position);
 
 	/**
-	 * Get the DfsBlockCacheStats object for this block cache table's
+	 * Get the {@link BlockCacheStats} object for this block cache table's
 	 * statistics.
 	 *
-	 * @return the DfsBlockCacheStats tracking this block cache table's
+	 * @return the {@link BlockCacheStats} tracking this block cache table's
 	 *         statistics.
 	 */
-	DfsBlockCacheStats getDfsBlockCacheStats();
+	BlockCacheStats getBlockCacheStats();
+
+	/**
+	 * Provides methods used with Block Cache statistics.
+	 */
+	interface BlockCacheStats {
+		/**
+		 * Get total number of bytes in the cache, per pack file extension.
+		 *
+		 * @return total number of bytes in the cache, per pack file extension.
+		 */
+		long[] getCurrentSize();
+
+		/**
+		 * Get number of requests for items in the cache, per pack file
+		 * extension.
+		 *
+		 * @return the number of requests for items in the cache, per pack file
+		 *         extension.
+		 */
+		long[] getHitCount();
+
+		/**
+		 * Get number of requests for items not in the cache, per pack file
+		 * extension.
+		 *
+		 * @return the number of requests for items not in the cache, per pack
+		 *         file extension.
+		 */
+		long[] getMissCount();
+
+		/**
+		 * Get total number of requests (hit + miss), per pack file extension.
+		 *
+		 * @return total number of requests (hit + miss), per pack file
+		 *         extension.
+		 */
+		long[] getTotalRequestCount();
+
+		/**
+		 * Get hit ratios.
+		 *
+		 * @return hit ratios.
+		 */
+		long[] getHitRatio();
+
+		/**
+		 * Get number of evictions performed due to cache being full, per pack
+		 * file extension.
+		 *
+		 * @return the number of evictions performed due to cache being full,
+		 *         per pack file extension.
+		 */
+		long[] getEvictions();
+	}
 
 	/**
 	 * Keeps track of stats for a Block Cache table.
 	 */
-	class DfsBlockCacheStats {
+	class DfsBlockCacheStats implements BlockCacheStats {
 		/**
 		 * Number of times a block was found in the cache, per pack file
 		 * extension.
@@ -214,44 +268,23 @@ void addToLiveBytes(DfsStreamKey key, long size) {
 			getStat(liveBytes, key).addAndGet(size);
 		}
 
-		/**
-		 * Get total number of bytes in the cache, per pack file extension.
-		 *
-		 * @return total number of bytes in the cache, per pack file extension.
-		 */
-		long[] getCurrentSize() {
+		@Override
+		public long[] getCurrentSize() {
 			return getStatVals(liveBytes);
 		}
 
-		/**
-		 * Get number of requests for items in the cache, per pack file
-		 * extension.
-		 *
-		 * @return the number of requests for items in the cache, per pack file
-		 *         extension.
-		 */
-		long[] getHitCount() {
+		@Override
+		public long[] getHitCount() {
 			return getStatVals(statHit);
 		}
 
-		/**
-		 * Get number of requests for items not in the cache, per pack file
-		 * extension.
-		 *
-		 * @return the number of requests for items not in the cache, per pack
-		 *         file extension.
-		 */
-		long[] getMissCount() {
+		@Override
+		public long[] getMissCount() {
 			return getStatVals(statMiss);
 		}
 
-		/**
-		 * Get total number of requests (hit + miss), per pack file extension.
-		 *
-		 * @return total number of requests (hit + miss), per pack file
-		 *         extension.
-		 */
-		long[] getTotalRequestCount() {
+		@Override
+		public long[] getTotalRequestCount() {
 			AtomicLong[] hit = statHit.get();
 			AtomicLong[] miss = statMiss.get();
 			long[] cnt = new long[Math.max(hit.length, miss.length)];
@@ -264,12 +297,8 @@ void addToLiveBytes(DfsStreamKey key, long size) {
 			return cnt;
 		}
 
-		/**
-		 * Get hit ratios.
-		 *
-		 * @return hit ratios.
-		 */
-		long[] getHitRatio() {
+		@Override
+		public long[] getHitRatio() {
 			AtomicLong[] hit = statHit.get();
 			AtomicLong[] miss = statMiss.get();
 			long[] ratio = new long[Math.max(hit.length, miss.length)];
@@ -288,14 +317,8 @@ void addToLiveBytes(DfsStreamKey key, long size) {
 			return ratio;
 		}
 
-		/**
-		 * Get number of evictions performed due to cache being full, per pack
-		 * file extension.
-		 *
-		 * @return the number of evictions performed due to cache being full,
-		 *         per pack file extension.
-		 */
-		long[] getEvictions() {
+		@Override
+		public long[] getEvictions() {
 			return getStatVals(statEvict);
 		}
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
index 3e4d4d3..845feab 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsPackFile.java
@@ -107,6 +107,53 @@ public final class DfsPackFile extends BlockBasedFile {
 	/** Lock for {@link #corruptObjects}. */
 	private final Object corruptObjectsLock = new Object();
 
+	private final IndexFactory indexFactory;
+
+	/**
+	 * Returns the indexes for this pack.
+	 * <p>
+	 * We define indexes in different sub interfaces to allow implementing the
+	 * indexes over different combinations of backends.
+	 * <p>
+	 * Implementations decide if/how to cache the indexes. The calling
+	 * DfsPackFile will keep the reference to the index as long as it needs it.
+	 */
+	public interface IndexFactory {
+		/**
+		 * Take care of loading the primary and reverse indexes for this pack.
+		 */
+		interface PackIndexes {
+			/**
+			 * Load the primary index for the pack.
+			 *
+			 * @param ctx
+			 *            reader to find the raw bytes
+			 * @return a primary index
+			 * @throws IOException
+			 *             a problem finding/parsing the index
+			 */
+			PackIndex index(DfsReader ctx) throws IOException;
+
+			/**
+			 * Load the reverse index of the pack
+			 *
+			 * @param ctx
+			 *            reader to find the raw bytes
+			 * @return the reverse index of the pack
+			 * @throws IOException
+			 *             a problem finding/parsing the reverse index
+			 */
+			PackReverseIndex reverseIndex(DfsReader ctx) throws IOException;
+		}
+
+		/**
+		 * Returns a provider of the primary and reverse indexes of this pack
+		 *
+		 * @return an implementation of the {@link PackIndexes} interface
+		 */
+		PackIndexes getPackIndexes();
+	}
+
 	/**
 	 * Construct a reader for an existing, packfile.
 	 *
@@ -116,7 +163,8 @@ public final class DfsPackFile extends BlockBasedFile {
 	 *            description of the pack within the DFS.
 	 */
 	DfsPackFile(DfsBlockCache cache, DfsPackDescription desc) {
-		this(cache, desc, DEFAULT_BITMAP_LOADER);
+		this(cache, desc, DEFAULT_BITMAP_LOADER,
+				new CachedStreamIndexFactory(cache, desc));
 	}
 
 	/**
@@ -128,9 +176,11 @@ public final class DfsPackFile extends BlockBasedFile {
 	 *            description of the pack within the DFS
 	 * @param bitmapLoader
 	 *            loader to get the bitmaps of this pack (if any)
+	 * @param indexFactory
+	 *            an IndexFactory to get references to the indexes of this pack
 	 */
 	public DfsPackFile(DfsBlockCache cache, DfsPackDescription desc,
-			PackBitmapIndexLoader bitmapLoader) {
+			PackBitmapIndexLoader bitmapLoader, IndexFactory indexFactory) {
 		super(cache, desc, PACK);
 
 		int bs = desc.getBlockSize(PACK);
@@ -142,6 +192,7 @@ public DfsPackFile(DfsBlockCache cache, DfsPackDescription desc,
 		length = sz > 0 ? sz : -1;
 
 		this.bitmapLoader = bitmapLoader;
+		this.indexFactory = indexFactory;
 	}
 
 	/**
@@ -196,22 +247,7 @@ private PackIndex idx(DfsReader ctx) throws IOException {
 		Repository.getGlobalListenerList()
 				.dispatch(new BeforeDfsPackIndexLoadedEvent(this));
 		try {
-			DfsStreamKey idxKey = desc.getStreamKey(INDEX);
-			// Keep the value parsed in the loader, in case the Ref<> is
-			// nullified in ClockBlockCacheTable#reserveSpace
-			// before we read its value.
-			AtomicReference<PackIndex> loadedRef = new AtomicReference<>(null);
-			DfsBlockCache.Ref<PackIndex> cachedRef = cache.getOrLoadRef(idxKey,
-					REF_POSITION, () -> {
-						RefWithSize<PackIndex> idx = loadPackIndex(ctx);
-						loadedRef.set(idx.ref);
-						return new DfsBlockCache.Ref<>(idxKey, REF_POSITION,
-								idx.size, idx.ref);
-					});
-			if (loadedRef.get() == null) {
-				ctx.stats.idxCacheHit++;
-			}
-			index = cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+			index = indexFactory.getPackIndexes().index(ctx);
 			if (index == null) {
 				throw new IOException(
 						"Couldn't get a reference to the primary index"); //$NON-NLS-1$
@@ -328,20 +364,10 @@ public PackReverseIndex getReverseIdx(DfsReader ctx) throws IOException {
 			return reverseIndex;
 		}
 
-		PackIndex idx = idx(ctx);
-		DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX);
-		AtomicBoolean cacheHit = new AtomicBoolean(true);
-		DfsBlockCache.Ref<PackReverseIndex> revref = cache.getOrLoadRef(revKey,
-				REF_POSITION, () -> {
-					cacheHit.set(false);
-					return loadReverseIdx(ctx, revKey, idx);
-				});
-		if (cacheHit.get()) {
-			ctx.stats.ridxCacheHit++;
-		}
-		PackReverseIndex revidx = revref.get();
-		if (reverseIndex == null && revidx != null) {
-			reverseIndex = revidx;
+		reverseIndex = indexFactory.getPackIndexes().reverseIndex(ctx);
+		if (reverseIndex == null) {
+			throw new IOException(
+					"Couldn't get a reference to the reverse index"); //$NON-NLS-1$
 		}
 		ctx.emitIndexLoad(desc, REVERSE_INDEX, reverseIndex);
 		return reverseIndex;
@@ -354,6 +380,7 @@ private PackObjectSizeIndex getObjectSizeIndex(DfsReader ctx)
 		}
 
 		if (objectSizeIndexLoadAttempted
+				|| !ctx.getOptions().shouldUseObjectSizeIndex()
 				|| !desc.hasFileExt(OBJECT_SIZE_INDEX)) {
 			// Pack doesn't have object size index
 			return null;
@@ -1217,43 +1244,6 @@ private void setCorrupt(long offset) {
 		}
 	}
 
-	private RefWithSize<PackIndex> loadPackIndex(DfsReader ctx)
-			throws IOException {
-		try {
-			ctx.stats.readIdx++;
-			long start = System.nanoTime();
-			try (ReadableChannel rc = ctx.db.openFile(desc, INDEX)) {
-				PackIndex idx = PackIndex.read(alignTo8kBlocks(rc));
-				ctx.stats.readIdxBytes += rc.position();
-				return new RefWithSize<>(idx, idx.getObjectCount() * REC_SIZE);
-			} finally {
-				ctx.stats.readIdxMicros += elapsedMicros(start);
-			}
-		} catch (EOFException e) {
-			throw new IOException(MessageFormat.format(
-					DfsText.get().shortReadOfIndex,
-					desc.getFileName(INDEX)), e);
-		} catch (IOException e) {
-			throw new IOException(MessageFormat.format(
-					DfsText.get().cannotReadIndex,
-					desc.getFileName(INDEX)), e);
-		}
-	}
-
-	private DfsBlockCache.Ref<PackReverseIndex> loadReverseIdx(
-			DfsReader ctx, DfsStreamKey revKey, PackIndex idx) {
-		ctx.stats.readReverseIdx++;
-		long start = System.nanoTime();
-		PackReverseIndex revidx = PackReverseIndexFactory.computeFromIndex(idx);
-		reverseIndex = revidx;
-		ctx.stats.readReverseIdxMicros += elapsedMicros(start);
-		return new DfsBlockCache.Ref<>(
-				revKey,
-				REF_POSITION,
-				idx.getObjectCount() * 8,
-				revidx);
-	}
-
 	private DfsBlockCache.Ref<PackObjectSizeIndex> loadObjectSizeIndex(
 			DfsReader ctx, DfsStreamKey objectSizeIndexKey) throws IOException {
 		ctx.stats.readObjectSizeIndex++;
@@ -1452,6 +1442,134 @@ public LoadResult loadPackBitmapIndex(DfsReader ctx, DfsPackFile pack)
 		}
 	}
 
+	/**
+	 * An index factory backed by Dfs streams and references cached in
+	 * DfsBlockCache
+	 */
+	public static final class CachedStreamIndexFactory implements IndexFactory {
+		private final CachedStreamPackIndexes indexes;
+
+		/**
+		 * An index factory
+		 *
+		 * @param cache
+		 *            DFS block cache to use for the references
+		 * @param desc
+		 *            This factory loads indexes for this pack
+		 */
+		public CachedStreamIndexFactory(DfsBlockCache cache,
+				DfsPackDescription desc) {
+			this.indexes = new CachedStreamPackIndexes(cache, desc);
+		}
+
+		@Override
+		public PackIndexes getPackIndexes() {
+			return indexes;
+		}
+	}
+
+	/**
+	 * Load primary and reverse index from Dfs streams and cache the references
+	 * in DfsBlockCache.
+	 */
+	public static final class CachedStreamPackIndexes implements IndexFactory.PackIndexes {
+		private final DfsBlockCache cache;
+
+		private final DfsPackDescription desc;
+
+		/**
+		 * An index factory
+		 *
+		 * @param cache
+		 *            DFS block cache to use for the references
+		 * @param desc This factory loads indexes for this pack
+		 */
+		public CachedStreamPackIndexes(DfsBlockCache cache,
+									   DfsPackDescription desc) {
+			this.cache = cache;
+			this.desc = desc;
+		}
+
+		@Override
+		public PackIndex index(DfsReader ctx) throws IOException {
+			DfsStreamKey idxKey = desc.getStreamKey(INDEX);
+			// Keep the value parsed in the loader, in case the Ref<> is
+			// nullified in ClockBlockCacheTable#reserveSpace
+			// before we read its value.
+			AtomicReference<PackIndex> loadedRef = new AtomicReference<>(null);
+			DfsBlockCache.Ref<PackIndex> cachedRef = cache.getOrLoadRef(idxKey,
+					REF_POSITION, () -> {
+						RefWithSize<PackIndex> idx = loadPackIndex(ctx, desc);
+						loadedRef.set(idx.ref);
+						return new DfsBlockCache.Ref<>(idxKey, REF_POSITION,
+								idx.size, idx.ref);
+					});
+			if (loadedRef.get() == null) {
+				ctx.stats.idxCacheHit++;
+			}
+			return cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+		}
+
+		private static RefWithSize<PackIndex> loadPackIndex(DfsReader ctx,
+				DfsPackDescription desc) throws IOException {
+			try {
+				ctx.stats.readIdx++;
+				long start = System.nanoTime();
+				try (ReadableChannel rc = ctx.db.openFile(desc, INDEX)) {
+					PackIndex idx = PackIndex.read(alignTo8kBlocks(rc));
+					ctx.stats.readIdxBytes += rc.position();
+					return new RefWithSize<>(idx,
+							idx.getObjectCount() * REC_SIZE);
+				} finally {
+					ctx.stats.readIdxMicros += elapsedMicros(start);
+				}
+			} catch (EOFException e) {
+				throw new IOException(
+						MessageFormat.format(DfsText.get().shortReadOfIndex,
+								desc.getFileName(INDEX)),
+						e);
+			} catch (IOException e) {
+				throw new IOException(
+						MessageFormat.format(DfsText.get().cannotReadIndex,
+								desc.getFileName(INDEX)),
+						e);
+			}
+		}
+
+		@Override
+		public PackReverseIndex reverseIndex(DfsReader ctx) throws IOException {
+			PackIndex idx = index(ctx);
+			DfsStreamKey revKey = desc.getStreamKey(REVERSE_INDEX);
+			// Keep the value parsed in the loader, in case the Ref<> is
+			// nullified in ClockBlockCacheTable#reserveSpace
+			// before we read its value.
+			AtomicReference<PackReverseIndex> loadedRef = new AtomicReference<>(
+					null);
+			DfsBlockCache.Ref<PackReverseIndex> cachedRef = cache
+					.getOrLoadRef(revKey, REF_POSITION, () -> {
+						RefWithSize<PackReverseIndex> ridx = loadReverseIdx(ctx,
+								idx);
+						loadedRef.set(ridx.ref);
+						return new DfsBlockCache.Ref<>(revKey, REF_POSITION,
+								ridx.size, ridx.ref);
+					});
+			if (loadedRef.get() == null) {
+				ctx.stats.ridxCacheHit++;
+			}
+			return cachedRef.get() != null ? cachedRef.get() : loadedRef.get();
+		}
+
+		private static RefWithSize<PackReverseIndex> loadReverseIdx(
+				DfsReader ctx, PackIndex idx) {
+			ctx.stats.readReverseIdx++;
+			long start = System.nanoTime();
+			PackReverseIndex revidx = PackReverseIndexFactory
+					.computeFromIndex(idx);
+			ctx.stats.readReverseIdxMicros += elapsedMicros(start);
+			return new RefWithSize<>(revidx, idx.getObjectCount() * 8);
+		}
+	}
+
 	private static final class RefWithSize<V> {
 		final V ref;
 		final long size;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
index f2ac461..5f5e819 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReaderOptions.java
@@ -15,6 +15,7 @@
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_DELTA_BASE_CACHE_LIMIT;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_BUFFER;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_STREAM_FILE_THRESHOLD;
+import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_USE_OBJECT_SIZE_INDEX;
 
 import org.eclipse.jgit.lib.Config;
 import org.eclipse.jgit.storage.pack.PackConfig;
@@ -36,6 +37,8 @@ public class DfsReaderOptions {
 
 	private boolean loadRevIndexInParallel;
 
+	private boolean useObjectSizeIndex;
+
 	/**
 	 * Create a default reader configuration.
 	 */
@@ -137,6 +140,28 @@ public DfsReaderOptions setLoadRevIndexInParallel(
 	}
 
 	/**
+	 * Use the object size index if available.
+	 *
+	 * @return true if the reader should try to use the object size index. If
+	 *         false, the reader ignores that index.
+	 */
+	public boolean shouldUseObjectSizeIndex() {
+		return useObjectSizeIndex;
+	}
+
+	/**
+	 * Set if the reader should try to use the object size index
+	 *
+	 * @param useObjectSizeIndex true to use it, false to ignore the object size index
+	 *
+	 * @return {@code this}
+	 */
+	public DfsReaderOptions setUseObjectSizeIndex(boolean useObjectSizeIndex) {
+		this.useObjectSizeIndex = useObjectSizeIndex;
+		return this;
+	}
+
+	/**
 	 * Update properties by setting fields from the configuration.
 	 * <p>
 	 * If a property is not defined in the configuration, then it is left
@@ -168,6 +193,10 @@ public DfsReaderOptions fromConfig(Config rc) {
 				CONFIG_DFS_SECTION,
 				CONFIG_KEY_STREAM_BUFFER,
 				getStreamPackBufferSize()));
+
+		setUseObjectSizeIndex(rc.getBoolean(CONFIG_CORE_SECTION,
+				CONFIG_DFS_SECTION, CONFIG_KEY_USE_OBJECT_SIZE_INDEX,
+				false));
 		return this;
 	}
 }
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java
new file mode 100644
index 0000000..858f731
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/PackExtBlockCacheTable.java
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2024, Google LLC and others
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v. 1.0 which is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.ReadableChannelSupplier;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.Ref;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCache.RefLoader;
+import org.eclipse.jgit.internal.storage.dfs.DfsBlockCacheConfig.DfsBlockCachePackExtConfig;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+
+/**
+ * A table that holds multiple cache tables accessed by {@link PackExt} types.
+ *
+ * <p>
+ * Allows the separation of entries from different {@link PackExt} types to
+ * limit churn in cache caused by entries of differing sizes.
+ * <p>
+ * Separating these tables enables the fine-tuning of cache tables per extension
+ * type.
+ */
+class PackExtBlockCacheTable implements DfsBlockCacheTable {
+	private final DfsBlockCacheTable defaultBlockCacheTable;
+
+	// Holds the unique tables backing the extBlockCacheTables values.
+	private final List<DfsBlockCacheTable> blockCacheTableList;
+
+	// Holds the mapping of PackExt to DfsBlockCacheTables.
+	// The relation between the size of extBlockCacheTables entries and
+	// blockCacheTableList entries is:
+	// blockCacheTableList.size() <= extBlockCacheTables.size()
+	private final Map<PackExt, DfsBlockCacheTable> extBlockCacheTables;
+
+	/**
+	 * Builds the PackExtBlockCacheTable from a list of
+	 * {@link DfsBlockCachePackExtConfig}s.
+	 *
+	 * @param cacheConfig
+	 *            {@link DfsBlockCacheConfig} containing
+	 *            {@link DfsBlockCachePackExtConfig}s used to configure
+	 *            PackExtBlockCacheTable. The {@link DfsBlockCacheConfig} holds
+	 *            the configuration for the default cache table.
+	 * @return the cache table built from the given configs.
+	 * @throws IllegalArgumentException
+	 *             when no {@link DfsBlockCachePackExtConfig} exists in the
+	 *             {@link DfsBlockCacheConfig}.
+	 */
+	static PackExtBlockCacheTable fromBlockCacheConfigs(
+			DfsBlockCacheConfig cacheConfig) {
+		DfsBlockCacheTable defaultTable = new ClockBlockCacheTable(cacheConfig);
+		Map<PackExt, DfsBlockCacheTable> packExtBlockCacheTables = new HashMap<>();
+		List<DfsBlockCachePackExtConfig> packExtConfigs = cacheConfig
+				.getPackExtCacheConfigurations();
+		if (packExtConfigs == null || packExtConfigs.size() == 0) {
+			throw new IllegalArgumentException(
+					JGitText.get().noPackExtConfigurationGiven);
+		}
+		for (DfsBlockCachePackExtConfig packExtCacheConfig : packExtConfigs) {
+			DfsBlockCacheTable table = new ClockBlockCacheTable(
+					packExtCacheConfig.getPackExtCacheConfiguration());
+			for (PackExt packExt : packExtCacheConfig.getPackExts()) {
+				if (packExtBlockCacheTables.containsKey(packExt)) {
+					throw new IllegalArgumentException(MessageFormat.format(
+							JGitText.get().duplicatePackExtensionsForCacheTables,
+							packExt));
+				}
+				packExtBlockCacheTables.put(packExt, table);
+			}
+		}
+		return fromCacheTables(defaultTable, packExtBlockCacheTables);
+	}
+
+	/**
+	 * Creates a new PackExtBlockCacheTable from the combination of a default
+	 * {@link DfsBlockCacheTable} and a map of {@link PackExt}s to
+	 * {@link DfsBlockCacheTable}s.
+	 * <p>
+	 * This method allows for the PackExtBlockCacheTable to handle a mapping of
+	 * {@link PackExt}s to arbitrarily defined {@link DfsBlockCacheTable}
+	 * implementations. This is especially useful for users wishing to implement
+	 * custom cache tables.
+	 * <p>
+	 * This is currently made visible for testing.
+	 *
+	 * @param defaultBlockCacheTable
+	 *            the default table used when a handling a {@link PackExt} type
+	 *            that does not map to a {@link DfsBlockCacheTable} mapped by
+	 *            packExtsCacheTablePairs.
+	 * @param packExtBlockCacheTables
+	 *            the mapping of {@link PackExt}s to
+	 *            {@link DfsBlockCacheTable}s. A single
+	 *            {@link DfsBlockCacheTable} can be defined for multiple
+	 *            {@link PackExt}s in a many-to-one relationship.
+	 * @return the PackExtBlockCacheTable created from the
+	 *         defaultBlockCacheTable and packExtsCacheTablePairs mapping.
+	 * @throws IllegalArgumentException
+	 *             when a {@link PackExt} is defined for multiple
+	 *             {@link DfsBlockCacheTable}s.
+	 */
+	static PackExtBlockCacheTable fromCacheTables(
+			DfsBlockCacheTable defaultBlockCacheTable,
+			Map<PackExt, DfsBlockCacheTable> packExtBlockCacheTables) {
+		Set<DfsBlockCacheTable> blockCacheTables = new HashSet<>();
+		blockCacheTables.add(defaultBlockCacheTable);
+		blockCacheTables.addAll(packExtBlockCacheTables.values());
+		return new PackExtBlockCacheTable(defaultBlockCacheTable,
+				List.copyOf(blockCacheTables), packExtBlockCacheTables);
+	}
+
+	private PackExtBlockCacheTable(DfsBlockCacheTable defaultBlockCacheTable,
+			List<DfsBlockCacheTable> blockCacheTableList,
+			Map<PackExt, DfsBlockCacheTable> extBlockCacheTables) {
+		this.defaultBlockCacheTable = defaultBlockCacheTable;
+		this.blockCacheTableList = blockCacheTableList;
+		this.extBlockCacheTables = extBlockCacheTables;
+	}
+
+	@Override
+	public boolean hasBlock0(DfsStreamKey key) {
+		return getTable(key).hasBlock0(key);
+	}
+
+	@Override
+	public DfsBlock getOrLoad(BlockBasedFile file, long position,
+			DfsReader dfsReader, ReadableChannelSupplier fileChannel)
+			throws IOException {
+		return getTable(file.ext).getOrLoad(file, position, dfsReader,
+				fileChannel);
+	}
+
+	@Override
+	public <T> Ref<T> getOrLoadRef(DfsStreamKey key, long position,
+			RefLoader<T> loader) throws IOException {
+		return getTable(key).getOrLoadRef(key, position, loader);
+	}
+
+	@Override
+	public void put(DfsBlock v) {
+		getTable(v.stream).put(v);
+	}
+
+	@Override
+	public <T> Ref<T> put(DfsStreamKey key, long pos, long size, T v) {
+		return getTable(key).put(key, pos, size, v);
+	}
+
+	@Override
+	public <T> Ref<T> putRef(DfsStreamKey key, long size, T v) {
+		return getTable(key).putRef(key, size, v);
+	}
+
+	@Override
+	public boolean contains(DfsStreamKey key, long position) {
+		return getTable(key).contains(key, position);
+	}
+
+	@Override
+	public <T> T get(DfsStreamKey key, long position) {
+		return getTable(key).get(key, position);
+	}
+
+	@Override
+	public BlockCacheStats getBlockCacheStats() {
+		return new CacheStats(blockCacheTableList.stream()
+				.map(DfsBlockCacheTable::getBlockCacheStats)
+				.collect(Collectors.toList()));
+	}
+
+	private DfsBlockCacheTable getTable(PackExt packExt) {
+		return extBlockCacheTables.getOrDefault(packExt,
+				defaultBlockCacheTable);
+	}
+
+	private DfsBlockCacheTable getTable(DfsStreamKey key) {
+		return extBlockCacheTables.getOrDefault(getPackExt(key),
+				defaultBlockCacheTable);
+	}
+
+	private static PackExt getPackExt(DfsStreamKey key) {
+		return PackExt.values()[key.packExtPos];
+	}
+
+	private static class CacheStats implements BlockCacheStats {
+		private final List<BlockCacheStats> blockCacheStats;
+
+		private CacheStats(List<BlockCacheStats> blockCacheStats) {
+			this.blockCacheStats = blockCacheStats;
+		}
+
+		@Override
+		public long[] getCurrentSize() {
+			long[] sums = emptyPackStats();
+			for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+				sums = add(sums, blockCacheStatsEntry.getCurrentSize());
+			}
+			return sums;
+		}
+
+		@Override
+		public long[] getHitCount() {
+			long[] sums = emptyPackStats();
+			for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+				sums = add(sums, blockCacheStatsEntry.getHitCount());
+			}
+			return sums;
+		}
+
+		@Override
+		public long[] getMissCount() {
+			long[] sums = emptyPackStats();
+			for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+				sums = add(sums, blockCacheStatsEntry.getMissCount());
+			}
+			return sums;
+		}
+
+		@Override
+		public long[] getTotalRequestCount() {
+			long[] sums = emptyPackStats();
+			for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+				sums = add(sums, blockCacheStatsEntry.getTotalRequestCount());
+			}
+			return sums;
+		}
+
+		@Override
+		public long[] getHitRatio() {
+			long[] hit = getHitCount();
+			long[] miss = getMissCount();
+			long[] ratio = new long[Math.max(hit.length, miss.length)];
+			for (int i = 0; i < ratio.length; i++) {
+				if (i >= hit.length) {
+					ratio[i] = 0;
+				} else if (i >= miss.length) {
+					ratio[i] = 100;
+				} else {
+					long total = hit[i] + miss[i];
+					ratio[i] = total == 0 ? 0 : hit[i] * 100 / total;
+				}
+			}
+			return ratio;
+		}
+
+		@Override
+		public long[] getEvictions() {
+			long[] sums = emptyPackStats();
+			for (BlockCacheStats blockCacheStatsEntry : blockCacheStats) {
+				sums = add(sums, blockCacheStatsEntry.getEvictions());
+			}
+			return sums;
+		}
+
+		private static long[] emptyPackStats() {
+			return new long[PackExt.values().length];
+		}
+
+		private static long[] add(long[] first, long[] second) {
+			long[] sums = new long[Integer.max(first.length, second.length)];
+			int i;
+			for (i = 0; i < Integer.min(first.length, second.length); i++) {
+				sums[i] = first[i] + second[i];
+			}
+			for (int j = i; j < first.length; j++) {
+				sums[j] = first[j];
+			}
+			for (int j = i; j < second.length; j++) {
+				sums[j] = second[j];
+			}
+			return sums;
+		}
+	}
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
index ed2516d..80240e5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileReftableDatabase.java
@@ -68,14 +68,14 @@ public class FileReftableDatabase extends RefDatabase {
 	private final FileReftableStack reftableStack;
 
 	FileReftableDatabase(FileRepository repo) throws IOException {
-		this(repo, new File(new File(repo.getDirectory(), Constants.REFTABLE),
+		this(repo, new File(new File(repo.getCommonDirectory(), Constants.REFTABLE),
 				Constants.TABLES_LIST));
 	}
 
 	FileReftableDatabase(FileRepository repo, File refstackName) throws IOException {
 		this.fileRepository = repo;
 		this.reftableStack = new FileReftableStack(refstackName,
-			new File(fileRepository.getDirectory(), Constants.REFTABLE),
+				new File(fileRepository.getCommonDirectory(), Constants.REFTABLE),
 			() -> fileRepository.fireEvent(new RefsChangedEvent()),
 			() -> fileRepository.getConfig());
 		this.reftableDatabase = new ReftableDatabase() {
@@ -318,7 +318,7 @@ public void close() {
 	@Override
 	public void create() throws IOException {
 		FileUtils.mkdir(
-				new File(fileRepository.getDirectory(), Constants.REFTABLE),
+				new File(fileRepository.getCommonDirectory(), Constants.REFTABLE),
 				true);
 	}
 
@@ -615,7 +615,7 @@ public static FileReftableDatabase convertFrom(FileRepository repo,
 		FileReftableDatabase newDb = null;
 		File reftableList = null;
 		try {
-			File reftableDir = new File(repo.getDirectory(),
+			File reftableDir = new File(repo.getCommonDirectory(),
 					Constants.REFTABLE);
 			reftableList = new File(reftableDir, Constants.TABLES_LIST);
 			if (!reftableDir.isDirectory()) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
index e5a00d3..b5d29a3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
@@ -165,7 +165,7 @@ public FileRepository(BaseRepositoryBuilder options) throws IOException {
 			throw new IOException(e.getMessage(), e);
 		}
 		repoConfig = new FileBasedConfig(userConfig, getFS().resolve(
-				getDirectory(), Constants.CONFIG),
+				getCommonDirectory(), Constants.CONFIG),
 				getFS());
 		loadRepoConfig();
 
@@ -193,7 +193,7 @@ public FileRepository(BaseRepositoryBuilder options) throws IOException {
 				options.getObjectDirectory(), //
 				options.getAlternateObjectDirectories(), //
 				getFS(), //
-				new File(getDirectory(), Constants.SHALLOW));
+				new File(getCommonDirectory(), Constants.SHALLOW));
 
 		if (objectDatabase.exists()) {
 			if (repositoryFormatVersion > 1)
@@ -622,16 +622,17 @@ public void autoGC(ProgressMonitor monitor) {
 	 *             on IO problem
 	 */
 	void convertToPackedRefs(boolean writeLogs, boolean backup) throws IOException {
+		File commonDirectory = getCommonDirectory();
 		List<Ref> all = refs.getRefs();
-		File packedRefs = new File(getDirectory(), Constants.PACKED_REFS);
+		File packedRefs = new File(commonDirectory, Constants.PACKED_REFS);
 		if (packedRefs.exists()) {
 			throw new IOException(MessageFormat.format(JGitText.get().fileAlreadyExists,
 				packedRefs.getName()));
 		}
 
-		File refsFile = new File(getDirectory(), "refs"); //$NON-NLS-1$
+		File refsFile = new File(commonDirectory, "refs"); //$NON-NLS-1$
 		File refsHeadsFile = new File(refsFile, "heads");//$NON-NLS-1$
-		File headFile = new File(getDirectory(), Constants.HEAD);
+		File headFile = new File(commonDirectory, Constants.HEAD);
 		FileReftableDatabase oldDb = (FileReftableDatabase) refs;
 
 		// Remove the dummy files that ensure compatibility with older git
@@ -701,7 +702,7 @@ void convertToPackedRefs(boolean writeLogs, boolean backup) throws IOException {
 		}
 
 		if (!backup) {
-			File reftableDir = new File(getDirectory(), Constants.REFTABLE);
+			File reftableDir = new File(commonDirectory, Constants.REFTABLE);
 			FileUtils.delete(reftableDir,
 					FileUtils.RECURSIVE | FileUtils.IGNORE_ERRORS);
 		}
@@ -730,8 +731,10 @@ void convertToPackedRefs(boolean writeLogs, boolean backup) throws IOException {
 	@SuppressWarnings("nls")
 	void convertToReftable(boolean writeLogs, boolean backup)
 			throws IOException {
-		File reftableDir = new File(getDirectory(), Constants.REFTABLE);
-		File headFile = new File(getDirectory(), Constants.HEAD);
+		File commonDirectory = getCommonDirectory();
+		File directory = getDirectory();
+		File reftableDir = new File(commonDirectory, Constants.REFTABLE);
+		File headFile = new File(directory, Constants.HEAD);
 		if (reftableDir.exists() && FileUtils.hasFiles(reftableDir.toPath())) {
 			throw new IOException(JGitText.get().reftableDirExists);
 		}
@@ -739,28 +742,28 @@ void convertToReftable(boolean writeLogs, boolean backup)
 		// Ignore return value, as it is tied to temporary newRefs file.
 		FileReftableDatabase.convertFrom(this, writeLogs);
 
-		File refsFile = new File(getDirectory(), "refs");
+		File refsFile = new File(commonDirectory, "refs");
 
 		// non-atomic: remove old data.
-		File packedRefs = new File(getDirectory(), Constants.PACKED_REFS);
-		File logsDir = new File(getDirectory(), Constants.LOGS);
+		File packedRefs = new File(commonDirectory, Constants.PACKED_REFS);
+		File logsDir = new File(commonDirectory, Constants.LOGS);
 
 		List<String> additional = getRefDatabase().getAdditionalRefs().stream()
 				.map(Ref::getName).collect(toList());
 		additional.add(Constants.HEAD);
 		if (backup) {
-			FileUtils.rename(refsFile, new File(getDirectory(), "refs.old"));
+			FileUtils.rename(refsFile, new File(commonDirectory, "refs.old"));
 			if (packedRefs.exists()) {
-				FileUtils.rename(packedRefs, new File(getDirectory(),
+				FileUtils.rename(packedRefs, new File(commonDirectory,
 						Constants.PACKED_REFS + ".old"));
 			}
 			if (logsDir.exists()) {
 				FileUtils.rename(logsDir,
-						new File(getDirectory(), Constants.LOGS + ".old"));
+						new File(commonDirectory, Constants.LOGS + ".old"));
 			}
 			for (String r : additional) {
-				FileUtils.rename(new File(getDirectory(), r),
-					new File(getDirectory(), r + ".old"));
+				FileUtils.rename(new File(commonDirectory, r),
+						new File(commonDirectory, r + ".old"));
 			}
 		} else {
 			FileUtils.delete(packedRefs, FileUtils.SKIP_MISSING);
@@ -770,7 +773,7 @@ void convertToReftable(boolean writeLogs, boolean backup)
 			FileUtils.delete(refsFile,
 					FileUtils.RECURSIVE | FileUtils.SKIP_MISSING);
 			for (String r : additional) {
-				new File(getDirectory(), r).delete();
+				new File(commonDirectory, r).delete();
 			}
 		}
 
@@ -784,7 +787,7 @@ void convertToReftable(boolean writeLogs, boolean backup)
 
 		// Some tools might write directly into .git/refs/heads/BRANCH. By
 		// putting a file here, this fails spectacularly.
-		FileUtils.createNewFile(new File(refsFile, "heads"));
+		FileUtils.createNewFile(new File(refsFile, Constants.HEADS));
 
 		repoConfig.setString(ConfigConstants.CONFIG_EXTENSIONS_SECTION, null,
 				ConfigConstants.CONFIG_KEY_REF_STORAGE,
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index cf26f8d..4fafc5a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -1047,7 +1047,7 @@ private static boolean isTag(Ref ref) {
 	}
 
 	private void deleteEmptyRefsFolders() throws IOException {
-		Path refs = repo.getDirectory().toPath().resolve(Constants.R_REFS);
+		Path refs = repo.getCommonDirectory().toPath().resolve(Constants.R_REFS);
 		// Avoid deleting a folder that was created after the threshold so that concurrent
 		// operations trying to create a reference are not impacted
 		Instant threshold = Instant.now().minus(30, ChronoUnit.SECONDS);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
index 628bf5d..8647b3e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GcLog.java
@@ -50,7 +50,7 @@ class GcLog {
 	 */
 	GcLog(FileRepository repo) {
 		this.repo = repo;
-		logFile = new File(repo.getDirectory(), "gc.log"); //$NON-NLS-1$
+		logFile = new File(repo.getCommonDirectory(), "gc.log"); //$NON-NLS-1$
 		lock = new LockFile(logFile);
 	}
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
index 11d842b..e8d442b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
@@ -46,7 +46,7 @@ public AttributesNode load() throws IOException {
 
 		FS fs = repository.getFS();
 
-		File attributes = fs.resolve(repository.getDirectory(),
+		File attributes = fs.resolve(repository.getCommonDirectory(),
 				Constants.INFO_ATTRIBUTES);
 		FileRepository.AttributesNodeProviderImpl.loadRulesFromFile(r, attributes);
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
index a3d74be..e172f14 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackObjectSizeIndexV1.java
@@ -12,7 +12,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.UnsupportedEncodingException;
-import java.util.Arrays;
+import java.text.MessageFormat;
 
 import org.eclipse.jgit.internal.JGitText;
 import org.eclipse.jgit.util.NB;
@@ -35,7 +35,7 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
 
 	private final UInt24Array positions24;
 
-	private final int[] positions32;
+	private final IntArray positions32;
 
 	/**
 	 * Parallel array to concat(positions24, positions32) with the size of the
@@ -45,35 +45,37 @@ class PackObjectSizeIndexV1 implements PackObjectSizeIndex {
 	 * doesn't fit in an int and |value|-1 is the position for the size in the
 	 * size64 array e.g. a value of -1 is sizes64[0], -2 = sizes64[1], ...
 	 */
-	private final int[] sizes32;
+	private final IntArray sizes32;
 
-	private final long[] sizes64;
+	private final LongArray sizes64;
 
 	static PackObjectSizeIndex parse(InputStream in) throws IOException {
 		/** Header and version already out of the input */
-		IndexInputStreamReader stream = new IndexInputStreamReader(in);
-		int threshold = stream.readInt(); // minSize
-		int objCount = stream.readInt();
+		byte[] buffer = new byte[8];
+		in.readNBytes(buffer, 0, 8);
+		int threshold = NB.decodeInt32(buffer, 0); // minSize
+		int objCount = NB.decodeInt32(buffer, 4);
 		if (objCount == 0) {
 			return new EmptyPackObjectSizeIndex(threshold);
 		}
-		return new PackObjectSizeIndexV1(stream, threshold, objCount);
+		return new PackObjectSizeIndexV1(in, threshold, objCount);
 	}
 
-	private PackObjectSizeIndexV1(IndexInputStreamReader stream, int threshold,
+	private PackObjectSizeIndexV1(InputStream stream, int threshold,
 			int objCount) throws IOException {
 		this.threshold = threshold;
 		UInt24Array pos24 = null;
-		int[] pos32 = null;
+		IntArray pos32 = null;
 
+		StreamHelper helper = new StreamHelper();
 		byte positionEncoding;
-		while ((positionEncoding = stream.readByte()) != 0) {
+		while ((positionEncoding = helper.readByte(stream)) != 0) {
 			if (Byte.compareUnsigned(positionEncoding, BITS_24) == 0) {
-				int sz = stream.readInt();
+				int sz = helper.readInt(stream);
 				pos24 = new UInt24Array(stream.readNBytes(sz * 3));
 			} else if (Byte.compareUnsigned(positionEncoding, BITS_32) == 0) {
-				int sz = stream.readInt();
-				pos32 = stream.readIntArray(sz);
+				int sz = helper.readInt(stream);
+				pos32 = IntArray.from(stream, sz);
 			} else {
 				throw new UnsupportedEncodingException(
 						String.format(JGitText.get().unknownPositionEncoding,
@@ -81,16 +83,16 @@ private PackObjectSizeIndexV1(IndexInputStreamReader stream, int threshold,
 			}
 		}
 		positions24 = pos24 != null ? pos24 : UInt24Array.EMPTY;
-		positions32 = pos32 != null ? pos32 : new int[0];
+		positions32 = pos32 != null ? pos32 : IntArray.EMPTY;
 
-		sizes32 = stream.readIntArray(objCount);
-		int c64sizes = stream.readInt();
+		sizes32 = IntArray.from(stream, objCount);
+		int c64sizes = helper.readInt(stream);
 		if (c64sizes == 0) {
-			sizes64 = new long[0];
+			sizes64 = LongArray.EMPTY;
 			return;
 		}
-		sizes64 = stream.readLongArray(c64sizes);
-		int c128sizes = stream.readInt();
+		sizes64 = LongArray.from(stream, c64sizes);
+		int c128sizes = helper.readInt(stream);
 		if (c128sizes != 0) {
 			// this MUST be 0 (we don't support 128 bits sizes yet)
 			throw new IOException(JGitText.get().unsupportedSizesObjSizeIndex);
@@ -102,8 +104,8 @@ public long getSize(int idxOffset) {
 		int pos = -1;
 		if (!positions24.isEmpty() && idxOffset <= positions24.getLastValue()) {
 			pos = positions24.binarySearch(idxOffset);
-		} else if (positions32.length > 0 && idxOffset >= positions32[0]) {
-			int pos32 = Arrays.binarySearch(positions32, idxOffset);
+		} else if (!positions32.empty() && idxOffset >= positions32.get(0)) {
+			int pos32 = positions32.binarySearch(idxOffset);
 			if (pos32 >= 0) {
 				pos = pos32 + positions24.size();
 			}
@@ -112,17 +114,17 @@ public long getSize(int idxOffset) {
 			return -1;
 		}
 
-		int objSize = sizes32[pos];
+		int objSize = sizes32.get(pos);
 		if (objSize < 0) {
 			int secondPos = Math.abs(objSize) - 1;
-			return sizes64[secondPos];
+			return sizes64.get(secondPos);
 		}
 		return objSize;
 	}
 
 	@Override
 	public long getObjectCount() {
-		return (long) positions24.size() + positions32.length;
+		return (long) positions24.size() + positions32.size();
 	}
 
 	@Override
@@ -131,19 +133,112 @@ public int getThreshold() {
 	}
 
 	/**
-	 * Wrapper to read parsed content from the byte stream
+	 * A byte[] that should be interpreted as an int[]
 	 */
-	private static class IndexInputStreamReader {
+	private static class IntArray {
+		private static final IntArray EMPTY = new IntArray(new byte[0]);
 
-		private final byte[] buffer = new byte[8];
+		private static final int INT_SIZE = 4;
 
-		private final InputStream in;
+		private final byte[] data;
 
-		IndexInputStreamReader(InputStream in) {
-			this.in = in;
+		private final int size;
+
+		static IntArray from(InputStream in, int ints) throws IOException {
+			int expectedBytes = ints * INT_SIZE;
+			byte[] data = in.readNBytes(expectedBytes);
+			if (data.length < expectedBytes) {
+				throw new IOException(MessageFormat
+						.format(JGitText.get().unableToReadFullArray, ints));
+			}
+			return new IntArray(data);
 		}
 
-		int readInt() throws IOException {
+		private IntArray(byte[] data) {
+			this.data = data;
+			size = data.length / INT_SIZE;
+		}
+
+		/**
+		 * Returns position of element in array, -1 if not there
+		 *
+		 * @param needle
+		 *            element to look for
+		 * @return position of the element in the array or -1 if not found
+		 */
+		int binarySearch(int needle) {
+			if (size == 0) {
+				return -1;
+			}
+			int high = size;
+			int low = 0;
+			do {
+				int mid = (low + high) >>> 1;
+				int cmp = Integer.compare(needle, get(mid));
+				if (cmp < 0)
+					high = mid;
+				else if (cmp == 0) {
+					return mid;
+				} else
+					low = mid + 1;
+			} while (low < high);
+			return -1;
+		}
+
+		int get(int position) {
+			if (position < 0 || position >= size) {
+				throw new IndexOutOfBoundsException(position);
+			}
+			return NB.decodeInt32(data, position * INT_SIZE);
+		}
+
+		boolean empty() {
+			return size == 0;
+		}
+
+		int size() {
+			return size;
+		}
+	}
+
+	/**
+	 * A byte[] that should be interpreted as a long[]
+	 */
+	private static class LongArray {
+		private static final LongArray EMPTY = new LongArray(new byte[0]);
+
+		private static final int LONG_SIZE = 8; // bytes
+
+		private final byte[] data;
+
+		private final int size;
+
+		static LongArray from(InputStream in, int longs) throws IOException {
+			byte[] data = in.readNBytes(longs * LONG_SIZE);
+			if (data.length < longs * LONG_SIZE) {
+				throw new IOException(MessageFormat
+						.format(JGitText.get().unableToReadFullArray, longs));
+			}
+			return new LongArray(data);
+		}
+
+		private LongArray(byte[] data) {
+			this.data = data;
+			size = data.length / LONG_SIZE;
+		}
+
+		long get(int position) {
+			if (position < 0 || position >= size) {
+				throw new IndexOutOfBoundsException(position);
+			}
+			return NB.decodeInt64(data, position * LONG_SIZE);
+		}
+	}
+
+	private static class StreamHelper {
+		private final byte[] buffer = new byte[8];
+
+		int readInt(InputStream in) throws IOException {
 			int n = in.readNBytes(buffer, 0, 4);
 			if (n < 4) {
 				throw new IOException(JGitText.get().unableToReadFullInt);
@@ -151,49 +246,13 @@ int readInt() throws IOException {
 			return NB.decodeInt32(buffer, 0);
 		}
 
-		int[] readIntArray(int intsCount) throws IOException {
-			if (intsCount == 0) {
-				return new int[0];
-			}
-
-			int[] dest = new int[intsCount];
-			for (int i = 0; i < intsCount; i++) {
-				dest[i] = readInt();
-			}
-			return dest;
-		}
-
-		long readLong() throws IOException {
-			int n = in.readNBytes(buffer, 0, 8);
-			if (n < 8) {
-				throw new IOException(JGitText.get().unableToReadFullInt);
-			}
-			return NB.decodeInt64(buffer, 0);
-		}
-
-		long[] readLongArray(int longsCount) throws IOException {
-			if (longsCount == 0) {
-				return new long[0];
-			}
-
-			long[] dest = new long[longsCount];
-			for (int i = 0; i < longsCount; i++) {
-				dest[i] = readLong();
-			}
-			return dest;
-		}
-
-		byte readByte() throws IOException {
+		byte readByte(InputStream in) throws IOException {
 			int n = in.readNBytes(buffer, 0, 1);
 			if (n != 1) {
 				throw new IOException(JGitText.get().cannotReadByte);
 			}
 			return buffer[0];
 		}
-
-		byte[] readNBytes(int sz) throws IOException {
-			return in.readNBytes(sz);
-		}
 	}
 
 	private static class EmptyPackObjectSizeIndex
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index 8e57bf9..6048681 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -16,6 +16,7 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.eclipse.jgit.lib.Constants.HEAD;
 import static org.eclipse.jgit.lib.Constants.LOGS;
+import static org.eclipse.jgit.lib.Constants.L_LOGS;
 import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
 import static org.eclipse.jgit.lib.Constants.PACKED_REFS;
 import static org.eclipse.jgit.lib.Constants.R_HEADS;
@@ -124,6 +125,8 @@ public class RefDirectory extends RefDatabase {
 
 	private final File gitDir;
 
+	private final File gitCommonDir;
+
 	final File refsDir;
 
 	final File packedRefsFile;
@@ -188,6 +191,7 @@ public class RefDirectory extends RefDatabase {
 	RefDirectory(RefDirectory refDb) {
 		parent = refDb.parent;
 		gitDir = refDb.gitDir;
+		gitCommonDir = refDb.gitCommonDir;
 		refsDir = refDb.refsDir;
 		logsDir = refDb.logsDir;
 		logsRefsDir = refDb.logsRefsDir;
@@ -204,10 +208,11 @@ public class RefDirectory extends RefDatabase {
 		final FS fs = db.getFS();
 		parent = db;
 		gitDir = db.getDirectory();
-		refsDir = fs.resolve(gitDir, R_REFS);
-		logsDir = fs.resolve(gitDir, LOGS);
-		logsRefsDir = fs.resolve(gitDir, LOGS + '/' + R_REFS);
-		packedRefsFile = fs.resolve(gitDir, PACKED_REFS);
+		gitCommonDir = db.getCommonDirectory();
+		refsDir = fs.resolve(gitCommonDir, R_REFS);
+		logsDir = fs.resolve(gitCommonDir, LOGS);
+		logsRefsDir = fs.resolve(gitCommonDir, L_LOGS + R_REFS);
+		packedRefsFile = fs.resolve(gitCommonDir, PACKED_REFS);
 
 		looseRefs.set(RefList.<LooseRef> emptyList());
 		packedRefs.set(NO_PACKED_REFS);
@@ -1329,7 +1334,12 @@ File fileFor(String name) {
 			name = name.substring(R_REFS.length());
 			return new File(refsDir, name);
 		}
-		return new File(gitDir, name);
+		// HEAD needs to get resolved from git dir as resolving it from common dir
+		// would always lead back to current default branch
+		if (name.equals(HEAD)) {
+			return new File(gitDir, name);
+		}
+		return new File(gitCommonDir, name);
 	}
 
 	static int levelsIn(String name) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
index 21b5a54..f1888eb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogReaderImpl.java
@@ -10,6 +10,8 @@
 
 package org.eclipse.jgit.internal.storage.file;
 
+import static org.eclipse.jgit.lib.Constants.HEAD;
+
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -37,7 +39,9 @@ class ReflogReaderImpl implements ReflogReader {
 	 *            {@code Ref} name
 	 */
 	ReflogReaderImpl(Repository db, String refname) {
-		logName = new File(db.getDirectory(), Constants.LOGS + '/' + refname);
+		File logBaseDir = refname.equals(HEAD) ? db.getDirectory()
+				: db.getCommonDirectory();
+		logName = new File(logBaseDir, Constants.L_LOGS + refname);
 	}
 
 	/* (non-Javadoc)
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
index 5dfb648..d232be6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
@@ -13,13 +13,17 @@
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_CORE_SECTION;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_BARE;
 import static org.eclipse.jgit.lib.ConfigConstants.CONFIG_KEY_WORKTREE;
+import static org.eclipse.jgit.lib.Constants.CONFIG;
 import static org.eclipse.jgit.lib.Constants.DOT_GIT;
 import static org.eclipse.jgit.lib.Constants.GIT_ALTERNATE_OBJECT_DIRECTORIES_KEY;
 import static org.eclipse.jgit.lib.Constants.GIT_CEILING_DIRECTORIES_KEY;
+import static org.eclipse.jgit.lib.Constants.GIT_COMMON_DIR_KEY;
 import static org.eclipse.jgit.lib.Constants.GIT_DIR_KEY;
 import static org.eclipse.jgit.lib.Constants.GIT_INDEX_FILE_KEY;
 import static org.eclipse.jgit.lib.Constants.GIT_OBJECT_DIRECTORY_KEY;
 import static org.eclipse.jgit.lib.Constants.GIT_WORK_TREE_KEY;
+import static org.eclipse.jgit.lib.Constants.OBJECTS;
+import static org.eclipse.jgit.lib.Constants.GITDIR_FILE;
 
 import java.io.File;
 import java.io.IOException;
@@ -70,7 +74,21 @@ private static boolean isSymRef(byte[] ref) {
 				&& ref[7] == ' ';
 	}
 
-	private static File getSymRef(File workTree, File dotGit, FS fs)
+	/**
+	 * Read symbolic reference file
+	 *
+	 * @param workTree
+	 *            the work tree path
+	 * @param dotGit
+	 *            the .git file
+	 * @param fs
+	 *            the FS utility
+	 * @return the file read from symbolic reference file
+	 * @throws java.io.IOException
+	 *             if the dotGit file is an invalid symbolic reference
+	 * @since 7.0
+	 */
+	static File getSymRef(File workTree, File dotGit, FS fs)
 			throws IOException {
 		byte[] content = IO.readFully(dotGit);
 		if (!isSymRef(content)) {
@@ -102,6 +120,8 @@ private static File getSymRef(File workTree, File dotGit, FS fs)
 
 	private File gitDir;
 
+	private File gitCommonDir;
+
 	private File objectDirectory;
 
 	private List<File> alternateObjectDirectories;
@@ -172,6 +192,30 @@ public File getGitDir() {
 	}
 
 	/**
+	 * Set common dir.
+	 *
+	 * @param gitCommonDir
+	 *            {@code GIT_COMMON_DIR}, the common repository meta directory.
+	 * @return {@code this} (for chaining calls).
+	 * @since 7.0
+	 */
+	public B setGitCommonDir(File gitCommonDir) {
+		this.gitCommonDir = gitCommonDir;
+		this.config = null;
+		return self();
+	}
+
+	/**
+	 * Get common dir.
+	 *
+	 * @return common dir; null if not set.
+	 * @since 7.0
+	 */
+	public File getGitCommonDir() {
+		return gitCommonDir;
+	}
+
+	/**
 	 * Set the directory storing the repository's objects.
 	 *
 	 * @param objectDirectory
@@ -396,9 +440,9 @@ public B setInitialBranch(String branch) throws InvalidRefNameException {
 	 * Read standard Git environment variables and configure from those.
 	 * <p>
 	 * This method tries to read the standard Git environment variables, such as
-	 * {@code GIT_DIR} and {@code GIT_WORK_TREE} to configure this builder
-	 * instance. If an environment variable is set, it overrides the value
-	 * already set in this builder.
+	 * {@code GIT_DIR}, {@code GIT_COMMON_DIR}, {@code GIT_WORK_TREE} etc. to
+	 * configure this builder instance. If an environment variable is set, it
+	 * overrides the value already set in this builder.
 	 *
 	 * @return {@code this} (for chaining calls).
 	 */
@@ -410,9 +454,9 @@ public B readEnvironment() {
 	 * Read standard Git environment variables and configure from those.
 	 * <p>
 	 * This method tries to read the standard Git environment variables, such as
-	 * {@code GIT_DIR} and {@code GIT_WORK_TREE} to configure this builder
-	 * instance. If a property is already set in the builder, the environment
-	 * variable is not used.
+	 * {@code GIT_DIR}, {@code GIT_COMMON_DIR}, {@code GIT_WORK_TREE} etc. to
+	 * configure this builder instance. If a property is already set in the
+	 * builder, the environment variable is not used.
 	 *
 	 * @param sr
 	 *            the SystemReader abstraction to access the environment.
@@ -425,6 +469,13 @@ public B readEnvironment(SystemReader sr) {
 				setGitDir(new File(val));
 		}
 
+		if (getGitCommonDir() == null) {
+			String val = sr.getenv(GIT_COMMON_DIR_KEY);
+			if (val != null) {
+				setGitCommonDir(new File(val));
+			}
+		}
+
 		if (getObjectDirectory() == null) {
 			String val = sr.getenv(GIT_OBJECT_DIRECTORY_KEY);
 			if (val != null)
@@ -601,6 +652,7 @@ public B findGitDir(File current) {
 	public B setup() throws IllegalArgumentException, IOException {
 		requireGitDirOrWorkTree();
 		setupGitDir();
+		setupCommonDir();
 		setupWorkTree();
 		setupInternals();
 		return self();
@@ -658,6 +710,20 @@ protected void setupGitDir() throws IOException {
 	}
 
 	/**
+	 * Perform standard common dir initialization.
+	 *
+	 * @throws java.io.IOException
+	 *             the repository could not be accessed
+	 * @since 7.0
+	 */
+	protected void setupCommonDir() throws IOException {
+		// no gitCommonDir? Try to get it from gitDir
+		if (getGitCommonDir() == null) {
+			setGitCommonDir(safeFS().getCommonDir(getGitDir()));
+		}
+	}
+
+	/**
 	 * Perform standard work-tree initialization.
 	 * <p>
 	 * This is a method typically invoked inside of {@link #setup()}, near the
@@ -695,8 +761,12 @@ protected void setupWorkTree() throws IOException {
 	 *             the repository could not be accessed
 	 */
 	protected void setupInternals() throws IOException {
-		if (getObjectDirectory() == null && getGitDir() != null)
-			setObjectDirectory(safeFS().resolve(getGitDir(), Constants.OBJECTS));
+		if (getObjectDirectory() == null) {
+			File commonDir = getGitCommonDir();
+			if (commonDir != null) {
+				setObjectDirectory(safeFS().resolve(commonDir, OBJECTS));
+			}
+		}
 	}
 
 	/**
@@ -723,12 +793,13 @@ protected Config getConfig() throws IOException {
 	 *             the configuration is not available.
 	 */
 	protected Config loadConfig() throws IOException {
-		if (getGitDir() != null) {
+		File commonDir = getGitCommonDir();
+		if (commonDir != null) {
 			// We only want the repository's configuration file, and not
 			// the user file, as these parameters must be unique to this
 			// repository and not inherited from other files.
 			//
-			File path = safeFS().resolve(getGitDir(), Constants.CONFIG);
+			File path = safeFS().resolve(commonDir, CONFIG);
 			FileBasedConfig cfg = new FileBasedConfig(path, safeFS());
 			try {
 				cfg.load();
@@ -749,8 +820,29 @@ private File guessWorkTreeOrFail() throws IOException {
 		//
 		String path = cfg.getString(CONFIG_CORE_SECTION, null,
 				CONFIG_KEY_WORKTREE);
-		if (path != null)
+		if (path != null) {
 			return safeFS().resolve(getGitDir(), path).getCanonicalFile();
+		}
+
+		/*
+		 * We are in the worktree's $GIT_DIR folder
+		 * ".git/worktrees/<worktree-name>" and want to get the working tree
+		 * (checkout) path. The "gitdir" file here contains the opposite link,
+		 * pointing to the ".git" file located in the working tree; read it and
+		 * convert it to an absolute path if it is relative.
+		 */
+		File gitDirFile = new File(getGitDir(), GITDIR_FILE);
+		if (gitDirFile.isFile()) {
+			String workDirPath = new String(IO.readFully(gitDirFile)).trim();
+			File workTreeDotGitFile = new File(workDirPath);
+			if (!workTreeDotGitFile.isAbsolute()) {
+				workTreeDotGitFile = new File(getGitDir(), workDirPath)
+						.getCanonicalFile();
+			}
+			if (workTreeDotGitFile != null) {
+				return workTreeDotGitFile.getParentFile();
+			}
+		}
 
 		// If core.bare is set, honor its value. Assume workTree is
 		// the parent directory of the repository.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
index 0edf3c5..f4e43c9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ConfigConstants.java
@@ -77,6 +77,9 @@ public final class ConfigConstants {
 	/** The "dfs" section */
 	public static final String CONFIG_DFS_SECTION = "dfs";
 
+	/** The dfs cache subsection prefix */
+	public static final String CONFIG_DFS_CACHE_PREFIX = "dfs.";
+
 	/**
 	 * The "receive" section
 	 * @since 4.6
@@ -332,6 +335,13 @@ public final class ConfigConstants {
 	public static final String CONFIG_KEY_DELTA_BASE_CACHE_LIMIT = "deltaBaseCacheLimit";
 
 	/**
+	 * The "packExtensions" key
+	 *
+	 * @since 7.0
+	 **/
+	public static final String CONFIG_KEY_PACK_EXTENSIONS = "packExtensions";
+
+	/**
 	 * The "symlinks" key
 	 * @since 3.3
 	 */
@@ -1012,4 +1022,11 @@ public final class ConfigConstants {
 	 * @since 6.7
 	 */
 	public static final String CONFIG_KEY_READ_CHANGED_PATHS = "readChangedPaths";
+
+	/**
+	 * The "useObjectSizeIndex" key
+	 *
+	 * @since 7.0
+	 */
+	public static final String CONFIG_KEY_USE_OBJECT_SIZE_INDEX = "useObjectSizeIndex";
 }
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
index 1835dc7..b9c90bd 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Constants.java
@@ -273,6 +273,20 @@ public final class Constants {
 	public static final String INFO_REFS = "info/refs";
 
 	/**
+	 * Name of heads folder or file in refs.
+	 *
+	 * @since 7.0
+	 */
+	public static final String HEADS = "heads";
+
+	/**
+	 * Prefix for any log.
+	 *
+	 * @since 7.0
+	 */
+	public static final String L_LOGS = LOGS + "/";
+
+	/**
 	 * Info alternates file (goes under OBJECTS)
 	 * @since 5.5
 	 */
@@ -358,6 +372,14 @@ public final class Constants {
 	public static final String GIT_DIR_KEY = "GIT_DIR";
 
 	/**
+	 * The environment variable that tells us which directory is the common
+	 * ".git" directory.
+	 *
+	 * @since 7.0
+	 */
+	public static final String GIT_COMMON_DIR_KEY = "GIT_COMMON_DIR";
+
+	/**
 	 * The environment variable that tells us which directory is the working
 	 * directory.
 	 */
@@ -459,6 +481,36 @@ public final class Constants {
 	public static final String GITDIR = "gitdir: ";
 
 	/**
+	 * Name of the file (inside gitDir) that references the worktree's .git
+	 * file (opposite link).
+	 *
+	 * .git/worktrees/&lt;worktree-name&gt;/gitdir
+	 *
+	 * A text file containing the absolute path back to the .git file that
+	 * points here. This file is used to verify if the linked repository has been
+	 * manually removed in which case this directory is no longer needed.
+	 * The modification time (mtime) of this file should be updated each time
+	 * the linked repository is accessed.
+	 *
+	 * @since 7.0
+	 */
+	public static final String GITDIR_FILE = "gitdir";
+
+	/**
+	 * Name of the file (inside gitDir) that has reference to $GIT_COMMON_DIR.
+	 *
+	 * .git/worktrees/&lt;worktree-name&gt;/commondir
+	 *
+	 * If this file exists, $GIT_COMMON_DIR will be set to the path specified in
+	 * this file unless it is explicitly set. If the specified path is relative,
+	 * it is relative to $GIT_DIR. The repository with commondir is incomplete
+	 * without the repository pointed by "commondir".
+	 *
+	 * @since 7.0
+	 */
+	public static final String COMMONDIR_FILE = "commondir";
+
+	/**
 	 * Name of the folder (inside gitDir) where submodules are stored
 	 *
 	 * @since 3.6
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
index 427a235..4b0c079 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/GpgConfig.java
@@ -24,7 +24,9 @@ public enum GpgFormat implements Config.ConfigEnum {
 		/** Value for openpgp */
 		OPENPGP("openpgp"), //$NON-NLS-1$
 		/** Value for x509 */
-		X509("x509"); //$NON-NLS-1$
+		X509("x509"), //$NON-NLS-1$
+		/** Value for ssh */
+		SSH("ssh"); //$NON-NLS-1$
 
 		private final String configValue;
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
index 8e965c5..a99c647 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
@@ -639,7 +639,7 @@ public boolean diff(ProgressMonitor monitor, int estWorkTreeSize,
 							// submodule repository in .git/modules doesn't
 							// exist yet it isn't "missing".
 							File gitDir = new File(
-									new File(repository.getDirectory(),
+									new File(repository.getCommonDirectory(),
 											Constants.MODULES),
 									subRepoPath);
 							if (!gitDir.isDirectory()) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
index 4722e29..9dde99f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
@@ -113,9 +113,12 @@ public static ListenerList getGlobalListenerList() {
 
 	final AtomicLong closedAt = new AtomicLong();
 
-	/** Metadata directory holding the repository's critical files. */
+	/** $GIT_DIR: metadata directory holding the repository's critical files. */
 	private final File gitDir;
 
+	/** $GIT_COMMON_DIR: metadata directory holding the repository's common critical files. */
+	private final File gitCommonDir;
+
 	/** File abstraction used to resolve paths. */
 	private final FS fs;
 
@@ -137,6 +140,7 @@ public static ListenerList getGlobalListenerList() {
 	 */
 	protected Repository(BaseRepositoryBuilder options) {
 		gitDir = options.getGitDir();
+		gitCommonDir = options.getGitCommonDir();
 		fs = options.getFS();
 		workTree = options.getWorkTree();
 		indexFile = options.getIndexFile();
@@ -220,6 +224,16 @@ public File getDirectory() {
 	public abstract String getIdentifier();
 
 	/**
+	 * Get common dir.
+	 *
+	 * @return $GIT_COMMON_DIR: local common metadata directory.
+	 * @since 7.0
+	 */
+	public File getCommonDirectory() {
+		return gitCommonDir;
+	}
+
+	/**
 	 * Get the object database which stores this repository's data.
 	 *
 	 * @return the object database which stores this repository's data.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
index 6288447..1836654 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
@@ -450,10 +450,21 @@ public String toString() {
 		 *         Git directory.
 		 */
 		public static boolean isGitRepository(File dir, FS fs) {
-			return fs.resolve(dir, Constants.OBJECTS).exists()
-					&& fs.resolve(dir, "refs").exists() //$NON-NLS-1$
-					&& (fs.resolve(dir, Constants.REFTABLE).exists()
-							|| isValidHead(new File(dir, Constants.HEAD)));
+			// check if common-dir available or fallback to git-dir
+			File commonDir;
+			try {
+				commonDir = fs.getCommonDir(dir);
+			} catch (IOException e) {
+				commonDir = null;
+			}
+			if (commonDir == null) {
+				commonDir = dir;
+			}
+			return fs.resolve(commonDir, Constants.OBJECTS).exists()
+					&& fs.resolve(commonDir, "refs").exists() //$NON-NLS-1$
+					&& (fs.resolve(commonDir, Constants.REFTABLE).exists()
+							|| isValidHead(
+									new File(commonDir, Constants.HEAD)));
 		}
 
 		private static boolean isValidHead(File head) {
@@ -496,15 +507,31 @@ private static String readFirstLine(File head) {
 		 *         null if there is no suitable match.
 		 */
 		public static File resolve(File directory, FS fs) {
-			if (isGitRepository(directory, fs))
+			// the folder itself
+			if (isGitRepository(directory, fs)) {
 				return directory;
-			if (isGitRepository(new File(directory, Constants.DOT_GIT), fs))
-				return new File(directory, Constants.DOT_GIT);
-
-			final String name = directory.getName();
-			final File parent = directory.getParentFile();
-			if (isGitRepository(new File(parent, name + Constants.DOT_GIT_EXT), fs))
-				return new File(parent, name + Constants.DOT_GIT_EXT);
+			}
+			// the .git subfolder or file (reference)
+			File dotDir = new File(directory, Constants.DOT_GIT);
+			if (dotDir.isFile()) {
+				try {
+					File refDir = BaseRepositoryBuilder.getSymRef(directory,
+							dotDir, fs);
+					if (refDir != null && isGitRepository(refDir, fs)) {
+						return refDir;
+					}
+				} catch (IOException ignored) {
+					// Continue searching if gitdir ref isn't found
+				}
+			} else if (isGitRepository(dotDir, fs)) {
+				return dotDir;
+			}
+			// the folder extended with .git (bare)
+			File bareDir = new File(directory.getParentFile(),
+					directory.getName() + Constants.DOT_GIT_EXT);
+			if (isGitRepository(bareDir, fs)) {
+				return bareDir;
+			}
 			return null;
 		}
 	}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
index 0fc9710..f77b041 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportGitSsh.java
@@ -254,6 +254,12 @@ private ProcessBuilder createProcess(List<String> args,
 				pb.environment().put(Constants.GIT_DIR_KEY,
 						directory.getPath());
 			}
+			File commonDirectory = local != null ? local.getCommonDirectory()
+					: null;
+			if (commonDirectory != null) {
+				pb.environment().put(Constants.GIT_COMMON_DIR_KEY,
+						commonDirectory.getPath());
+			}
 			return pb;
 		}
 
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportLocal.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportLocal.java
index 3a06ce5..1b9431c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportLocal.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportLocal.java
@@ -225,6 +225,7 @@ private Process spawn(String cmd,
 			env.remove("GIT_CONFIG"); //$NON-NLS-1$
 			env.remove("GIT_CONFIG_PARAMETERS"); //$NON-NLS-1$
 			env.remove("GIT_DIR"); //$NON-NLS-1$
+			env.remove("GIT_COMMON_DIR"); //$NON-NLS-1$
 			env.remove("GIT_WORK_TREE"); //$NON-NLS-1$
 			env.remove("GIT_GRAFT_FILE"); //$NON-NLS-1$
 			env.remove("GIT_INDEX_FILE"); //$NON-NLS-1$
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeIterator.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeIterator.java
index 73a3dda..95e9964 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeIterator.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeIterator.java
@@ -498,6 +498,8 @@ private InputStream filterClean(InputStream in)
 			filterProcessBuilder.directory(repository.getWorkTree());
 			filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY,
 					repository.getDirectory().getAbsolutePath());
+			filterProcessBuilder.environment().put(Constants.GIT_COMMON_DIR_KEY,
+					repository.getCommonDirectory().getAbsolutePath());
 			ExecutionResult result;
 			try {
 				result = fs.execute(filterProcessBuilder, in);
@@ -1332,7 +1334,7 @@ IgnoreNode load(IgnoreNode parent) throws IOException {
 
 			IgnoreNode infoExclude = new IgnoreNodeWithParent(
 					coreExclude);
-			File exclude = fs.resolve(repository.getDirectory(),
+			File exclude = fs.resolve(repository.getCommonDirectory(),
 					Constants.INFO_EXCLUDE);
 			if (fs.exists(exclude)) {
 				loadRulesFromFile(infoExclude, exclude);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
index a8e1dae..6933a6c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/FS.java
@@ -2042,6 +2042,8 @@ protected ProcessResult internalRunHookIfPresent(Repository repository,
 		environment.put(Constants.GIT_DIR_KEY,
 				repository.getDirectory().getAbsolutePath());
 		if (!repository.isBare()) {
+			environment.put(Constants.GIT_COMMON_DIR_KEY,
+					repository.getCommonDirectory().getAbsolutePath());
 			environment.put(Constants.GIT_WORK_TREE_KEY,
 					repository.getWorkTree().getAbsolutePath());
 		}
@@ -2137,7 +2139,7 @@ private File getRunDirectory(Repository repository,
 		case "post-receive": //$NON-NLS-1$
 		case "post-update": //$NON-NLS-1$
 		case "push-to-checkout": //$NON-NLS-1$
-			return repository.getDirectory();
+			return repository.getCommonDirectory();
 		default:
 			return repository.getWorkTree();
 		}
@@ -2150,7 +2152,7 @@ private File getHooksDirectory(Repository repository) {
 		if (hooksDir != null) {
 			return new File(hooksDir);
 		}
-		File dir = repository.getDirectory();
+		File dir = repository.getCommonDirectory();
 		return dir == null ? null : new File(dir, Constants.HOOKS);
 	}
 
@@ -2578,6 +2580,33 @@ public String normalize(String name) {
 	}
 
 	/**
+	 * Get the path of the common directory ($GIT_COMMON_DIR).
+	 *
+	 * @param dir
+	 *            the .git folder
+	 * @return common dir path
+	 * @throws IOException
+	 *             if the "commondir" file cannot be read
+	 *
+	 * @since 7.0
+	 */
+	public File getCommonDir(File dir) throws IOException {
+		// By default, GIT_COMMON_DIR is the same as GIT_DIR
+		File commonDir = dir;
+		// If a "commondir" file exists (e.g. in a worktree repository), it
+		// points at the real common directory
+		File commonDirFile = new File(dir, Constants.COMMONDIR_FILE);
+		if (commonDirFile.isFile()) {
+			String commonDirPath = new String(IO.readFully(commonDirFile))
+					.trim();
+			commonDir = new File(commonDirPath);
+			if (!commonDir.isAbsolute()) {
+				commonDir = new File(dir, commonDirPath).getCanonicalFile();
+			}
+		}
+		return commonDir;
+	}
+
+	/**
 	 * This runnable will consume an input stream's content into an output
 	 * stream as soon as it gets available.
 	 * <p>