Merge "Reduce synchronized scope around ConcurrentHashMap"
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index 05627ed..f7decf1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -263,20 +263,22 @@ DfsPackFile getOrCreate(DfsPackDescription dsc, DfsPackKey key) {
 		// TODO This table grows without bound. It needs to clean up
 		// entries that aren't in cache anymore, and aren't being used
 		// by a live DfsObjDatabase reference.
-		synchronized (packCache) {
-			DfsPackFile pack = packCache.get(dsc);
-			if (pack != null && pack.invalid()) {
-				packCache.remove(dsc);
-				pack = null;
-			}
-			if (pack == null) {
-				if (key == null)
-					key = new DfsPackKey();
-				pack = new DfsPackFile(this, dsc, key);
-				packCache.put(dsc, pack);
-			}
+
+		DfsPackFile pack = packCache.get(dsc);
+		if (pack != null && !pack.invalid()) {
 			return pack;
 		}
+
+		// 'pack' either didn't exist or was invalid. Compute a new
+		// entry; compute() makes the check-and-replace atomic per key.
+		return packCache.compute(dsc, (k, v) -> {
+			if (v != null && !v.invalid()) { // valid value added by
+				return v;                    // another thread
+			} else {
+				return new DfsPackFile(
+						this, dsc, key != null ? key : new DfsPackKey());
+			}
+		});
 	}
 
 	private int hash(int packHash, long off) {
@@ -504,9 +506,7 @@ private <T> Ref<T> scanRef(HashEntry n, DfsPackKey pack, long position) {
 	}
 
 	void remove(DfsPackFile pack) {
-		synchronized (packCache) {
-			packCache.remove(pack.getPackDescription());
-		}
+		packCache.remove(pack.getPackDescription());
 	}
 
 	private int slot(DfsPackKey pack, long position) {
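
For reference, below is a minimal standalone sketch of the locking pattern
adopted above, assuming a toy Entry type with an invalid() flag as a
stand-in for DfsPackFile; the class and method names are illustrative,
not JGit API. A plain get() on a ConcurrentHashMap needs no external
lock, and compute() makes the slow-path check-and-replace atomic per
key, which is what allows the synchronized (packCache) blocks to be
dropped.

import java.util.concurrent.ConcurrentHashMap;

class PackCacheSketch {
	// Stand-in for DfsPackFile: cheap to construct, can become invalid.
	static final class Entry {
		private volatile boolean invalid;
		boolean invalid() { return invalid; }
		void invalidate() { invalid = true; }
	}

	private final ConcurrentHashMap<String, Entry> cache =
			new ConcurrentHashMap<>();

	Entry getOrCreate(String key) {
		// Fast path: reads on a ConcurrentHashMap do not block,
		// so cache hits no longer contend on a monitor.
		Entry e = cache.get(key);
		if (e != null && !e.invalid()) {
			return e;
		}
		// Slow path: compute() runs the remapping function
		// atomically for this key, so two racing threads cannot
		// both install a fresh Entry; the loser returns the value
		// the winner stored.
		return cache.compute(key, (k, v) -> {
			if (v != null && !v.invalid()) {
				return v; // valid entry added by another thread
			}
			return new Entry(); // replaces a missing or invalid mapping
		});
	}

	public static void main(String[] args) {
		PackCacheSketch c = new PackCacheSketch();
		Entry first = c.getOrCreate("pack-1");
		assert first == c.getOrCreate("pack-1"); // cache hit
		first.invalidate();
		assert first != c.getOrCreate("pack-1"); // invalid entry replaced
	}
}

One tradeoff to note: compute() holds a lock on the key's hash bin while
the remapping function runs, and the ConcurrentHashMap javadoc asks that
the function be short, simple, and not update other mappings of the same
map. The plain constructor call above satisfies that, and the
DfsPackFile constructor appears similarly lightweight, so holding the
bin lock across it is acceptable.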