Merge "Don't serialize internal hash collision chain link"
diff --git a/.buckversion b/.buckversion
index af38772..560aff2 100644
--- a/.buckversion
+++ b/.buckversion
@@ -1 +1 @@
-7b7817c48f30687781040b2b82ac9218d5c4eaa4
+d6949e1440ef2048d697c637a4adae1b509bf72d
diff --git a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/FileLfsServlet.java b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/FileLfsServlet.java
index 39aaa91..cce021c 100644
--- a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/FileLfsServlet.java
+++ b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/FileLfsServlet.java
@@ -173,7 +173,19 @@ static class Error {
}
}
- static void sendError(HttpServletResponse rsp, int status, String message)
+ /**
+ * Send an error response.
+ *
+ * @param rsp
+ * the servlet response
+ * @param status
+ * HTTP status code
+ * @param message
+ * error message
+ * @throws IOException
+ * on failure to send the response
+ */
+ protected static void sendError(HttpServletResponse rsp, int status, String message)
throws IOException {
rsp.setStatus(status);
PrintWriter writer = rsp.getWriter();
diff --git a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/ObjectUploadListener.java b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/ObjectUploadListener.java
index bb31743..84e4e6f 100644
--- a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/ObjectUploadListener.java
+++ b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/fs/ObjectUploadListener.java
@@ -64,9 +64,11 @@
import org.eclipse.jgit.lfs.lib.Constants;
/**
- * Handle asynchronous object upload
+ * Handle asynchronous object upload.
+ *
+ * @since 4.6
*/
-class ObjectUploadListener implements ReadListener {
+public class ObjectUploadListener implements ReadListener {
private static Logger LOG = Logger
.getLogger(ObjectUploadListener.class.getName());
@@ -139,6 +141,9 @@ public void onAllDataRead() throws IOException {
close();
}
+ /**
+ * @throws IOException if an error occurs while closing the channels
+ */
protected void close() throws IOException {
try {
inChannel.close();
@@ -174,4 +179,4 @@ public void onError(Throwable e) {
LOG.log(Level.SEVERE, ex.getMessage(), ex);
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/CleanFilter.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/CleanFilter.java
index f7b55e5..66feca7 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/CleanFilter.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/CleanFilter.java
@@ -47,15 +47,14 @@
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.security.DigestOutputStream;
+import java.nio.file.StandardCopyOption;
import org.eclipse.jgit.attributes.FilterCommand;
import org.eclipse.jgit.attributes.FilterCommandFactory;
import org.eclipse.jgit.attributes.FilterCommandRegistry;
import org.eclipse.jgit.lfs.errors.CorruptMediaFile;
-import org.eclipse.jgit.lfs.lib.Constants;
-import org.eclipse.jgit.lfs.lib.LongObjectId;
+import org.eclipse.jgit.lfs.internal.AtomicObjectOutputStream;
+import org.eclipse.jgit.lfs.lib.AnyLongObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.FileUtils;
@@ -97,12 +96,8 @@ public final static void register() {
FACTORY);
}
- // The OutputStream to a temporary file which will be renamed to mediafile
- // when the operation succeeds
- private OutputStream tmpOut;
-
// Used to compute the hash for the original content
- private DigestOutputStream dOut;
+ private AtomicObjectOutputStream aOut;
private Lfs lfsUtil;
@@ -132,34 +127,33 @@ public CleanFilter(Repository db, InputStream in, OutputStream out)
lfsUtil = new Lfs(db.getDirectory().toPath().resolve("lfs")); //$NON-NLS-1$
Files.createDirectories(lfsUtil.getLfsTmpDir());
tmpFile = lfsUtil.createTmpFile();
- tmpOut = Files.newOutputStream(tmpFile,
- StandardOpenOption.CREATE);
- this.dOut = new DigestOutputStream(
- tmpOut,
- Constants.newMessageDigest());
+ this.aOut = new AtomicObjectOutputStream(tmpFile.toAbsolutePath());
}
public int run() throws IOException {
try {
- int b = in.read();
- if (b != -1) {
- dOut.write(b);
- size++;
- return 1;
+ byte[] buf = new byte[8192];
+ int length = in.read(buf);
+ if (length != -1) {
+ aOut.write(buf, 0, length);
+ size += length;
+ return length;
} else {
- dOut.close();
- tmpOut.close();
- LongObjectId loid = LongObjectId
- .fromRaw(dOut.getMessageDigest().digest());
+ aOut.close();
+ AnyLongObjectId loid = aOut.getId();
+ aOut = null;
Path mediaFile = lfsUtil.getMediaFile(loid);
if (Files.isRegularFile(mediaFile)) {
long fsSize = Files.size(mediaFile);
if (fsSize != size) {
throw new CorruptMediaFile(mediaFile, size, fsSize);
+ } else {
+ FileUtils.delete(tmpFile.toFile());
}
} else {
FileUtils.mkdirs(mediaFile.getParent().toFile(), true);
- FileUtils.rename(tmpFile.toFile(), mediaFile.toFile());
+ FileUtils.rename(tmpFile.toFile(), mediaFile.toFile(),
+ StandardCopyOption.ATOMIC_MOVE);
}
LfsPointer lfsPointer = new LfsPointer(loid, size);
lfsPointer.encode(out);
@@ -167,9 +161,10 @@ public int run() throws IOException {
return -1;
}
} catch (IOException e) {
+ if (aOut != null) {
+ aOut.abort();
+ }
out.close();
- dOut.close();
- tmpOut.close();
throw e;
}
}
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/Lfs.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/Lfs.java
index f099c5a..75e34e0 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/Lfs.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/Lfs.java
@@ -46,7 +46,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
-import org.eclipse.jgit.lfs.lib.LongObjectId;
+import org.eclipse.jgit.lfs.lib.AnyLongObjectId;
/**
* Class which represents the lfs folder hierarchy inside a .git folder
@@ -104,8 +104,8 @@ public Path getLfsObjDir() {
* underneath
* "<repo>/.git/lfs/objects/<firstTwoLettersOfID>/<remainingLettersOfID>"
*/
- public Path getMediaFile(LongObjectId id) {
- String idStr = LongObjectId.toString(id);
+ public Path getMediaFile(AnyLongObjectId id) {
+ String idStr = id.name();
return getLfsObjDir().resolve(idStr.substring(0, 2))
.resolve(idStr.substring(2));
}
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/LfsPointer.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/LfsPointer.java
index e43cb25..c0cf233 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/LfsPointer.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/LfsPointer.java
@@ -50,6 +50,7 @@
import java.io.PrintStream;
import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.lfs.lib.AnyLongObjectId;
import org.eclipse.jgit.lfs.lib.Constants;
import org.eclipse.jgit.lfs.lib.LongObjectId;
@@ -71,7 +72,7 @@ public class LfsPointer {
public static final String HASH_FUNCTION_NAME = Constants.LONG_HASH_FUNCTION
.toLowerCase().replace("-", ""); //$NON-NLS-1$ //$NON-NLS-2$
- private LongObjectId oid;
+ private AnyLongObjectId oid;
private long size;
@@ -81,7 +82,7 @@ public class LfsPointer {
* @param size
* the size of the content
*/
- public LfsPointer(LongObjectId oid, long size) {
+ public LfsPointer(AnyLongObjectId oid, long size) {
this.oid = oid;
this.size = size;
}
@@ -89,7 +90,7 @@ public LfsPointer(LongObjectId oid, long size) {
/**
* @return the id of the content
*/
- public LongObjectId getOid() {
+ public AnyLongObjectId getOid() {
return oid;
}
@@ -112,7 +113,7 @@ public void encode(OutputStream out) {
ps.print("version "); //$NON-NLS-1$
ps.println(VERSION);
ps.print("oid " + HASH_FUNCTION_NAME + ":"); //$NON-NLS-1$ //$NON-NLS-2$
- ps.println(LongObjectId.toString(oid));
+ ps.println(oid.name());
ps.print("size "); //$NON-NLS-1$
ps.println(size);
}
@@ -160,7 +161,7 @@ public static LfsPointer parseLfsPointer(InputStream in)
@Override
public String toString() {
- return "LfsPointer: oid=" + LongObjectId.toString(oid) + ", size=" //$NON-NLS-1$ //$NON-NLS-2$
+ return "LfsPointer: oid=" + oid.name() + ", size=" //$NON-NLS-1$ //$NON-NLS-2$
+ size;
}
}
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
index 7e050b1..867cca5 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
@@ -48,6 +48,7 @@
import java.security.DigestOutputStream;
import java.text.MessageFormat;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.storage.file.LockFile;
import org.eclipse.jgit.lfs.errors.CorruptLongObjectException;
import org.eclipse.jgit.lfs.lib.AnyLongObjectId;
@@ -83,6 +84,22 @@ public AtomicObjectOutputStream(Path path, AnyLongObjectId id)
Constants.newMessageDigest());
}
+ /**
+ * @param path the path of the file the object is written to atomically
+ * @throws IOException on failure to create or lock the output file
+ */
+ public AtomicObjectOutputStream(Path path) throws IOException {
+ this(path, null);
+ }
+
+ /**
+ * @return content hash of the object which was streamed through this
+ * stream. May return {@code null} if called before closing this stream.
+ */
+ public @Nullable AnyLongObjectId getId() {
+ return id;
+ }
+
@Override
public void write(int b) throws IOException {
out.write(b);
@@ -102,7 +119,11 @@ public void write(byte[] b, int off, int len) throws IOException {
public void close() throws IOException {
out.close();
if (!aborted) {
- verifyHash();
+ if (id != null) {
+ verifyHash();
+ } else {
+ id = LongObjectId.fromRaw(out.getMessageDigest().digest());
+ }
locked.commit();
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
new file mode 100644
index 0000000..e612061
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfigTest.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2016, Philipp Marx <philippmarx@gmx.de> and
+ * other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Distribution License v1.0 which accompanies this
+ * distribution, is reproduced below, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from this
+ * software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.is;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+public class DfsBlockCacheConfigTest {
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void blockSizeNotPowerOfTwoExpectsException() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage(is(JGitText.get().blockSizeNotPowerOf2));
+
+ new DfsBlockCacheConfig().setBlockSize(1000);
+ }
+
+ @Test
+ @SuppressWarnings("boxing")
+ public void negativeBlockSizeIsConvertedToDefault() {
+ DfsBlockCacheConfig config = new DfsBlockCacheConfig();
+ config.setBlockSize(-1);
+
+ assertThat(config.getBlockSize(), is(512));
+ }
+
+ @Test
+ @SuppressWarnings("boxing")
+ public void tooSmallBlockSizeIsConvertedToDefault() {
+ DfsBlockCacheConfig config = new DfsBlockCacheConfig();
+ config.setBlockSize(10);
+
+ assertThat(config.getBlockSize(), is(512));
+ }
+
+ @Test
+ @SuppressWarnings("boxing")
+ public void validBlockSize() {
+ DfsBlockCacheConfig config = new DfsBlockCacheConfig();
+ config.setBlockSize(65536);
+
+ assertThat(config.getBlockSize(), is(65536));
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/AutoGcTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/AutoGcTest.java
new file mode 100644
index 0000000..56994a6
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/AutoGcTest.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2016, Matthias Sohn <matthias.sohn@sap.com>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.internal.storage.file;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.eclipse.jgit.lib.ConfigConstants;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
+import org.eclipse.jgit.test.resources.SampleDataRepositoryTestCase;
+import org.junit.Test;
+
+public class AutoGcTest extends GcTestCase {
+
+ @Test
+ public void testNotTooManyLooseObjects() {
+ assertFalse("should not find too many loose objects",
+ gc.tooManyLooseObjects());
+ }
+
+ @Test
+ public void testTooManyLooseObjects() throws Exception {
+ FileBasedConfig c = repo.getConfig();
+ c.setInt(ConfigConstants.CONFIG_GC_SECTION, null,
+ ConfigConstants.CONFIG_KEY_AUTO, 255);
+ c.save();
+ commitChain(10, 50);
+ assertTrue("should find too many loose objects",
+ gc.tooManyLooseObjects());
+ }
+
+ @Test
+ public void testNotTooManyPacks() {
+ assertFalse("should not find too many packs", gc.tooManyPacks());
+ }
+
+ @Test
+ public void testTooManyPacks() throws Exception {
+ FileBasedConfig c = repo.getConfig();
+ c.setInt(ConfigConstants.CONFIG_GC_SECTION, null,
+ ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT, 1);
+ c.save();
+ SampleDataRepositoryTestCase.copyCGitTestPacks(repo);
+
+ assertTrue("should find too many packs", gc.tooManyPacks());
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcTestCase.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcTestCase.java
index e463285..90c1152 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcTestCase.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcTestCase.java
@@ -105,6 +105,46 @@ protected RevCommit commitChain(int depth) throws Exception {
return tip;
}
+ /**
+ * Create a chain of commits of given depth with given number of added files
+ * per commit.
+ * <p>
+ * Each commit contains {@code width} files as its content. The created
+ * commit chain is not referenced from any ref.
+ * <p>
+ * A chain will create {@code (2 + width) * depth} objects in Git's object
+ * database. For each depth level the following objects are created: the
+ * commit object, the top-level tree object and {@code width} blobs for the
+ * content of the file "a".
+ *
+ * @param depth
+ * the depth of the commit chain.
+ * @param width
+ * number of files added per commit
+ * @return the commit that is the tip of the commit chain
+ * @throws Exception
+ */
+ protected RevCommit commitChain(int depth, int width) throws Exception {
+ if (depth <= 0) {
+ throw new IllegalArgumentException("Chain depth must be > 0");
+ }
+ if (width <= 0) {
+ throw new IllegalArgumentException("Number of files per commit must be > 0");
+ }
+ CommitBuilder cb = tr.commit();
+ RevCommit tip = null;
+ do {
+ --depth;
+ for (int i=0; i < width; i++) {
+ String id = depth + "-" + i;
+ cb.add("a" + id, id).message(id);
+ }
+ tip = cb.create();
+ cb = cb.child();
+ } while (depth > 0);
+ return tip;
+ }
+
protected long lastModified(AnyObjectId objectId) throws IOException {
return repo.getFS().lastModified(
repo.getObjectDatabase().fileFor(objectId));
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileTest.java
index ba07d68..1c10bb3 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/PackFileTest.java
@@ -310,6 +310,27 @@ public void testDelta_FailsOver2GiB() throws Exception {
}
}
+ @Test
+ public void testConfigurableStreamFileThreshold() throws Exception {
+ byte[] data = getRng().nextBytes(300);
+ RevBlob id = tr.blob(data);
+ tr.branch("master").commit().add("A", id).create();
+ tr.packAndPrune();
+ assertTrue("has blob", wc.has(id));
+
+ ObjectLoader ol = wc.open(id);
+ ObjectStream in = ol.openStream();
+ assertTrue(in instanceof ObjectStream.SmallStream);
+ assertEquals(300, in.available());
+ in.close();
+
+ wc.setStreamFileThreshold(299);
+ ol = wc.open(id);
+ in = ol.openStream();
+ assertTrue(in instanceof ObjectStream.Filter);
+ assertEquals(1, in.available());
+ }
+
private static byte[] clone(int first, byte[] base) {
byte[] r = new byte[base.length];
System.arraycopy(base, 1, r, 1, r.length - 1);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/test/resources/SampleDataRepositoryTestCase.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/test/resources/SampleDataRepositoryTestCase.java
index 3a3b3d8..a57ef40 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/test/resources/SampleDataRepositoryTestCase.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/test/resources/SampleDataRepositoryTestCase.java
@@ -47,7 +47,9 @@
package org.eclipse.jgit.test.resources;
import java.io.File;
+import java.io.IOException;
+import org.eclipse.jgit.internal.storage.file.FileRepository;
import org.eclipse.jgit.junit.JGitTestUtil;
import org.eclipse.jgit.junit.RepositoryTestCase;
@@ -57,7 +59,17 @@ public abstract class SampleDataRepositoryTestCase extends RepositoryTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
+ copyCGitTestPacks(db);
+ }
+ /**
+ * Copy C Git generated pack files into given repository for testing
+ *
+ * @param repo
+ * test repository to receive packfile copies
+ * @throws IOException
+ */
+ public static void copyCGitTestPacks(FileRepository repo) throws IOException {
final String[] packs = {
"pack-34be9032ac282b11fa9babdc2b2a93ca996c9c2f",
"pack-df2982f284bbabb6bdb59ee3fcc6eb0983e20371",
@@ -67,13 +79,14 @@ public void setUp() throws Exception {
"pack-e6d07037cbcf13376308a0a995d1fa48f8f76aaa",
"pack-3280af9c07ee18a87705ef50b0cc4cd20266cf12"
};
- final File packDir = new File(db.getObjectDatabase().getDirectory(), "pack");
+ final File packDir = new File(repo.getObjectDatabase().getDirectory(),
+ "pack");
for (String n : packs) {
JGitTestUtil.copyTestResource(n + ".pack", new File(packDir, n + ".pack"));
JGitTestUtil.copyTestResource(n + ".idx", new File(packDir, n + ".idx"));
}
JGitTestUtil.copyTestResource("packed-refs",
- new File(db.getDirectory(), "packed-refs"));
+ new File(repo.getDirectory(), "packed-refs"));
}
}
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 399436d..0f2c8b3 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -39,6 +39,7 @@
blameNotCommittedYet=Not Committed Yet
blobNotFound=Blob not found: {0}
blobNotFoundForPath=Blob not found: {0} for path: {1}
+blockSizeNotPowerOf2=blockSize must be a power of 2
branchNameInvalid=Branch name {0} is not allowed
buildingBitmaps=Building bitmaps
cachedPacksPreventsIndexCreation=Using cached packs prevents index creation
@@ -115,6 +116,7 @@
checkoutUnexpectedResult=Checkout returned unexpected result {0}
classCastNotA=Not a {0}
cloneNonEmptyDirectory=Destination path "{0}" already exists and is not an empty directory
+closed=closed
collisionOn=Collision on {0}
commandRejectedByHook=Rejected by "{0}" hook.\n{1}
commandWasCalledInTheWrongState=Command {0} was called in the wrong state
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index 00d0191..212cb7f 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -98,6 +98,7 @@ public static JGitText get() {
/***/ public String blameNotCommittedYet;
/***/ public String blobNotFound;
/***/ public String blobNotFoundForPath;
+ /***/ public String blockSizeNotPowerOf2;
/***/ public String branchNameInvalid;
/***/ public String buildingBitmaps;
/***/ public String cachedPacksPreventsIndexCreation;
@@ -174,6 +175,7 @@ public static JGitText get() {
/***/ public String checkoutUnexpectedResult;
/***/ public String classCastNotA;
/***/ public String cloneNonEmptyDirectory;
+ /***/ public String closed;
/***/ public String collisionOn;
/***/ public String commandRejectedByHook;
/***/ public String commandWasCalledInTheWrongState;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
index 764ae12..f7decf1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCache.java
@@ -145,6 +145,8 @@ public static DfsBlockCache getInstance() {
* <p>
* If a pack file has a native size, a whole multiple of the native size
* will be used until it matches this size.
+ * <p>
+ * The value for blockSize must be a power of 2.
*/
private final int blockSize;
@@ -261,20 +263,22 @@ DfsPackFile getOrCreate(DfsPackDescription dsc, DfsPackKey key) {
// TODO This table grows without bound. It needs to clean up
// entries that aren't in cache anymore, and aren't being used
// by a live DfsObjDatabase reference.
- synchronized (packCache) {
- DfsPackFile pack = packCache.get(dsc);
- if (pack != null && pack.invalid()) {
- packCache.remove(dsc);
- pack = null;
- }
- if (pack == null) {
- if (key == null)
- key = new DfsPackKey();
- pack = new DfsPackFile(this, dsc, key);
- packCache.put(dsc, pack);
- }
+
+ DfsPackFile pack = packCache.get(dsc);
+ if (pack != null && !pack.invalid()) {
return pack;
}
+
+ // 'pack' either didn't exist or was invalid. Compute a new
+ // entry atomically (guaranteed by ConcurrentHashMap).
+ return packCache.compute(dsc, (k, v) -> {
+ if (v != null && !v.invalid()) { // valid value added by
+ return v; // another thread
+ } else {
+ return new DfsPackFile(
+ this, dsc, key != null ? key : new DfsPackKey());
+ }
+ });
}
private int hash(int packHash, long off) {
@@ -502,9 +506,7 @@ private <T> Ref<T> scanRef(HashEntry n, DfsPackKey pack, long position) {
}
void remove(DfsPackFile pack) {
- synchronized (packCache) {
- packCache.remove(pack.getPackDescription());
- }
+ packCache.remove(pack.getPackDescription());
}
private int slot(DfsPackKey pack, long position) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
index 7e32554..089bfa4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsBlockCacheConfig.java
@@ -106,10 +106,16 @@ public int getBlockSize() {
/**
* @param newSize
* size in bytes of a single window read in from the pack file.
+ * The value must be a power of 2.
* @return {@code this}
*/
public DfsBlockCacheConfig setBlockSize(final int newSize) {
- blockSize = Math.max(512, newSize);
+ int size = Math.max(512, newSize);
+ if ((size & (size - 1)) != 0) {
+ throw new IllegalArgumentException(
+ JGitText.get().blockSizeNotPowerOf2);
+ }
+ blockSize = size;
return this;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
index 2f61dea..8c93295 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsReader.java
@@ -113,6 +113,7 @@ public final class DfsReader extends ObjectReader implements ObjectReuseAsIs {
DfsReader(DfsObjDatabase db) {
this.db = db;
+ this.streamFileThreshold = db.getReaderOptions().getStreamFileThreshold();
}
DfsReaderOptions getOptions() {
@@ -125,10 +126,6 @@ DeltaBaseCache getDeltaBaseCache() {
return baseCache;
}
- int getStreamFileThreshold() {
- return getOptions().getStreamFileThreshold();
- }
-
@Override
public ObjectReader newReader() {
return new DfsReader(db);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
index f55e15f..9c048da 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/GC.java
@@ -1164,7 +1164,7 @@ private void addRepackAllOption() {
/**
* @return {@code true} if number of packs > gc.autopacklimit (default 50)
*/
- private boolean tooManyPacks() {
+ boolean tooManyPacks() {
int autopacklimit = repo.getConfig().getInt(
ConfigConstants.CONFIG_GC_SECTION,
ConfigConstants.CONFIG_KEY_AUTOPACKLIMIT,
@@ -1183,7 +1183,7 @@ private boolean tooManyPacks() {
*
* @return {@code true} if number of loose objects > gc.auto (default 6700)
*/
- private boolean tooManyLooseObjects() {
+ boolean tooManyLooseObjects() {
int auto = repo.getConfig().getInt(ConfigConstants.CONFIG_GC_SECTION,
ConfigConstants.CONFIG_KEY_AUTO, DEFAULT_AUTOLIMIT);
if (auto <= 0) {
@@ -1204,9 +1204,9 @@ public boolean accept(Path file) throws IOException {
.matches();
}
})) {
- Iterator<Path> iter = stream.iterator();
- while (iter.hasNext()) {
- if (n++ > threshold) {
+ for (Iterator<Path> iter = stream.iterator(); iter.hasNext();
+ iter.next()) {
+ if (++n > threshold) {
return true;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCursor.java
index a2c0561..a742d17 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCursor.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WindowCursor.java
@@ -94,12 +94,14 @@ final class WindowCursor extends ObjectReader implements ObjectReuseAsIs {
WindowCursor(FileObjectDatabase db) {
this.db = db;
this.createdFromInserter = null;
+ this.streamFileThreshold = WindowCache.getStreamFileThreshold();
}
WindowCursor(FileObjectDatabase db,
@Nullable ObjectDirectoryInserter createdFromInserter) {
this.db = db;
this.createdFromInserter = createdFromInserter;
+ this.streamFileThreshold = WindowCache.getStreamFileThreshold();
}
DeltaBaseCache getDeltaBaseCache() {
@@ -337,10 +339,6 @@ void pin(final PackFile pack, final long position)
}
}
- int getStreamFileThreshold() {
- return WindowCache.getStreamFileThreshold();
- }
-
@Override
@Nullable
public ObjectInserter getCreatedFromInserter() {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectReader.java
index b23145d..372da98 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectReader.java
@@ -66,6 +66,13 @@ public abstract class ObjectReader implements AutoCloseable {
public static final int OBJ_ANY = -1;
/**
+ * The threshold at which a file will be streamed rather than loaded
+ * entirely into memory.
+ * @since 4.6
+ */
+ protected int streamFileThreshold;
+
+ /**
* Construct a new reader from the same data.
* <p>
* Applications can use this method to build a new reader from the same data
@@ -445,6 +452,29 @@ public ObjectInserter getCreatedFromInserter() {
public abstract void close();
/**
+ * Sets the threshold at which a file will be streamed rather than loaded
+ * entirely into memory
+ *
+ * @param threshold
+ * the new threshold
+ * @since 4.6
+ */
+ public void setStreamFileThreshold(int threshold) {
+ streamFileThreshold = threshold;
+ }
+
+ /**
+ * Returns the threshold at which a file will be streamed rather than loaded
+ * entirely into memory
+ *
+ * @return the threshold in bytes
+ * @since 4.6
+ */
+ public int getStreamFileThreshold() {
+ return streamFileThreshold;
+ }
+
+ /**
* Wraps a delegate ObjectReader.
*
* @since 4.4
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
index 9a57349..5435116 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCache.java
@@ -45,8 +45,6 @@
import java.io.File;
import java.io.IOException;
-import java.lang.ref.Reference;
-import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
@@ -202,8 +200,7 @@ static boolean isCached(@NonNull Repository repo) {
return false;
}
FileKey key = new FileKey(gitDir, repo.getFS());
- Reference<Repository> repoRef = cache.cacheMap.get(key);
- return repoRef != null && repoRef.get() == repo;
+ return cache.cacheMap.get(key) == repo;
}
/** Unregister all repositories from the cache. */
@@ -219,7 +216,7 @@ static void reconfigure(RepositoryCacheConfig repositoryCacheConfig) {
cache.configureEviction(repositoryCacheConfig);
}
- private final ConcurrentHashMap<Key, Reference<Repository>> cacheMap;
+ private final ConcurrentHashMap<Key, Repository> cacheMap;
private final Lock[] openLocks;
@@ -228,7 +225,7 @@ static void reconfigure(RepositoryCacheConfig repositoryCacheConfig) {
private volatile long expireAfter;
private RepositoryCache() {
- cacheMap = new ConcurrentHashMap<Key, Reference<Repository>>();
+ cacheMap = new ConcurrentHashMap<Key, Repository>();
openLocks = new Lock[4];
for (int i = 0; i < openLocks.length; i++) {
openLocks[i] = new Lock();
@@ -261,19 +258,15 @@ public void run() {
}
}
- @SuppressWarnings("resource")
private Repository openRepository(final Key location,
final boolean mustExist) throws IOException {
- Reference<Repository> ref = cacheMap.get(location);
- Repository db = ref != null ? ref.get() : null;
+ Repository db = cacheMap.get(location);
if (db == null) {
synchronized (lockFor(location)) {
- ref = cacheMap.get(location);
- db = ref != null ? ref.get() : null;
+ db = cacheMap.get(location);
if (db == null) {
db = location.open(mustExist);
- ref = new SoftReference<Repository>(db);
- cacheMap.put(location, ref);
+ cacheMap.put(location, db);
} else {
db.incrementOpen();
}
@@ -285,16 +278,13 @@ private Repository openRepository(final Key location,
}
private void registerRepository(final Key location, final Repository db) {
- SoftReference<Repository> newRef = new SoftReference<Repository>(db);
- Reference<Repository> oldRef = cacheMap.put(location, newRef);
- Repository oldDb = oldRef != null ? oldRef.get() : null;
+ Repository oldDb = cacheMap.put(location, db);
if (oldDb != null)
oldDb.close();
}
private Repository unregisterRepository(final Key location) {
- Reference<Repository> oldRef = cacheMap.remove(location);
- return oldRef != null ? oldRef.get() : null;
+ return cacheMap.remove(location);
}
private boolean isExpired(Repository db) {
@@ -316,8 +306,7 @@ private Collection<Key> getKeys() {
}
private void clearAllExpired() {
- for (Reference<Repository> ref : cacheMap.values()) {
- Repository db = ref.get();
+ for (Repository db : cacheMap.values()) {
if (isExpired(db)) {
RepositoryCache.close(db);
}
@@ -325,7 +314,7 @@ private void clearAllExpired() {
}
private void clearAll() {
- for (Iterator<Map.Entry<Key, Reference<Repository>>> i = cacheMap
+ for (Iterator<Map.Entry<Key, Repository>> i = cacheMap
.entrySet().iterator(); i.hasNext();) {
unregisterAndCloseRepository(i.next().getKey());
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCacheConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCacheConfig.java
index 428dea3..28cdaae 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCacheConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RepositoryCacheConfig.java
@@ -53,8 +53,8 @@ public class RepositoryCacheConfig {
/**
* Set cleanupDelayMillis to this value in order to switch off time-based
- * cache eviction. The JVM can still expire cache entries when heap memory
- * runs low.
+ * cache eviction. Expired cache entries will only be evicted when
+ * RepositoryCache.clearExpired or RepositoryCache.clear are called.
*/
public static final long NO_CLEANUP = 0;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/InsecureCipherFactory.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/InsecureCipherFactory.java
new file mode 100644
index 0000000..73384a1
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/InsecureCipherFactory.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2016, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.transport;
+
+import java.security.NoSuchAlgorithmException;
+
+import javax.crypto.Cipher;
+import javax.crypto.NoSuchPaddingException;
+
+/**
+ * <b>DO NOT USE</b> Factory to create any cipher.
+ * <p>
+ * This is a hack for {@link WalkEncryption} to create any cipher configured by
+ * the end-user. Using this class allows JGit to bypass ErrorProne's security
+ * recommendation (<a
+ * href="http://errorprone.info/bugpattern/InsecureCryptoUsage"
+ * >InsecureCryptoUsage</a>), since the configured algorithm may be insecure.
+ */
+class InsecureCipherFactory {
+ static Cipher create(String algo)
+ throws NoSuchAlgorithmException, NoSuchPaddingException {
+ return Cipher.getInstance(algo);
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
index 1dfe5d9..fa27bfc 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
@@ -48,15 +48,14 @@
package org.eclipse.jgit.transport;
+import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.io.PipedInputStream;
-import java.io.PipedOutputStream;
import org.eclipse.jgit.errors.TransportException;
import org.eclipse.jgit.internal.JGitText;
-import org.eclipse.jgit.util.io.StreamCopyThread;
+import org.eclipse.jgit.util.io.IsolatedOutputStream;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelExec;
@@ -178,33 +177,12 @@ private void setupStreams() throws IOException {
// that we spawn a background thread to shuttle data through a pipe,
// as we can issue an interrupted write out of that. Its slower, so
// we only use this route if there is a timeout.
- final OutputStream out = channel.getOutputStream();
+ OutputStream out = channel.getOutputStream();
if (timeout <= 0) {
outputStream = out;
} else {
- final PipedInputStream pipeIn = new PipedInputStream();
- final StreamCopyThread copier = new StreamCopyThread(pipeIn,
- out);
- final PipedOutputStream pipeOut = new PipedOutputStream(pipeIn) {
- @Override
- public void flush() throws IOException {
- super.flush();
- copier.flush();
- }
-
- @Override
- public void close() throws IOException {
- super.close();
- try {
- copier.join(timeout * 1000);
- } catch (InterruptedException e) {
- // Just wake early, the thread will terminate
- // anyway.
- }
- }
- };
- copier.start();
- outputStream = pipeOut;
+ IsolatedOutputStream i = new IsolatedOutputStream(out);
+ outputStream = new BufferedOutputStream(i, 16 * 1024);
}
errStream = channel.getErrStream();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkEncryption.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkEncryption.java
index fe03bdc..bdac7eb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkEncryption.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/WalkEncryption.java
@@ -143,35 +143,6 @@ OutputStream encrypt(OutputStream os) {
}
}
- // PBEParameterSpec factory for Java (version <= 7).
- // Does not support AlgorithmParameterSpec.
- static PBEParameterSpec java7PBEParameterSpec(byte[] salt,
- int iterationCount) {
- return new PBEParameterSpec(salt, iterationCount);
- }
-
- // PBEParameterSpec factory for Java (version >= 8).
- // Adds support for AlgorithmParameterSpec.
- static PBEParameterSpec java8PBEParameterSpec(byte[] salt,
- int iterationCount, AlgorithmParameterSpec paramSpec) {
- try {
- @SuppressWarnings("boxing")
- PBEParameterSpec instance = PBEParameterSpec.class
- .getConstructor(byte[].class, int.class,
- AlgorithmParameterSpec.class)
- .newInstance(salt, iterationCount, paramSpec);
- return instance;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- // Current runtime version.
- // https://docs.oracle.com/javase/7/docs/technotes/guides/versioning/spec/versioning2.html
- static double javaVersion() {
- return Double.parseDouble(System.getProperty("java.specification.version")); //$NON-NLS-1$
- }
-
/**
* JetS3t compatibility reference: <a href=
* "https://bitbucket.org/jmurty/jets3t/src/156c00eb160598c2e9937fd6873f00d3190e28ca/src/org/jets3t/service/security/EncryptionUtil.java">
@@ -217,7 +188,7 @@ static class JetS3tV2 extends WalkEncryption {
cryptoAlg = algo;
// Verify if cipher is present.
- Cipher cipher = Cipher.getInstance(cryptoAlg);
+ Cipher cipher = InsecureCipherFactory.create(cryptoAlg);
// Standard names are not case-sensitive.
// http://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html
@@ -233,9 +204,7 @@ static class JetS3tV2 extends WalkEncryption {
boolean useIV = cryptoName.contains("AES"); //$NON-NLS-1$
// PBEParameterSpec algorithm parameters are supported from Java 8.
- boolean isJava8 = javaVersion() >= 1.8;
-
- if (useIV && isJava8) {
+ if (useIV) {
// Support IV where possible:
// * since JCE provider uses random IV for PBE/AES
// * and there is no place to store dynamic IV in JetS3t V2
@@ -245,16 +214,15 @@ static class JetS3tV2 extends WalkEncryption {
// https://bitbucket.org/jmurty/jets3t/raw/156c00eb160598c2e9937fd6873f00d3190e28ca/src/org/jets3t/service/security/EncryptionUtil.java
// http://cr.openjdk.java.net/~mullan/webrevs/ascarpin/webrev.00/raw_files/new/src/share/classes/com/sun/crypto/provider/PBES2Core.java
IvParameterSpec paramIV = new IvParameterSpec(ZERO_AES_IV);
- paramSpec = java8PBEParameterSpec(SALT, ITERATIONS, paramIV);
+ paramSpec = new PBEParameterSpec(SALT, ITERATIONS, paramIV);
} else {
// Strict legacy JetS3t V2 compatibility, with no IV support.
- paramSpec = java7PBEParameterSpec(SALT, ITERATIONS);
+ paramSpec = new PBEParameterSpec(SALT, ITERATIONS);
}
// Verify if cipher + key are allowed by policy.
cipher.init(Cipher.ENCRYPT_MODE, secretKey, paramSpec);
cipher.doFinal();
-
}
@Override
@@ -272,7 +240,7 @@ void validate(final HttpURLConnection u, final String prefix)
@Override
OutputStream encrypt(final OutputStream os) throws IOException {
try {
- final Cipher cipher = Cipher.getInstance(cryptoAlg);
+ final Cipher cipher = InsecureCipherFactory.create(cryptoAlg);
cipher.init(Cipher.ENCRYPT_MODE, secretKey, paramSpec);
return new CipherOutputStream(os, cipher);
} catch (GeneralSecurityException e) {
@@ -283,7 +251,7 @@ OutputStream encrypt(final OutputStream os) throws IOException {
@Override
InputStream decrypt(final InputStream in) throws IOException {
try {
- final Cipher cipher = Cipher.getInstance(cryptoAlg);
+ final Cipher cipher = InsecureCipherFactory.create(cryptoAlg);
cipher.init(Cipher.DECRYPT_MODE, secretKey, paramSpec);
return new CipherInputStream(in, cipher);
} catch (GeneralSecurityException e) {
@@ -374,7 +342,7 @@ static abstract class SymmetricEncryption extends WalkEncryption
String keySalt = props.getProperty(profile + X_KEY_SALT, DEFAULT_KEY_SALT);
// Verify if cipher is present.
- Cipher cipher = Cipher.getInstance(cipherAlgo);
+ Cipher cipher = InsecureCipherFactory.create(cipherAlgo);
// Verify if key factory is present.
SecretKeyFactory factory = SecretKeyFactory.getInstance(keyAlgo);
@@ -432,7 +400,7 @@ static abstract class SymmetricEncryption extends WalkEncryption
@Override
OutputStream encrypt(OutputStream output) throws IOException {
try {
- Cipher cipher = Cipher.getInstance(cipherAlgo);
+ Cipher cipher = InsecureCipherFactory.create(cipherAlgo);
cipher.init(Cipher.ENCRYPT_MODE, secretKey);
AlgorithmParameters params = cipher.getParameters();
if (params == null) {
@@ -489,7 +457,7 @@ void validate(HttpURLConnection conn, String prefix)
JGitText.get().unsupportedEncryptionVersion, vers));
}
try {
- decryptCipher = Cipher.getInstance(cipherAlgo);
+ decryptCipher = InsecureCipherFactory.create(cipherAlgo);
if (cont.isEmpty()) {
decryptCipher.init(Cipher.DECRYPT_MODE, secretKey);
} else {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/IsolatedOutputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/IsolatedOutputStream.java
new file mode 100644
index 0000000..cdc4a4d
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/IsolatedOutputStream.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2016, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.util.io;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.OutputStream;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.eclipse.jgit.internal.JGitText;
+
+/**
+ * OutputStream isolated from interrupts.
+ * <p>
+ * Wraps an OutputStream to prevent interrupts during writes from being made
+ * visible to that stream instance. This works around buggy or difficult
+ * OutputStream implementations like JSch that cannot gracefully handle an
+ * interrupt during write.
+ * <p>
+ * Every write (or flush) requires a context switch to another thread. Callers
+ * should wrap this stream with {@code BufferedOutputStream} using a suitable
+ * buffer size to amortize the cost of context switches.
+ */
+public class IsolatedOutputStream extends OutputStream {
+ private final OutputStream dst;
+ private final ExecutorService copier;
+ private Future<Void> pending;
+
+ /**
+ * Wraps an OutputStream.
+ *
+ * @param out
+ * stream to send all writes to.
+ */
+ public IsolatedOutputStream(OutputStream out) {
+ dst = out;
+ copier = new ThreadPoolExecutor(1, 1, 0, TimeUnit.MILLISECONDS,
+ new ArrayBlockingQueue<Runnable>(1), new NamedThreadFactory());
+ }
+
+ @Override
+ public void write(int ch) throws IOException {
+ write(new byte[] { (byte) ch }, 0, 1);
+ }
+
+ @Override
+ public void write(final byte[] buf, final int pos, final int cnt)
+ throws IOException {
+ checkClosed();
+ execute(new Callable<Void>() {
+ @Override
+ public Void call() throws IOException {
+ dst.write(buf, pos, cnt);
+ return null;
+ }
+ });
+ }
+
+ @Override
+ public void flush() throws IOException {
+ checkClosed();
+ execute(new Callable<Void>() {
+ @Override
+ public Void call() throws IOException {
+ dst.flush();
+ return null;
+ }
+ });
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (!copier.isShutdown()) {
+ try {
+ if (pending == null || tryCleanClose()) {
+ cleanClose();
+ } else {
+ dirtyClose();
+ }
+ } finally {
+ copier.shutdown();
+ }
+ }
+ }
+
+ private boolean tryCleanClose() {
+ /*
+ * If the caller stopped waiting for a prior write or flush, they could
+ * be trying to close a stream that is still in-use. Check if the prior
+ * operation ended in a predictable way.
+ */
+ try {
+ pending.get(0, TimeUnit.MILLISECONDS);
+ pending = null;
+ return true;
+ } catch (TimeoutException | InterruptedException e) {
+ return false;
+ } catch (ExecutionException e) {
+ pending = null;
+ return true;
+ }
+ }
+
+ private void cleanClose() throws IOException {
+ execute(new Callable<Void>() {
+ @Override
+ public Void call() throws IOException {
+ dst.close();
+ return null;
+ }
+ });
+ }
+
+ private void dirtyClose() throws IOException {
+ /*
+ * Interrupt any still pending write or flush operation. This may cause
+	 * massive failures inside of the stream, but it's going to be closed as
+ * the next step.
+ */
+ pending.cancel(true);
+
+ Future<Void> close;
+ try {
+ close = copier.submit(new Callable<Void>() {
+ @Override
+ public Void call() throws IOException {
+ dst.close();
+ return null;
+ }
+ });
+ } catch (RejectedExecutionException e) {
+ throw new IOException(e);
+ }
+ try {
+ close.get(200, TimeUnit.MILLISECONDS);
+ } catch (InterruptedException | TimeoutException e) {
+ close.cancel(true);
+ throw new IOException(e);
+ } catch (ExecutionException e) {
+ throw new IOException(e.getCause());
+ }
+ }
+
+ private void checkClosed() throws IOException {
+ if (copier.isShutdown()) {
+ throw new IOException(JGitText.get().closed);
+ }
+ }
+
+ private void execute(Callable<Void> task) throws IOException {
+ if (pending != null) {
+ // Check (and rethrow) any prior failed operation.
+ checkedGet(pending);
+ }
+ try {
+ pending = copier.submit(task);
+ } catch (RejectedExecutionException e) {
+ throw new IOException(e);
+ }
+ checkedGet(pending);
+ pending = null;
+ }
+
+ private static void checkedGet(Future<Void> future) throws IOException {
+ try {
+ future.get();
+ } catch (InterruptedException e) {
+ throw interrupted(e);
+ } catch (ExecutionException e) {
+ throw new IOException(e.getCause());
+ }
+ }
+
+ private static InterruptedIOException interrupted(InterruptedException c) {
+ InterruptedIOException e = new InterruptedIOException();
+ e.initCause(c);
+ return e;
+ }
+
+ private static class NamedThreadFactory implements ThreadFactory {
+ private static final AtomicInteger cnt = new AtomicInteger();
+
+ @Override
+ public Thread newThread(Runnable r) {
+ int n = cnt.incrementAndGet();
+ String name = IsolatedOutputStream.class.getSimpleName() + '-' + n;
+ return new Thread(r, name);
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/StreamCopyThread.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/StreamCopyThread.java
index 8d39a22..7aba0a5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/StreamCopyThread.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/StreamCopyThread.java
@@ -47,7 +47,6 @@
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
-import java.util.concurrent.atomic.AtomicInteger;
/** Thread to copy from an input stream to an output stream. */
public class StreamCopyThread extends Thread {
@@ -59,7 +58,8 @@ public class StreamCopyThread extends Thread {
private volatile boolean done;
- private final AtomicInteger flushCount = new AtomicInteger(0);
+ /** Lock held by flush to avoid interrupting a write. */
+ private final Object writeLock;
/**
* Create a thread to copy data from an input stream to an output stream.
@@ -75,6 +75,7 @@ public StreamCopyThread(final InputStream i, final OutputStream o) {
setName(Thread.currentThread().getName() + "-StreamCopy"); //$NON-NLS-1$
src = i;
dst = o;
+ writeLock = new Object();
}
/**
@@ -84,9 +85,11 @@ public StreamCopyThread(final InputStream i, final OutputStream o) {
* happen at some future point in time, when the thread wakes up to process
* the request.
*/
+ @Deprecated
public void flush() {
- flushCount.incrementAndGet();
- interrupt();
+ synchronized (writeLock) {
+ interrupt();
+ }
}
/**
@@ -113,25 +116,23 @@ public void halt() throws InterruptedException {
public void run() {
try {
final byte[] buf = new byte[BUFFER_SIZE];
- int flushCountBeforeRead = 0;
boolean readInterrupted = false;
for (;;) {
try {
if (readInterrupted) {
- dst.flush();
- readInterrupted = false;
- if (!flushCount.compareAndSet(flushCountBeforeRead, 0)) {
- // There was a flush() call since last blocked read.
- // Set interrupt status, so next blocked read will throw
- // an InterruptedIOException and we will flush again.
- interrupt();
+ synchronized (writeLock) {
+ boolean interruptedAgain = Thread.interrupted();
+ dst.flush();
+ if (interruptedAgain) {
+ interrupt();
+ }
}
+ readInterrupted = false;
}
if (done)
break;
- flushCountBeforeRead = flushCount.get();
final int n;
try {
n = src.read(buf);
@@ -142,20 +143,12 @@ public void run() {
if (n < 0)
break;
- boolean writeInterrupted = false;
- for (;;) {
- try {
- dst.write(buf, 0, n);
- } catch (InterruptedIOException wakey) {
- writeInterrupted = true;
- continue;
- }
-
- // set interrupt status, which will be checked
- // when we block in src.read
- if (writeInterrupted || flushCount.get() > 0)
+ synchronized (writeLock) {
+ boolean writeInterrupted = Thread.interrupted();
+ dst.write(buf, 0, n);
+ if (writeInterrupted) {
interrupt();
- break;
+ }
}
} catch (IOException e) {
break;