Merge "Fix JGit set core.fileMode to false by default instead of true for non Windows OS."
diff --git a/.gitignore b/.gitignore
index 963b8a4..3679a33 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,6 @@
/.project
/target
+.DS_Store
infer-out
-bazel-bin
-bazel-genfiles
-bazel-jgit
-bazel-out
-bazel-testlogs
+bazel-*
*~
diff --git a/Documentation/technical/reftable.md b/Documentation/technical/reftable.md
new file mode 100644
index 0000000..34f7529
--- /dev/null
+++ b/Documentation/technical/reftable.md
@@ -0,0 +1,950 @@
+# reftable
+
+[TOC]
+
+## Overview
+
+### Problem statement
+
+Some repositories contain a lot of references (e.g. android at 866k,
+rails at 31k). The existing packed-refs format takes up a lot of
+space (e.g. 62M), and does not scale with additional references.
+Lookup of a single reference requires linearly scanning the file.
+
+Atomic pushes modifying multiple references require copying the
+entire packed-refs file, which can be a considerable amount of data
+moved (e.g. 62M in, 62M out) for even small transactions (2 refs
+modified).
+
+Repositories with many loose references occupy a large number of disk
+blocks from the local file system, as each reference is its own file
+storing 41 bytes (and another file for the corresponding reflog).
+This negatively affects the number of inodes available when a large
+number of repositories are stored on the same filesystem. Readers can
+be penalized due to the larger number of syscalls required to traverse
+and read the `$GIT_DIR/refs` directory.
+
+### Objectives
+
+- Near constant time lookup for any single reference, even when the
+ repository is cold and not in process or kernel cache.
+- Near constant time verification if a SHA-1 is referred to by at
+ least one reference (for allow-tip-sha1-in-want).
+- Efficient lookup of an entire namespace, such as `refs/tags/`.
+- Support atomic push with `O(size_of_update)` operations.
+- Combine reflog storage with ref storage for small transactions.
+- Separate reflog storage for base refs and historical logs.
+
+### Description
+
+A reftable file is a portable binary file format customized for
+reference storage. References are sorted, enabling linear scans,
+binary search lookup, and range scans.
+
+Storage in the file is organized into variable sized blocks. Prefix
+compression is used within a single block to reduce disk space. Block
+size and alignment is tunable by the writer.
+
+### Performance
+
+Space used, packed-refs vs. reftable:
+
+repository | packed-refs | reftable | % original | avg ref | avg obj
+-----------|------------:|---------:|-----------:|---------:|--------:
+android | 62.2 M | 36.1 M | 58.0% | 33 bytes | 5 bytes
+rails | 1.8 M | 1.1 M | 57.7% | 29 bytes | 4 bytes
+git | 78.7 K | 48.1 K | 61.0% | 50 bytes | 4 bytes
+git (heads)| 332 b | 269 b | 81.0% | 33 bytes | 0 bytes
+
+Scan (read 866k refs), by reference name lookup (single ref from 866k
+refs), and by SHA-1 lookup (refs with that SHA-1, from 866k refs):
+
+format | cache | scan | by name | by SHA-1
+------------|------:|--------:|---------------:|---------------:
+packed-refs | cold | 402 ms | 409,660.1 usec | 412,535.8 usec
+packed-refs | hot | | 6,844.6 usec | 20,110.1 usec
+reftable | cold | 112 ms | 33.9 usec | 323.2 usec
+reftable | hot | | 20.2 usec | 320.8 usec
+
+Space used for 149,932 log entries for 43,061 refs,
+reflog vs. reftable:
+
+format | size | avg entry
+--------------|------:|-----------:
+$GIT_DIR/logs | 173 M | 1209 bytes
+reftable | 5 M | 37 bytes
+
+## Details
+
+### Peeling
+
+References stored in a reftable are peeled: a record for an annotated
+(or signed) tag records both the tag object and the object it refers
+to.
+
+### Reference name encoding
+
+Reference names are an uninterpreted sequence of bytes that must pass
+[git-check-ref-format][ref-fmt] as a valid reference name.
+
+[ref-fmt]: https://git-scm.com/docs/git-check-ref-format
+
+### Network byte order
+
+All multi-byte, fixed width fields are in network byte order.
+
+### Ordering
+
+Blocks are lexicographically ordered by their first reference.
+
+### Directory/file conflicts
+
+The reftable format accepts both `refs/heads/foo` and
+`refs/heads/foo/bar` as distinct references.
+
+This property is useful for retaining log records in reftable, but may
+confuse versions of Git using `$GIT_DIR/refs` directory tree to
+maintain references. Users of reftable may choose to continue to
+reject `foo` and `foo/bar` type conflicts to prevent problems for
+peers.
+
+## File format
+
+### Structure
+
+A reftable file has the following high-level structure:
+
+ first_block {
+ header
+ first_ref_block
+ }
+ ref_block*
+ ref_index*
+ obj_block*
+ obj_index*
+ log_block*
+ log_index*
+ footer
+
+A log-only file omits the `ref_block`, `ref_index`, `obj_block` and
+`obj_index` sections, containing only the file header and log block:
+
+ first_block {
+ header
+ }
+ log_block*
+ log_index*
+ footer
+
+In a log-only file the first log block immediately follows the file
+header, without padding to block alignment.
+
+### Block size
+
+The file's block size is arbitrarily determined by the writer, and
+does not have to be a power of 2. The block size must be larger than
+the longest reference name or log entry used in the repository, as
+references cannot span blocks.
+
+Powers of two that are friendly to the virtual memory system or
+filesystem (such as 4k or 8k) are recommended. Larger sizes (64k) can
+yield better compression, with a possible increased cost incurred by
+readers during access.
+
+The largest block size is `16777215` bytes (15.99 MiB).
+
+### Block alignment
+
+Writers may choose to align blocks at multiples of the block size by
+including `padding` filled with NUL bytes at the end of a block to
+round out to the chosen alignment. When alignment is used, writers
+must specify the alignment with the file header's `block_size` field.
+
+Block alignment is not required by the file format. Unaligned files
+must set `block_size = 0` in the file header, and omit `padding`.
+Unaligned files with more than one ref block must include the
+[ref index](#Ref-index) to support fast lookup. Readers must be
+able to read both aligned and non-aligned files.
+
+Very small files (e.g. a single ref block) may omit `padding` and the
+ref index to reduce total file size.
+
+### Header
+
+A 24-byte header appears at the beginning of the file:
+
+ 'REFT'
+ uint8( version_number = 1 )
+ uint24( block_size )
+ uint64( min_update_index )
+ uint64( max_update_index )
+
+Aligned files must specify `block_size` to configure readers with the
+expected block alignment. Unaligned files must set `block_size = 0`.
+
+The `min_update_index` and `max_update_index` describe bounds for the
+`update_index` field of all log records in this file. When reftables
+are used in a stack for [transactions](#Update-transactions), these
+fields can order the files such that the prior file's
+`max_update_index + 1` is the next file's `min_update_index`.
+
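+For illustration, a minimal Java sketch of decoding this header from
+the first 24 bytes of a file might look as follows (a sketch only, not
+part of the format and not JGit's API):
+
+```
+import java.io.IOException;
+
+// Sketch only: decode the 24-byte reftable file header.
+final class ReftableHeader {
+  final int version;
+  final int blockSize;        // 0 means the file is unaligned
+  final long minUpdateIndex;
+  final long maxUpdateIndex;
+
+  ReftableHeader(byte[] buf) throws IOException {
+    if (buf.length < 24 || buf[0] != 'R' || buf[1] != 'E'
+        || buf[2] != 'F' || buf[3] != 'T')
+      throw new IOException("not a reftable file");
+    version = buf[4] & 0xff;
+    if (version != 1)
+      throw new IOException("unsupported version " + version);
+    blockSize = ((buf[5] & 0xff) << 16) | ((buf[6] & 0xff) << 8)
+        | (buf[7] & 0xff);                // uint24
+    minUpdateIndex = readUint64(buf, 8);
+    maxUpdateIndex = readUint64(buf, 16);
+  }
+
+  private static long readUint64(byte[] buf, int p) {
+    long v = 0;
+    for (int i = 0; i < 8; i++)
+      v = (v << 8) | (buf[p + i] & 0xff); // network byte order
+    return v;
+  }
+}
+```
+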
+### First ref block
+
+The first ref block shares the same block as the file header, and is
+24 bytes smaller than all other blocks in the file. The first block
+immediately begins after the file header, at position 24.
+
+If the first block is a log block (a log-only file), its block header
+begins immediately at position 24.
+
+### Ref block format
+
+A ref block is written as:
+
+ 'r'
+ uint24( block_len )
+ ref_record+
+ uint24( restart_offset )+
+ uint16( restart_count )
+
+ padding?
+
+Blocks begin with `block_type = 'r'` and a 3-byte `block_len` which
+encodes the number of bytes in the block up to, but not including the
+optional `padding`. This is always less than or equal to the file's
+block size. In the first ref block, `block_len` includes 24 bytes
+for the file header.
+
+The 2-byte `restart_count` stores the number of entries in the
+`restart_offset` list, which must not be empty. Readers can use
+`restart_count` to binary search between restarts before starting a
+linear scan.
+
+Exactly `restart_count` 3-byte `restart_offset` values precede the
+`restart_count`. Offsets are relative to the start of the block and
+refer to the first byte of any `ref_record` whose name has not been
+prefix compressed. Entries in the `restart_offset` list must be
+sorted, ascending. Readers can start linear scans from any of these
+records.
+
+A variable number of `ref_record` fill the middle of the block,
+describing reference names and values. The format is described below.
+
+As the first ref block shares the first file block with the file
+header, all `restart_offset` in the first block are relative to the
+start of the file (position 0), and include the file header. This
+forces the first `restart_offset` to be `28`.
+
+#### ref record
+
+A `ref_record` describes a single reference, storing both the name and
+its value(s). Records are formatted as:
+
+ varint( prefix_length )
+ varint( (suffix_length << 3) | value_type )
+ suffix
+ value?
+
+The `prefix_length` field specifies how many leading bytes of the
+prior reference record's name should be copied to obtain this
+reference's name. This must be 0 for the first reference in any
+block, and also must be 0 for any `ref_record` whose offset is listed
+in the `restart_offset` table at the end of the block.
+
+Recovering a reference name from any `ref_record` is a simple concat:
+
+ this_name = prior_name[0..prefix_length] + suffix
+
+The `suffix_length` value provides the number of bytes available in
+`suffix` to copy from `suffix` to complete the reference name.
+
+The `value` follows. Its format is determined by `value_type`, one of
+the following:
+
+- `0x0`: deletion; no value data (see transactions, below)
+- `0x1`: one 20-byte object id; value of the ref
+- `0x2`: two 20-byte object ids; value of the ref, peeled target
+- `0x3`: symbolic reference: `varint( target_len ) target`
+
+Symbolic references use `0x3`, followed by the complete name of the
+reference target. No compression is applied to the target name.
+
+Types `0x4..0x7` are reserved for future use.
+
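+As an illustration, a Java sketch of recovering a record's name
+follows. It assumes a `readVarint(byte[], int[])` helper matching the
+decoder sketched in the Varint encoding section below, and converts
+names to UTF-8 strings only for display (the format itself stores raw
+bytes):
+
+```
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+// Sketch only: decode the key portion of one ref_record at buf[ptr[0]],
+// given the previous record's fully expanded name.
+static String readRefName(byte[] buf, int[] ptr, String priorName) {
+  int prefixLength = (int) readVarint(buf, ptr);
+  long sfxAndType = readVarint(buf, ptr);
+  int suffixLength = (int) (sfxAndType >>> 3);
+  int valueType = (int) (sfxAndType & 0x7); // 0x0..0x3, see list above
+  String suffix = new String(buf, ptr[0], suffixLength, UTF_8);
+  ptr[0] += suffixLength;
+  // value bytes follow: none (0x0), 20 (0x1), 40 (0x2),
+  // or varint(target_len) + target (0x3)
+  return priorName.substring(0, prefixLength) + suffix;
+}
+```
+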
+### Ref index
+
+The ref index stores the name of the last reference from every ref
+block in the file, enabling reduced disk seeks for lookups. Any
+reference can be found by searching the index, identifying the
+containing block, and searching within that block.
+
+The index may be organized into a multi-level index, where the 1st
+level index block points to additional ref index blocks (2nd level),
+which may in turn point to either additional index blocks (e.g. 3rd
+level) or ref blocks (leaf level). Disk reads required to access a
+ref go up with higher index levels. Multi-level indexes may be
+required to ensure no single index block exceeds the file format's max
+block size of `16777215` bytes (15.99 MiB). To achieve constant O(1)
+disk seeks for lookups the index must be a single level, which is
+permitted to exceed the file's configured block size, but not the
+format's max block size of 15.99 MiB.
+
+If present, the ref index block(s) appears after the last ref block.
+
+If there are at least 4 ref blocks, a ref index block should be
+written to improve lookup times. Cold reads using the index require
+2 disk reads (read index, read block), and binary searching < 4 blocks
+also requires <= 2 reads. Omitting the index block from smaller files
+saves space.
+
+If the file is unaligned and contains more than one ref block, the ref
+index must be written.
+
+Index block format:
+
+ 'i'
+ uint24( block_len )
+ index_record+
+ uint24( restart_offset )+
+ uint16( restart_count )
+
+ padding?
+
+The index blocks begin with `block_type = 'i'` and a 3-byte
+`block_len` which encodes the number of bytes in the block,
+up to but not including the optional `padding`.
+
+The `restart_offset` and `restart_count` fields are identical in
+format, meaning and usage as in ref blocks.
+
+To reduce the number of reads required for random access in very large
+files the index block may be larger than other blocks. However,
+readers must hold the entire index in memory to benefit from this, so
+it's a time-space tradeoff in both file size and reader memory.
+
+Increasing the file's block size decreases the index size.
+Alternatively a multi-level index may be used, keeping index blocks
+within the file's block size, but increasing the number of blocks
+that need to be accessed.
+
+#### index record
+
+An index record describes the last entry in another block.
+Index records are written as:
+
+ varint( prefix_length )
+ varint( (suffix_length << 3) | 0 )
+ suffix
+ varint( block_position )
+
+Index records use prefix compression exactly like `ref_record`.
+
+Index records store `block_position` after the suffix, specifying the
+absolute position in bytes (from the start of the file) of the block
+that ends with this reference. Readers can seek to `block_position` to
+begin reading the block header.
+
+Readers must examine the block header at `block_position` to determine
+if the next block is another level index block, or the leaf-level ref
+block.
+
+#### Reading the index
+
+Readers loading the ref index must first read the footer (below) to
+obtain `ref_index_position`. If not present, the position will be 0.
+The `ref_index_position` is for the 1st level root of the ref index.
+
+### Obj block format
+
+Object blocks are optional. Writers may choose to omit object blocks,
+especially if readers will not use the SHA-1 to ref mapping.
+
+Object blocks use unique, abbreviated 2-20 byte SHA-1 keys, mapping
+to ref blocks containing references pointing to that object directly,
+or as the peeled value of an annotated tag. Like ref blocks, object
+blocks use the file's standard block size. The abbreviation length is
+available in the footer as `obj_id_len`.
+
+To save space in small files, object blocks may be omitted if the ref
+index is not present, as brute force search will only need to read a
+few ref blocks. When missing, readers should brute force a linear
+search of all references to lookup by SHA-1.
+
+An object block is written as:
+
+ 'o'
+ uint24( block_len )
+ obj_record+
+ uint24( restart_offset )+
+ uint16( restart_count )
+
+ padding?
+
+Fields are identical to ref block. Binary search using the restart
+table works the same as in reference blocks.
+
+Because object identifiers are abbreviated by writers to the shortest
+unique abbreviation within the reftable, obj key lengths are variable
+between 2 and 20 bytes. Readers must compare only for common prefix
+match within an obj block or obj index.
+
+#### obj record
+
+An `obj_record` describes a single object abbreviation, and the blocks
+containing references using that unique abbreviation:
+
+ varint( prefix_length )
+ varint( (suffix_length << 3) | cnt_3 )
+ suffix
+ varint( cnt_large )?
+ varint( position_delta )*
+
+Like in reference blocks, abbreviations are prefix compressed within
+an obj block. On large reftables with many unique objects, higher
+block sizes (64k), and higher restart interval (128), a
+`prefix_length` of 2 or 3 and `suffix_length` of 3 may be common in
+obj records (unique abbreviation of 5-6 raw bytes, 10-12 hex digits).
+
+Each record contains `position_count` number of positions for matching
+ref blocks. For 1-7 positions the count is stored in `cnt_3`. When
+`cnt_3 = 0` the actual count follows in a varint, `cnt_large`.
+
+The use of `cnt_3` bets most objects are pointed to by only a single
+reference, some may be pointed to by a couple of references, and very
+few (if any) are pointed to by more than 7 references.
+
+A special case exists when `cnt_3 = 0` and `cnt_large = 0`: there
+are no `position_delta`, but at least one reference starts with this
+abbreviation. A reader that needs exact reference names must scan all
+references to find which specific references have the desired object.
+Writers should use this format when the `position_delta` list would have
+overflowed the file's block size due to a high number of references
+pointing to the same object.
+
+The first `position_delta` is the position from the start of the file.
+Additional `position_delta` entries are sorted ascending and relative
+to the prior entry, e.g. a reader would perform:
+
+ pos = position_delta[0]
+ prior = pos
+ for (j = 1; j < position_count; j++) {
+ pos = prior + position_delta[j]
+ prior = pos
+ }
+
+With a position in hand, a reader must linearly scan the ref block,
+starting from the first `ref_record`, testing each reference's SHA-1s
+(for `value_type = 0x1` or `0x2`) for full equality. Faster searching
+by SHA-1 within a single ref block is not supported by the reftable
+format. Smaller block sizes reduce the number of candidates this step
+must consider.
+
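+For illustration, the count handling and delta decoding above could be
+sketched in Java as follows (again assuming the `readVarint` helper
+from the Varint encoding section below):
+
+```
+// Sketch only: decode the block positions of one obj_record at buf[ptr[0]].
+// The abbreviated object id itself is recovered the same way as ref names.
+static long[] readObjPositions(byte[] buf, int[] ptr) {
+  readVarint(buf, ptr);                       // prefix_length (id reuse)
+  long sfxAndCnt = readVarint(buf, ptr);
+  int suffixLength = (int) (sfxAndCnt >>> 3);
+  int cnt3 = (int) (sfxAndCnt & 0x7);
+  ptr[0] += suffixLength;                     // skip the id suffix bytes
+  long count = cnt3 != 0 ? cnt3 : readVarint(buf, ptr);
+  long[] positions = new long[(int) count];   // empty if cnt_large = 0
+  long prior = 0;
+  for (int j = 0; j < positions.length; j++) {
+    prior += readVarint(buf, ptr);            // first delta is absolute
+    positions[j] = prior;
+  }
+  return positions;
+}
+```
+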
+### Obj index
+
+The obj index stores the abbreviation from the last entry for every
+obj block in the file, enabling reduced disk seeks for all lookups.
+It is formatted exactly the same as the ref index, but refers to obj
+blocks.
+
+The obj index should be present if obj blocks are present, as
+obj blocks should only be written in larger files.
+
+Readers loading the obj index must first read the footer (below) to
+obtain `obj_index_position`. If not present, the position will be 0.
+
+### Log block format
+
+Unlike ref and obj blocks, log blocks are always unaligned.
+
+Log blocks are variable in size, and do not match the `block_size`
+specified in the file header or footer. Writers should choose an
+appropriate buffer size to prepare a log block for deflation, such as
+`2 * block_size`.
+
+A log block is written as:
+
+ 'g'
+ uint24( block_len )
+ zlib_deflate {
+ log_record+
+ uint24( restart_offset )+
+ uint16( restart_count )
+ }
+
+Log blocks look similar to ref blocks, except `block_type = 'g'`.
+
+The 4-byte block header is followed by the deflated block contents
+using zlib deflate. The `block_len` in the header is the inflated
+size (including 4-byte block header), and should be used by readers to
+preallocate the inflation output buffer. A log block's `block_len`
+may exceed the file's block size.
+
+Offsets within the log block (e.g. `restart_offset`) still include
+the 4-byte header. Readers may prefer prefixing the inflation output
+buffer with the 4-byte header.
+
+Within the deflate container, a variable number of `log_record`
+describe reference changes. The log record format is described
+below. See ref block format (above) for a description of
+`restart_offset` and `restart_count`.
+
+Because log blocks have no alignment or padding between blocks,
+readers must keep track of the bytes consumed by the inflater to
+know where the next log block begins.
+
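+For illustration, one way to inflate a log block and locate the next
+one with `java.util.zip.Inflater` could look like this sketch (error
+handling omitted; not JGit's reader):
+
+```
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+// Sketch only: inflate the log block starting at `pos` in `file` (the
+// whole reftable as a byte[]); block_type 'g' is assumed to be verified.
+static byte[] inflateLogBlock(byte[] file, int pos, long[] nextPos)
+    throws DataFormatException {
+  int blockLen = ((file[pos + 1] & 0xff) << 16)   // inflated size,
+      | ((file[pos + 2] & 0xff) << 8)             // including 4-byte header
+      | (file[pos + 3] & 0xff);
+  byte[] block = new byte[blockLen];
+  System.arraycopy(file, pos, block, 0, 4);       // keep the header so
+                                                  // restart offsets line up
+  Inflater inf = new Inflater();
+  inf.setInput(file, pos + 4, file.length - pos - 4);
+  int n = 4;
+  while (n < blockLen)
+    n += inf.inflate(block, n, blockLen - n);
+  nextPos[0] = pos + 4 + inf.getBytesRead();      // next block starts here
+  inf.end();
+  return block;
+}
+```
+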
+#### log record
+
+Log record keys are structured as:
+
+ ref_name '\0' reverse_int64( update_index )
+
+where `update_index` is the unique transaction identifier. The
+`update_index` field must be unique within the scope of a `ref_name`.
+See the update transactions section below for further details.
+
+The `reverse_int64` function inverts the value so that lexicographic
+ordering of the network byte order encoding sorts the more recent
+records (those with higher `update_index` values) first:
+
+ reverse_int64(int64 t) {
+ return 0xffffffffffffffff - t;
+ }
+
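+A small Java sketch of building such a key (names treated as UTF-8
+bytes only for the example) could be:
+
+```
+import java.io.ByteArrayOutputStream;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+// Sketch only: build the log record key so that byte-wise comparison
+// sorts newer updates of the same ref first.
+static byte[] logKey(String refName, long updateIndex) {
+  ByteArrayOutputStream key = new ByteArrayOutputStream();
+  byte[] name = refName.getBytes(UTF_8);
+  key.write(name, 0, name.length);
+  key.write(0);                                  // '\0' separator
+  long rev = 0xffffffffffffffffL - updateIndex;  // reverse_int64
+  for (int shift = 56; shift >= 0; shift -= 8)
+    key.write((int) (rev >>> shift));            // network byte order
+  return key.toByteArray();
+}
+```
+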
+Log records have a similar starting structure to ref and index
+records, utilizing the same prefix compression scheme applied to the
+log record key described above.
+
+```
+ varint( prefix_length )
+ varint( (suffix_length << 3) | log_type )
+ suffix
+ log_data {
+ old_id
+ new_id
+ varint( name_length ) name
+ varint( email_length ) email
+ varint( time_seconds )
+ sint16( tz_offset )
+ varint( message_length ) message
+ }?
+```
+
+Log record entries use `log_type` to indicate what follows:
+
+- `0x0`: deletion; no log data.
+- `0x1`: standard git reflog data using `log_data` above.
+
+The `log_type = 0x0` is mostly useful for `git stash drop`, removing
+an entry from the reflog of `refs/stash` in a transaction file
+(below), without needing to rewrite larger files. Readers reading a
+stack of reflogs must treat this as a deletion.
+
+For `log_type = 0x1`, the `log_data` section follows
+[git update-ref][update-ref] logging, and includes:
+
+- two 20-byte SHA-1s (old id, new id)
+- varint string of committer's name
+- varint string of committer's email
+- varint time in seconds since epoch (Jan 1, 1970)
+- 2-byte timezone offset in minutes (signed)
+- varint string of message
+
+`tz_offset` is the signed number of minutes from GMT the committer
+was at the time of the update. For example `GMT-0800` is encoded in
+reftable as `sint16(-480)` and `GMT+0230` is `sint16(150)`.
+
+The committer email does not contain `<` or `>`, it's the value
+normally found between the `<>` in a git commit object header.
+
+The `message_length` may be 0, in which case there was no message
+supplied for the update.
+
+[update-ref]: https://git-scm.com/docs/git-update-ref#_logging_updates
+
+#### Reading the log
+
+Readers accessing the log must first read the footer (below) to
+determine the `log_position`. The first block of the log begins at
+`log_position` bytes since the start of the file. The `log_position`
+is not block aligned.
+
+#### Importing logs
+
+When importing from `$GIT_DIR/logs` writers should globally order all
+log records roughly by timestamp while preserving file order, and
+assign unique, increasing `update_index` values for each log line.
+Newer log records get higher `update_index` values.
+
+Although an import may write only a single reftable file, the reftable
+file must span many unique `update_index`, as each log line requires
+its own `update_index` to preserve semantics.
+
+### Log index
+
+The log index stores the log key (`refname \0 reverse_int64(update_index)`)
+for the last log record of every log block in the file, supporting
+bounded-time lookup.
+
+A log index block must be written if 2 or more log blocks are written
+to the file. If present, the log index appears after the last log
+block. There is no padding used to align the log index to block
+alignment.
+
+Log index format is identical to ref index, except the keys are 9
+bytes longer to include `'\0'` and the 8-byte
+`reverse_int64(update_index)`. Records use `block_position` to
+refer to the start of a log block.
+
+#### Reading the index
+
+Readers loading the log index must first read the footer (below) to
+obtain `log_index_position`. If not present, the position will be 0.
+
+### Footer
+
+After the last block of the file, a file footer is written. It begins
+like the file header, but is extended with additional data.
+
+A 68-byte footer appears at the end:
+
+```
+ 'REFT'
+ uint8( version_number = 1 )
+ uint24( block_size )
+ uint64( min_update_index )
+ uint64( max_update_index )
+
+ uint64( ref_index_position )
+ uint64( (obj_position << 5) | obj_id_len )
+ uint64( obj_index_position )
+
+ uint64( log_position )
+ uint64( log_index_position )
+
+ uint32( CRC-32 of above )
+```
+
+If a section is missing (e.g. ref index) the corresponding position
+field (e.g. `ref_index_position`) will be 0.
+
+- `obj_position`: byte position for the first obj block.
+- `obj_id_len`: number of bytes used to abbreviate object identifiers
+ in obj blocks.
+- `log_position`: byte position for the first log block.
+- `ref_index_position`: byte position for the start of the ref index.
+- `obj_index_position`: byte position for the start of the obj index.
+- `log_index_position`: byte position for the start of the log index.
+
+#### Reading the footer
+
+Readers must seek to `file_length - 68` to access the footer. A
+trusted external source (such as `stat(2)`) is necessary to obtain
+`file_length`. When reading the footer, readers must verify:
+
+- 4-byte magic is correct
+- 1-byte version number is recognized
+- 4-byte CRC-32 matches the other 64 bytes (including magic and version)
+
+Once verified, the other fields of the footer can be accessed.
+
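+For illustration, reading and verifying the footer could be sketched in
+Java as below (a hypothetical helper; the returned buffer still holds
+the position fields at their documented offsets):
+
+```
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.file.Path;
+import java.util.zip.CRC32;
+
+// Sketch only: read the 68-byte footer and verify magic, version, CRC.
+static ByteBuffer readFooter(Path reftable) throws IOException {
+  try (FileChannel ch = FileChannel.open(reftable)) {
+    long len = ch.size();                    // trusted file_length source
+    ByteBuffer ftr = ByteBuffer.allocate(68);
+    while (ftr.hasRemaining())
+      ch.read(ftr, len - 68 + ftr.position());
+    byte[] b = ftr.array();
+    if (b[0] != 'R' || b[1] != 'E' || b[2] != 'F' || b[3] != 'T')
+      throw new IOException("bad footer magic");
+    if ((b[4] & 0xff) != 1)
+      throw new IOException("unsupported version");
+    CRC32 crc = new CRC32();
+    crc.update(b, 0, 64);                    // CRC over the first 64 bytes
+    if (crc.getValue() != (ftr.getInt(64) & 0xffffffffL))
+      throw new IOException("footer CRC-32 mismatch");
+    return ftr;
+  }
+}
+```
+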
+### Varint encoding
+
+Varint encoding is identical to the ofs-delta encoding method used
+within pack files.
+
+A decoder works as follows:
+
+ val = buf[ptr] & 0x7f
+ while (buf[ptr] & 0x80) {
+ ptr++
+ val = ((val + 1) << 7) | (buf[ptr] & 0x7f)
+ }
+
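+A Java sketch of this decoder, together with the matching encoder used
+by pack ofs-delta (shown here only for completeness), might be:
+
+```
+// Sketch only: varint decoder matching the pseudocode above. ptr[0] is
+// advanced past the consumed bytes.
+static long readVarint(byte[] buf, int[] ptr) {
+  int p = ptr[0];
+  long val = buf[p] & 0x7f;
+  while ((buf[p] & 0x80) != 0) {
+    p++;
+    val = ((val + 1) << 7) | (buf[p] & 0x7f);
+  }
+  ptr[0] = p + 1;
+  return val;
+}
+
+// Writes `value` ending at index `end - 1`, returning the index of the
+// first byte written (the encoding grows to the left).
+static int writeVarint(byte[] buf, int end, long value) {
+  int p = end;
+  buf[--p] = (byte) (value & 0x7f);
+  while ((value >>>= 7) != 0) {
+    value--;
+    buf[--p] = (byte) (0x80 | (value & 0x7f));
+  }
+  return p;
+}
+```
+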
+### Binary search
+
+Binary search within a block is supported by the `restart_offset`
+fields at the end of the block. Readers can binary search through the
+restart table to locate between which two restart points the sought
+reference or key should appear.
+
+Each record identified by a `restart_offset` stores the complete key
+in the `suffix` field of the record, making the compare operation
+during binary search straightforward.
+
+Once a restart point lexicographically before the sought reference has
+been identified, readers can linearly scan through the following
+record entries to locate the sought record, terminating if the current
+record sorts after (and therefore the sought key is not present).
+
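+A sketch of that binary search over the restart table (with a
+hypothetical `keyAt()` helper that expands the complete key stored at
+a restart point) could look like:
+
+```
+import java.util.Arrays;
+
+// Sketch only: return the index of the last restart whose key is <= want,
+// or -1 if `want` sorts before the block's first record. blockLen
+// excludes padding; keys compare as unsigned bytes.
+static int seekRestart(byte[] block, int blockLen, byte[] want) {
+  int restartCount = ((block[blockLen - 2] & 0xff) << 8)
+      | (block[blockLen - 1] & 0xff);
+  int table = blockLen - 2 - 3 * restartCount;
+  int lo = 0, hi = restartCount;
+  while (lo < hi) {                          // find first restart key > want
+    int mid = (lo + hi) >>> 1;
+    int off = ((block[table + 3 * mid] & 0xff) << 16)
+        | ((block[table + 3 * mid + 1] & 0xff) << 8)
+        | (block[table + 3 * mid + 2] & 0xff);
+    if (Arrays.compareUnsigned(keyAt(block, off), want) <= 0)
+      lo = mid + 1;
+    else
+      hi = mid;
+  }
+  return lo - 1;                             // linear scan starts here
+}
+```
+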
+#### Restart point selection
+
+Writers determine the restart points at file creation. The process is
+arbitrary, but every 16 or 64 records is recommended. Every 16 may
+be more suitable for smaller block sizes (4k or 8k), every 64 for
+larger block sizes (64k).
+
+More frequent restart points reduce prefix compression and increase
+space consumed by the restart table, both of which increase file size.
+
+Less frequent restart points make prefix compression more effective,
+decreasing overall file size, with increased penalties for readers
+walking through more records after the binary search step.
+
+A maximum of `65535` restart points per block is supported.
+
+## Considerations
+
+### Lightweight refs dominate
+
+The reftable format assumes the vast majority of references are single
+SHA-1 valued with common prefixes, such as Gerrit Code Review's
+`refs/changes/` namespace, GitHub's `refs/pulls/` namespace, or many
+lightweight tags in the `refs/tags/` namespace.
+
+Annotated tags storing the peeled object cost an additional 20 bytes
+per reference.
+
+### Low overhead
+
+A reftable with very few references (e.g. git.git with 5 heads)
+is 269 bytes for reftable, vs. 332 bytes for packed-refs. This
+supports reftable scaling down for transaction logs (below).
+
+### Block size
+
+For a Gerrit Code Review type repository with many change refs, larger
+block sizes (64 KiB) and less frequent restart points (every 64) yield
+better compression due to more references within the block compressing
+against the prior reference.
+
+Larger block sizes reduce the index size, as the reftable will
+require fewer blocks to store the same number of references.
+
+### Minimal disk seeks
+
+Assuming the index block has been loaded into memory, binary searching
+for any single reference requires exactly 1 disk seek to load the
+containing block.
+
+### Scans and lookups dominate
+
+Scanning all references and lookup by name (or namespace such as
+`refs/heads/`) are the most common activities performed on repositories.
+SHA-1s are stored directly with references to optimize this use case.
+
+### Logs are infrequently read
+
+Logs are infrequently accessed, but can be large. Deflating log
+blocks saves disk space, with some increased penalty at read time.
+
+Logs are stored in an isolated section from refs, reducing the burden
+on reference readers that want to ignore logs. Further, historical
+logs can be isolated into log-only files.
+
+### Logs are read backwards
+
+Logs are frequently accessed backwards (most recent N records for
+master to answer `master@{4}`), so log records are grouped by
+reference, and sorted descending by update index.
+
+## Repository format
+
+### Version 1
+
+A repository must set its `$GIT_DIR/config` to configure reftable:
+
+ [core]
+ repositoryformatversion = 1
+ [extensions]
+ refStorage = reftable
+
+### Layout
+
+The `$GIT_DIR/refs` path is a file when reftable is configured, not a
+directory. This prevents loose references from being stored.
+
+A collection of reftable files are stored in the `$GIT_DIR/reftable/`
+directory:
+
+ 00000001.log
+ 00000001.ref
+ 00000002.ref
+
+where reftable files are named by a unique name, such as
+`${update_index}.ref`.
+
+Log-only files use the `.log` extension, while ref-only and mixed ref
+and log files use the `.ref` extension.
+
+The stack ordering file is `$GIT_DIR/refs` and lists the current
+files, one per line, in order, from oldest (base) to newest (most
+recent):
+
+ $ cat .git/refs
+ 00000001.log
+ 00000001.ref
+ 00000002.ref
+
+Readers must read `$GIT_DIR/refs` to determine which files are
+relevant right now, and search through the stack in reverse order
+(last reftable is examined first).
+
+Reftable files not listed in `refs` may be new (and about to be added
+to the stack by the active writer), or ancient and ready to be pruned.
+
+### Readers
+
+Readers can obtain a consistent snapshot of the reference space by
+following:
+
+1. Open and read the `refs` file.
+2. Open each of the reftable files that it mentions.
+3. If any of the files is missing, goto 1.
+4. Read from the now-open files as long as necessary.
+
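+For illustration, the retry loop could be sketched as follows (using
+`java.nio.file` directly; a real reader would wrap the opened files in
+its own reftable reader type):
+
+```
+import java.io.IOException;
+import java.nio.channels.FileChannel;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+
+// Sketch only: open a consistent snapshot of the reftable stack.
+static List<FileChannel> openSnapshot(Path gitDir) throws IOException {
+  for (;;) {
+    List<String> names = Files.readAllLines(gitDir.resolve("refs"));
+    List<FileChannel> tables = new ArrayList<>();
+    try {
+      for (String name : names)
+        tables.add(FileChannel.open(
+            gitDir.resolve("reftable").resolve(name)));
+      return tables;             // every listed file opened: snapshot is good
+    } catch (NoSuchFileException gone) {
+      for (FileChannel ch : tables)
+        ch.close();              // a file was pruned underneath us; retry
+    }
+  }
+}
+```
+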
+### Update transactions
+
+Although reftables are immutable, mutations are supported by writing a
+new reftable and atomically appending it to the stack:
+
+1. Acquire `refs.lock`.
+2. Read `refs` to determine current reftables.
+3. Select `update_index` to be most recent file's `max_update_index + 1`.
+4. Prepare temp reftable `${update_index}_XXXXXX`, including log entries.
+5. Rename `${update_index}_XXXXXX` to `${update_index}.ref`.
+6. Copy `refs` to `refs.lock`, appending file from (5).
+7. Rename `refs.lock` to `refs`.
+
+During step 4 the new file's `min_update_index` and `max_update_index`
+are both set to the `update_index` selected by step 3. All log
+records for the transaction use the same `update_index` in their keys.
+This enables later correlation of which references were updated by the
+same transaction.
+
+Because a single `refs.lock` file is used to manage locking, the
+repository is single-threaded for writers. Writers may have to
+busy-spin (with backoff) around creating `refs.lock`, for up to an
+acceptable wait period, aborting if the repository is too busy to
+mutate. Application servers wrapped around repositories (e.g. Gerrit
+Code Review) can layer their own lock/wait queue to improve fairness
+to writers.
+
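+A rough, non-robust sketch of steps 1-7 with `java.nio.file` follows
+(step 3's `update_index` is taken as a parameter; real writers need
+retries with backoff on the lock and cleanup of the temporary file on
+failure):
+
+```
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
+import java.util.List;
+
+// Sketch only: append one new reftable to the stack under refs.lock.
+static void addTable(Path gitDir, byte[] table, long updateIndex)
+    throws IOException {
+  Path refs = gitDir.resolve("refs");
+  Path lock = gitDir.resolve("refs.lock");
+  Files.createFile(lock);                              // (1) fails if held
+  try {
+    List<String> stack =
+        new ArrayList<>(Files.readAllLines(refs));     // (2)
+    String name = String.format("%08d.ref", updateIndex);
+    Path dir = gitDir.resolve("reftable");
+    Path tmp = Files.createTempFile(dir,
+        updateIndex + "_", null);                      // (4)
+    Files.write(tmp, table);
+    Files.move(tmp, dir.resolve(name));                // (5)
+    stack.add(name);                                   // (6)
+    Files.write(lock, stack);
+    Files.move(lock, refs,                             // (7)
+        StandardCopyOption.REPLACE_EXISTING,
+        StandardCopyOption.ATOMIC_MOVE);
+  } catch (IOException err) {
+    Files.deleteIfExists(lock);
+    throw err;
+  }
+}
+```
+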
+### Reference deletions
+
+Deletion of any reference can be explicitly stored by setting the
+`type` to `0x0` and omitting the `value` field of the `ref_record`.
+This serves as a tombstone, overriding any assertions about the
+existence of the reference from earlier files in the stack.
+
+### Compaction
+
+A partial stack of reftables can be compacted by merging references
+using a straightforward merge join across reftables, selecting the
+most recent value for output, and omitting deleted references that do
+not appear in remaining, lower reftables.
+
+A compacted reftable should set its `min_update_index` to the smallest of
+the input files' `min_update_index`, and its `max_update_index`
+likewise to the largest input `max_update_index`.
+
+For sake of illustration, assume the stack currently consists of
+reftable files (from oldest to newest): A, B, C, and D. The compactor
+is going to compact B and C, leaving A and D alone.
+
+1. Obtain lock `refs.lock` and read the `refs` file.
+2. Obtain locks `B.lock` and `C.lock`.
+ Ownership of these locks prevents other processes from trying
+ to compact these files.
+3. Release `refs.lock`.
+4. Compact `B` and `C` into a temp file `${min_update_index}_XXXXXX`.
+5. Reacquire lock `refs.lock`.
+6. Verify that `B` and `C` are still in the stack, in that order. This
+ should always be the case, assuming that other processes are adhering
+ to the locking protocol.
+7. Rename `${min_update_index}_XXXXXX` to `${min_update_index}_2.ref`.
+8. Write the new stack to `refs.lock`, replacing `B` and `C` with the
+ file from (4).
+9. Rename `refs.lock` to `refs`.
+10. Delete `B` and `C`, perhaps after a short sleep to avoid forcing
+ readers to backtrack.
+
+This strategy permits compactions to proceed independently of updates.
+
+## Alternatives considered
+
+### bzip packed-refs
+
+`bzip2` can significantly shrink a large packed-refs file (e.g. 62
+MiB compresses to 23 MiB, 37%). However the bzip format does not support
+random access to a single reference. Readers must inflate and discard
+while performing a linear scan.
+
+Breaking packed-refs into chunks (individually compressing each chunk)
+would reduce the amount of data a reader must inflate, but still
+leaves the problem of indexing chunks to support readers efficiently
+locating the correct chunk.
+
+Given the compression achieved by reftable's encoding, it does not
+seem necessary to add the complexity of bzip/gzip/zlib.
+
+### Michael Haggerty's alternate format
+
+Michael Haggerty proposed [an alternate][mh-alt] format to reftable on
+the Git mailing list. This format uses smaller chunks, without the
+restart table, and avoids block alignment with padding. Reflog entries
+immediately follow each ref, and are thus interleaved between refs.
+
+Performance testing indicates reftable is faster for lookups (51%
+faster, 11.2 usec vs. 5.4 usec), although reftable produces a
+slightly larger file (+ ~3.2%, 28.3M vs 29.2M):
+
+format | size | seek cold | seek hot |
+---------:|-------:|----------:|----------:|
+mh-alt | 28.3 M | 23.4 usec | 11.2 usec |
+reftable | 29.2 M | 19.9 usec | 5.4 usec |
+
+[mh-alt]: https://public-inbox.org/git/CAMy9T_HCnyc1g8XWOOWhe7nN0aEFyyBskV2aOMb_fe+wGvEJ7A@mail.gmail.com/
+
+### JGit Ketch RefTree
+
+[JGit Ketch][ketch] proposed [RefTree][reftree], an encoding of
+references inside Git tree objects stored as part of the repository's
+object database.
+
+The RefTree format adds additional load on the object database storage
+layer (more loose objects, more objects in packs), and relies heavily
+on the packer's delta compression to save space. Namespaces which are
+flat (e.g. thousands of tags in refs/tags) initially create very
+large loose objects, and so RefTree does not address the problem of
+copying many references to modify a handful.
+
+Flat namespaces are not efficiently searchable in RefTree, as tree
+objects in canonical formatting cannot be binary searched. This fails
+the need to handle a large number of references in a single namespace,
+such as GitHub's `refs/pulls`, or a project with many tags.
+
+[ketch]: https://dev.eclipse.org/mhonarc/lists/jgit-dev/msg03073.html
+[reftree]: https://public-inbox.org/git/CAJo=hJvnAPNAdDcAAwAvU9C4RVeQdoS3Ev9WTguHx4fD0V_nOg@mail.gmail.com/
+
+### LMDB
+
+David Turner proposed [using LMDB][dt-lmdb], as LMDB is lightweight
+(64k of runtime code) and GPL-compatible license.
+
+A downside of LMDB is its reliance on a single C implementation. This
+makes embedding inside JGit (a popular reimplementation of Git)
+difficult, and hoisting onto virtual storage (for JGit DFS) virtually
+impossible.
+
+A common format that can be supported by all major Git implementations
+(git-core, JGit, libgit2) is strongly preferred.
+
+[dt-lmdb]: https://public-inbox.org/git/1455772670-21142-26-git-send-email-dturner@twopensource.com/
+
+## Future
+
+### Longer hashes
+
+The version number will bump (e.g. to 2) to indicate that `value` uses an
+object id length other than 20. The length could be stored in an
+expanded file header, or hardcoded as part of the version.
diff --git a/lib/BUILD b/lib/BUILD
index 827e6b6..ffe66a3 100644
--- a/lib/BUILD
+++ b/lib/BUILD
@@ -115,9 +115,9 @@
testonly = 1,
visibility = ["//visibility:public"],
exports = [
- "@junit//jar",
- "@hamcrest_core//jar",
- "@hamcrest_library//jar",
+ "@hamcrest_core//jar",
+ "@hamcrest_library//jar",
+ "@junit//jar",
],
)
diff --git a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/GitSmartHttpTools.java b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/GitSmartHttpTools.java
index 03c9d8d..cfe4822 100644
--- a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/GitSmartHttpTools.java
+++ b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/GitSmartHttpTools.java
@@ -201,7 +201,7 @@ public static void sendError(HttpServletRequest req,
} else {
if (httpStatus < 400)
ServletUtils.consumeRequestBody(req);
- res.sendError(httpStatus);
+ res.sendError(httpStatus, textForGit);
}
}
diff --git a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/glue/ServletBinder.java b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/glue/ServletBinder.java
index 9c3ed50..47443f5 100644
--- a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/glue/ServletBinder.java
+++ b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/glue/ServletBinder.java
@@ -60,4 +60,4 @@ public interface ServletBinder {
* the servlet to execute on this path.
*/
public void with(HttpServlet servlet);
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/AsIsFileService.java b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/AsIsFileService.java
index 88ad472..d20fe9f 100644
--- a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/AsIsFileService.java
+++ b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/AsIsFileService.java
@@ -47,7 +47,6 @@
import org.eclipse.jgit.http.server.GitServlet;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.resolver.ServiceNotAuthorizedException;
import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
@@ -71,13 +70,6 @@ public void access(HttpServletRequest req, Repository db)
}
};
- private static final SectionParser<ServiceConfig> CONFIG = new SectionParser<ServiceConfig>() {
- @Override
- public ServiceConfig parse(final Config cfg) {
- return new ServiceConfig(cfg);
- }
- };
-
private static class ServiceConfig {
final boolean enabled;
@@ -96,7 +88,7 @@ private static class ServiceConfig {
* {@code true}.
*/
protected static boolean isEnabled(Repository db) {
- return db.getConfig().get(CONFIG).enabled;
+ return db.getConfig().get(ServiceConfig::new).enabled;
}
/**
diff --git a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultReceivePackFactory.java b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultReceivePackFactory.java
index 04e192b..c0ffbb6 100644
--- a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultReceivePackFactory.java
+++ b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultReceivePackFactory.java
@@ -46,7 +46,6 @@
import javax.servlet.http.HttpServletRequest;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.ReceivePack;
@@ -68,13 +67,6 @@
*/
public class DefaultReceivePackFactory implements
ReceivePackFactory<HttpServletRequest> {
- private static final SectionParser<ServiceConfig> CONFIG = new SectionParser<ServiceConfig>() {
- @Override
- public ServiceConfig parse(final Config cfg) {
- return new ServiceConfig(cfg);
- }
- };
-
private static class ServiceConfig {
final boolean set;
@@ -89,7 +81,7 @@ private static class ServiceConfig {
@Override
public ReceivePack create(final HttpServletRequest req, final Repository db)
throws ServiceNotEnabledException, ServiceNotAuthorizedException {
- final ServiceConfig cfg = db.getConfig().get(CONFIG);
+ final ServiceConfig cfg = db.getConfig().get(ServiceConfig::new);
String user = req.getRemoteUser();
if (cfg.set) {
diff --git a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultUploadPackFactory.java b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultUploadPackFactory.java
index d01e2ef..642623b 100644
--- a/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultUploadPackFactory.java
+++ b/org.eclipse.jgit.http.server/src/org/eclipse/jgit/http/server/resolver/DefaultUploadPackFactory.java
@@ -46,7 +46,6 @@
import javax.servlet.http.HttpServletRequest;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.UploadPack;
import org.eclipse.jgit.transport.resolver.ServiceNotAuthorizedException;
@@ -61,13 +60,6 @@
*/
public class DefaultUploadPackFactory implements
UploadPackFactory<HttpServletRequest> {
- private static final SectionParser<ServiceConfig> CONFIG = new SectionParser<ServiceConfig>() {
- @Override
- public ServiceConfig parse(final Config cfg) {
- return new ServiceConfig(cfg);
- }
- };
-
private static class ServiceConfig {
final boolean enabled;
@@ -79,7 +71,7 @@ private static class ServiceConfig {
@Override
public UploadPack create(final HttpServletRequest req, final Repository db)
throws ServiceNotEnabledException, ServiceNotAuthorizedException {
- if (db.getConfig().get(CONFIG).enabled)
+ if (db.getConfig().get(ServiceConfig::new).enabled)
return new UploadPack(db);
else
throw new ServiceNotEnabledException();
diff --git a/org.eclipse.jgit.http.test/BUILD b/org.eclipse.jgit.http.test/BUILD
index ce2d611..85a2242 100644
--- a/org.eclipse.jgit.http.test/BUILD
+++ b/org.eclipse.jgit.http.test/BUILD
@@ -34,6 +34,7 @@
srcs = glob(["src/**/*.java"]),
deps = [
"//lib:junit",
+ "//lib:servlet-api",
"//org.eclipse.jgit.http.server:jgit-servlet",
"//org.eclipse.jgit:jgit",
"//org.eclipse.jgit.junit.http:junit-http",
diff --git a/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF b/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
index 421fa8a..08bd0ef 100644
--- a/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.http.test/META-INF/MANIFEST.MF
@@ -8,6 +8,8 @@
Bundle-RequiredExecutionEnvironment: JavaSE-1.8
Import-Package: javax.servlet;version="[2.5.0,3.2.0)",
javax.servlet.http;version="[2.5.0,3.2.0)",
+ org.apache.commons.codec;version="1.6.0",
+ org.apache.commons.codec.binary;version="1.6.0",
org.eclipse.jetty.continuation;version="[9.4.5,10.0.0)",
org.eclipse.jetty.http;version="[9.4.5,10.0.0)",
org.eclipse.jetty.io;version="[9.4.5,10.0.0)",
diff --git a/org.eclipse.jgit.http.test/pom.xml b/org.eclipse.jgit.http.test/pom.xml
index 7d9c17f..85b7c68 100644
--- a/org.eclipse.jgit.http.test/pom.xml
+++ b/org.eclipse.jgit.http.test/pom.xml
@@ -87,14 +87,12 @@
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit.junit.http</artifactId>
<version>${project.version}</version>
- <scope>test</scope>
</dependency>
<dependency>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit.junit</artifactId>
<version>${project.version}</version>
- <scope>test</scope>
</dependency>
<dependency>
@@ -107,7 +105,6 @@
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
- <scope>test</scope>
</dependency>
</dependencies>
diff --git a/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/TestRepositoryResolver.java b/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/TestRepositoryResolver.java
new file mode 100644
index 0000000..334e57c
--- /dev/null
+++ b/org.eclipse.jgit.http.test/src/org/eclipse/jgit/http/test/TestRepositoryResolver.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2016, 2017 Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.http.test;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.eclipse.jgit.errors.RepositoryNotFoundException;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.transport.resolver.RepositoryResolver;
+import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
+
+/** A simple repository resolver for tests. */
+public final class TestRepositoryResolver
+ implements RepositoryResolver<HttpServletRequest> {
+
+ private final TestRepository<Repository> repo;
+
+ private final String repoName;
+
+ /**
+ * Creates a new {@link TestRepositoryResolver} that resolves the given name to
+ * the given repository.
+ *
+ * @param repo
+ * to resolve to
+ * @param repoName
+ * to match
+ */
+ public TestRepositoryResolver(TestRepository<Repository> repo, String repoName) {
+ this.repo = repo;
+ this.repoName = repoName;
+ }
+
+ @Override
+ public Repository open(HttpServletRequest req, String name)
+ throws RepositoryNotFoundException, ServiceNotEnabledException {
+ if (!name.equals(repoName)) {
+ throw new RepositoryNotFoundException(name);
+ }
+ Repository db = repo.getRepository();
+ db.incrementOpen();
+ return db;
+ }
+}
diff --git a/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/DumbClientSmartServerTest.java b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/DumbClientSmartServerTest.java
index 06bfd79..727f9ba 100644
--- a/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/DumbClientSmartServerTest.java
+++ b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/DumbClientSmartServerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010, Google Inc.
+ * Copyright (C) 2010, 2017 Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -60,12 +60,9 @@
import java.util.List;
import java.util.Map;
-import javax.servlet.http.HttpServletRequest;
-
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jgit.errors.NotSupportedException;
-import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.http.server.GitServlet;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.junit.http.AccessEvent;
@@ -84,8 +81,6 @@
import org.eclipse.jgit.transport.http.HttpConnectionFactory;
import org.eclipse.jgit.transport.http.JDKHttpConnectionFactory;
import org.eclipse.jgit.transport.http.apache.HttpClientConnectionFactory;
-import org.eclipse.jgit.transport.resolver.RepositoryResolver;
-import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -124,19 +119,7 @@ public void setUp() throws Exception {
ServletContextHandler app = server.addContext("/git");
GitServlet gs = new GitServlet();
- gs.setRepositoryResolver(new RepositoryResolver<HttpServletRequest>() {
- @Override
- public Repository open(HttpServletRequest req, String name)
- throws RepositoryNotFoundException,
- ServiceNotEnabledException {
- if (!name.equals(srcName))
- throw new RepositoryNotFoundException(name);
-
- final Repository db = src.getRepository();
- db.incrementOpen();
- return db;
- }
- });
+ gs.setRepositoryResolver(new TestRepositoryResolver(src, srcName));
app.addServlet(new ServletHolder(gs), "/*");
server.setUp();
diff --git a/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerSslTest.java b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerSslTest.java
new file mode 100644
index 0000000..4206458
--- /dev/null
+++ b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerSslTest.java
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) 2017 Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.http.test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.List;
+
+import javax.servlet.DispatcherType;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.eclipse.jetty.servlet.FilterHolder;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jgit.errors.TransportException;
+import org.eclipse.jgit.http.server.GitServlet;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.junit.http.AccessEvent;
+import org.eclipse.jgit.junit.http.AppServer;
+import org.eclipse.jgit.junit.http.HttpTestCase;
+import org.eclipse.jgit.lib.ConfigConstants;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.revwalk.RevBlob;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
+import org.eclipse.jgit.transport.HttpTransport;
+import org.eclipse.jgit.transport.Transport;
+import org.eclipse.jgit.transport.URIish;
+import org.eclipse.jgit.transport.http.HttpConnectionFactory;
+import org.eclipse.jgit.transport.http.JDKHttpConnectionFactory;
+import org.eclipse.jgit.transport.http.apache.HttpClientConnectionFactory;
+import org.eclipse.jgit.util.FS;
+import org.eclipse.jgit.util.HttpSupport;
+import org.eclipse.jgit.util.SystemReader;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class SmartClientSmartServerSslTest extends HttpTestCase {
+
+ private URIish remoteURI;
+
+ private URIish secureURI;
+
+ private RevBlob A_txt;
+
+ private RevCommit A, B;
+
+ @Parameters
+ public static Collection<Object[]> data() {
+ // run all tests with both connection factories we have
+ return Arrays.asList(new Object[][] {
+ { new JDKHttpConnectionFactory() },
+ { new HttpClientConnectionFactory() } });
+ }
+
+ public SmartClientSmartServerSslTest(HttpConnectionFactory cf) {
+ HttpTransport.setConnectionFactory(cf);
+ }
+
+ @Override
+ protected AppServer createServer() {
+ return new AppServer(0, 0);
+ }
+
+ @Override
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ final TestRepository<Repository> src = createTestRepository();
+ final String srcName = src.getRepository().getDirectory().getName();
+ src.getRepository()
+ .getConfig()
+ .setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
+ ConfigConstants.CONFIG_KEY_LOGALLREFUPDATES, true);
+
+ GitServlet gs = new GitServlet();
+
+ ServletContextHandler app = addNormalContext(gs, src, srcName);
+
+ server.setUp();
+
+ remoteURI = toURIish(app, srcName);
+ secureURI = new URIish(rewriteUrl(remoteURI.toString(), "https",
+ server.getSecurePort()));
+
+ A_txt = src.blob("A");
+ A = src.commit().add("A_txt", A_txt).create();
+ B = src.commit().parent(A).add("A_txt", "C").add("B", "B").create();
+ src.update(master, B);
+
+ src.update("refs/garbage/a/very/long/ref/name/to/compress", B);
+
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ userConfig.setBoolean("http", null, "sslVerify", false);
+ userConfig.save();
+ }
+
+ private ServletContextHandler addNormalContext(GitServlet gs, TestRepository<Repository> src, String srcName) {
+ ServletContextHandler app = server.addContext("/git");
+ app.addFilter(new FilterHolder(new Filter() {
+
+ @Override
+ public void init(FilterConfig filterConfig)
+ throws ServletException {
+ // empty
+ }
+
+ // Redirects http to https for requests containing "/https/".
+ @Override
+ public void doFilter(ServletRequest request,
+ ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ final HttpServletResponse httpServletResponse = (HttpServletResponse) response;
+ final HttpServletRequest httpServletRequest = (HttpServletRequest) request;
+ final StringBuffer fullUrl = httpServletRequest.getRequestURL();
+ if (httpServletRequest.getQueryString() != null) {
+ fullUrl.append("?")
+ .append(httpServletRequest.getQueryString());
+ }
+ String urlString = rewriteUrl(fullUrl.toString(), "https",
+ server.getSecurePort());
+ httpServletResponse
+ .setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ httpServletResponse.setHeader(HttpSupport.HDR_LOCATION,
+ urlString.replace("/https/", "/"));
+ }
+
+ @Override
+ public void destroy() {
+ // empty
+ }
+ }), "/https/*", EnumSet.of(DispatcherType.REQUEST));
+ app.addFilter(new FilterHolder(new Filter() {
+
+ @Override
+ public void init(FilterConfig filterConfig)
+ throws ServletException {
+ // empty
+ }
+
+ // Redirects https back to http for requests containing "/back/".
+ @Override
+ public void doFilter(ServletRequest request,
+ ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ final HttpServletResponse httpServletResponse = (HttpServletResponse) response;
+ final HttpServletRequest httpServletRequest = (HttpServletRequest) request;
+ final StringBuffer fullUrl = httpServletRequest.getRequestURL();
+ if (httpServletRequest.getQueryString() != null) {
+ fullUrl.append("?")
+ .append(httpServletRequest.getQueryString());
+ }
+ String urlString = rewriteUrl(fullUrl.toString(), "http",
+ server.getPort());
+ httpServletResponse
+ .setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ httpServletResponse.setHeader(HttpSupport.HDR_LOCATION,
+ urlString.replace("/back/", "/"));
+ }
+
+ @Override
+ public void destroy() {
+ // empty
+ }
+ }), "/back/*", EnumSet.of(DispatcherType.REQUEST));
+ gs.setRepositoryResolver(new TestRepositoryResolver(src, srcName));
+ app.addServlet(new ServletHolder(gs), "/*");
+ return app;
+ }
+
+ @Test
+ public void testInitialClone_ViaHttps() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ try (Transport t = Transport.open(dst, secureURI)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(2, requests.size());
+ }
+
+ @Test
+ public void testInitialClone_RedirectToHttps() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(remoteURI, "/https");
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(3, requests.size());
+ }
+
+ @Test
+ public void testInitialClone_RedirectBackToHttp() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(secureURI, "/back");
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should have failed (redirect from https to http)");
+ } catch (TransportException e) {
+ assertTrue(e.getMessage().contains("not allowed"));
+ }
+ }
+
+}
diff --git a/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerTest.java b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerTest.java
index ed223c9..51b7990 100644
--- a/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerTest.java
+++ b/org.eclipse.jgit.http.test/tst/org/eclipse/jgit/http/test/SmartClientSmartServerTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010, Google Inc.
+ * Copyright (C) 2010, 2017 Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -57,17 +57,21 @@
import java.io.PrintWriter;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
+import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
+import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
@@ -78,8 +82,8 @@
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jgit.errors.RemoteRepositoryException;
-import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.errors.TransportException;
+import org.eclipse.jgit.errors.UnsupportedCredentialItem;
import org.eclipse.jgit.http.server.GitServlet;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
@@ -101,18 +105,22 @@
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.revwalk.RevBlob;
import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
+import org.eclipse.jgit.transport.CredentialItem;
+import org.eclipse.jgit.transport.CredentialsProvider;
import org.eclipse.jgit.transport.FetchConnection;
import org.eclipse.jgit.transport.HttpTransport;
import org.eclipse.jgit.transport.RemoteRefUpdate;
import org.eclipse.jgit.transport.Transport;
import org.eclipse.jgit.transport.TransportHttp;
import org.eclipse.jgit.transport.URIish;
+import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
import org.eclipse.jgit.transport.http.HttpConnectionFactory;
import org.eclipse.jgit.transport.http.JDKHttpConnectionFactory;
import org.eclipse.jgit.transport.http.apache.HttpClientConnectionFactory;
-import org.eclipse.jgit.transport.resolver.RepositoryResolver;
-import org.eclipse.jgit.transport.resolver.ServiceNotEnabledException;
+import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.HttpSupport;
+import org.eclipse.jgit.util.SystemReader;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -125,12 +133,19 @@ public class SmartClientSmartServerTest extends HttpTestCase {
private Repository remoteRepository;
+ private CredentialsProvider testCredentials = new UsernamePasswordCredentialsProvider(
+ AppServer.username, AppServer.password);
+
private URIish remoteURI;
private URIish brokenURI;
private URIish redirectURI;
+ private URIish authURI;
+
+ private URIish authOnPostURI;
+
private RevBlob A_txt;
private RevCommit A, B;
@@ -165,7 +180,11 @@ public void setUp() throws Exception {
ServletContextHandler broken = addBrokenContext(gs, src, srcName);
- ServletContextHandler redirect = addRedirectContext(gs, src, srcName);
+ ServletContextHandler redirect = addRedirectContext(gs);
+
+ ServletContextHandler auth = addAuthContext(gs, "auth");
+
+ ServletContextHandler authOnPost = addAuthContext(gs, "pauth", "POST");
server.setUp();
@@ -173,6 +192,8 @@ public void setUp() throws Exception {
remoteURI = toURIish(app, srcName);
brokenURI = toURIish(broken, srcName);
redirectURI = toURIish(redirect, srcName);
+ authURI = toURIish(auth, srcName);
+ authOnPostURI = toURIish(authOnPost, srcName);
A_txt = src.blob("A");
A = src.commit().add("A_txt", A_txt).create();
@@ -184,7 +205,52 @@ public void setUp() throws Exception {
private ServletContextHandler addNormalContext(GitServlet gs, TestRepository<Repository> src, String srcName) {
ServletContextHandler app = server.addContext("/git");
- gs.setRepositoryResolver(new TestRepoResolver(src, srcName));
+ app.addFilter(new FilterHolder(new Filter() {
+
+ @Override
+ public void init(FilterConfig filterConfig)
+ throws ServletException {
+ // empty
+ }
+
+ // Does an internal forward for GET requests containing "/post/",
+ // and issues a 301 redirect on POST requests for such URLs. Used
+ // in the POST redirect tests.
+ @Override
+ public void doFilter(ServletRequest request,
+ ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ final HttpServletResponse httpServletResponse = (HttpServletResponse) response;
+ final HttpServletRequest httpServletRequest = (HttpServletRequest) request;
+ final StringBuffer fullUrl = httpServletRequest.getRequestURL();
+ if (httpServletRequest.getQueryString() != null) {
+ fullUrl.append("?")
+ .append(httpServletRequest.getQueryString());
+ }
+ String urlString = fullUrl.toString();
+ if ("POST".equalsIgnoreCase(httpServletRequest.getMethod())) {
+ httpServletResponse.setStatus(
+ HttpServletResponse.SC_MOVED_PERMANENTLY);
+ httpServletResponse.setHeader(HttpSupport.HDR_LOCATION,
+ urlString.replace("/post/", "/"));
+ } else {
+ String path = httpServletRequest.getPathInfo();
+ path = path.replace("/post/", "/");
+ if (httpServletRequest.getQueryString() != null) {
+ path += '?' + httpServletRequest.getQueryString();
+ }
+ RequestDispatcher dispatcher = httpServletRequest
+ .getRequestDispatcher(path);
+ dispatcher.forward(httpServletRequest, httpServletResponse);
+ }
+ }
+
+ @Override
+ public void destroy() {
+ // empty
+ }
+ }), "/post/*", EnumSet.of(DispatcherType.REQUEST));
+ gs.setRepositoryResolver(new TestRepositoryResolver(src, srcName));
app.addServlet(new ServletHolder(gs), "/*");
return app;
}
@@ -222,12 +288,28 @@ public void destroy() {
return broken;
}
- @SuppressWarnings("unused")
- private ServletContextHandler addRedirectContext(GitServlet gs,
- TestRepository<Repository> src, String srcName) {
+ private ServletContextHandler addAuthContext(GitServlet gs,
+ String contextPath, String... methods) {
+ ServletContextHandler auth = server.addContext('/' + contextPath);
+ auth.addServlet(new ServletHolder(gs), "/*");
+ return server.authBasic(auth, methods);
+ }
+
+ private ServletContextHandler addRedirectContext(GitServlet gs) {
ServletContextHandler redirect = server.addContext("/redirect");
redirect.addFilter(new FilterHolder(new Filter() {
+ // Enables tests for different codes, and for multiple redirects.
+			// The first parameter is the number of redirects, the second is
+			// the redirect status code to use.
+ private Pattern responsePattern = Pattern
+ .compile("/response/(\\d+)/(30[1237])/");
+
+ // Enables tests to specify the context that the request should be
+			// redirected to in the end. If not present, redirects go to the
+ // normal /git context.
+ private Pattern targetPattern = Pattern.compile("/target(/\\w+)/");
+
@Override
public void init(FilterConfig filterConfig)
throws ServletException {
@@ -245,10 +327,50 @@ public void doFilter(ServletRequest request,
fullUrl.append("?")
.append(httpServletRequest.getQueryString());
}
- httpServletResponse
- .setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
+ String urlString = fullUrl.toString();
+ if (urlString.contains("/loop/")) {
+ urlString = urlString.replace("/loop/", "/loop/x/");
+ if (urlString.contains("/loop/x/x/x/x/x/x/x/x/")) {
+ // Go back to initial.
+ urlString = urlString.replace("/loop/x/x/x/x/x/x/x/x/",
+ "/loop/");
+ }
+ httpServletResponse.setStatus(
+ HttpServletResponse.SC_MOVED_TEMPORARILY);
+ httpServletResponse.setHeader(HttpSupport.HDR_LOCATION,
+ urlString);
+ return;
+ }
+ int responseCode = HttpServletResponse.SC_MOVED_PERMANENTLY;
+ int nofRedirects = 0;
+ Matcher matcher = responsePattern.matcher(urlString);
+ if (matcher.find()) {
+ nofRedirects = Integer
+ .parseUnsignedInt(matcher.group(1));
+ responseCode = Integer.parseUnsignedInt(matcher.group(2));
+ if (--nofRedirects <= 0) {
+ urlString = urlString.substring(0, matcher.start())
+ + '/' + urlString.substring(matcher.end());
+ } else {
+ urlString = urlString.substring(0, matcher.start())
+ + "/response/" + nofRedirects + "/"
+ + responseCode + '/'
+ + urlString.substring(matcher.end());
+ }
+ }
+ httpServletResponse.setStatus(responseCode);
+ if (nofRedirects <= 0) {
+ String targetContext = "/git";
+ matcher = targetPattern.matcher(urlString);
+ if (matcher.find()) {
+ urlString = urlString.substring(0, matcher.start())
+ + '/' + urlString.substring(matcher.end());
+ targetContext = matcher.group(1);
+ }
+ urlString = urlString.replace("/redirect", targetContext);
+ }
httpServletResponse.setHeader(HttpSupport.HDR_LOCATION,
- fullUrl.toString().replace("/redirect", "/git"));
+ urlString);
}
@Override
@@ -373,13 +495,332 @@ public void testInitialClone_Small() throws Exception {
.getResponseHeader(HDR_CONTENT_TYPE));
}
+ private void initialClone_Redirect(int nofRedirects, int code)
+ throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = redirectURI;
+ if (code != 301 || nofRedirects > 1) {
+ cloneFrom = extendPath(cloneFrom,
+ "/response/" + nofRedirects + "/" + code);
+ }
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(2 + nofRedirects, requests.size());
+
+ int n = 0;
+ while (n < nofRedirects) {
+ AccessEvent redirect = requests.get(n++);
+ assertEquals(code, redirect.getStatus());
+ }
+
+ AccessEvent info = requests.get(n++);
+ assertEquals("GET", info.getMethod());
+ assertEquals(join(remoteURI, "info/refs"), info.getPath());
+ assertEquals(1, info.getParameters().size());
+ assertEquals("git-upload-pack", info.getParameter("service"));
+ assertEquals(200, info.getStatus());
+ assertEquals("application/x-git-upload-pack-advertisement",
+ info.getResponseHeader(HDR_CONTENT_TYPE));
+ assertEquals("gzip", info.getResponseHeader(HDR_CONTENT_ENCODING));
+
+ AccessEvent service = requests.get(n++);
+ assertEquals("POST", service.getMethod());
+ assertEquals(join(remoteURI, "git-upload-pack"), service.getPath());
+ assertEquals(0, service.getParameters().size());
+ assertNotNull("has content-length",
+ service.getRequestHeader(HDR_CONTENT_LENGTH));
+ assertNull("not chunked",
+ service.getRequestHeader(HDR_TRANSFER_ENCODING));
+
+ assertEquals(200, service.getStatus());
+ assertEquals("application/x-git-upload-pack-result",
+ service.getResponseHeader(HDR_CONTENT_TYPE));
+ }
+
@Test
- public void testInitialClone_RedirectSmall() throws Exception {
+ public void testInitialClone_Redirect301Small() throws Exception {
+ initialClone_Redirect(1, 301);
+ }
+
+ @Test
+ public void testInitialClone_Redirect302Small() throws Exception {
+ initialClone_Redirect(1, 302);
+ }
+
+ @Test
+ public void testInitialClone_Redirect303Small() throws Exception {
+ initialClone_Redirect(1, 303);
+ }
+
+ @Test
+ public void testInitialClone_Redirect307Small() throws Exception {
+ initialClone_Redirect(1, 307);
+ }
+
+ @Test
+ public void testInitialClone_RedirectMultiple() throws Exception {
+ initialClone_Redirect(4, 302);
+ }
+
+ @Test
+ public void testInitialClone_RedirectMax() throws Exception {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ userConfig.setInt("http", null, "maxRedirects", 4);
+ userConfig.save();
+ initialClone_Redirect(4, 302);
+ }
+
+ @Test
+ public void testInitialClone_RedirectTooOften() throws Exception {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ userConfig.setInt("http", null, "maxRedirects", 3);
+ userConfig.save();
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(redirectURI, "/response/4/302");
+ String remoteUri = cloneFrom.toString();
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should have failed (too many redirects)");
+ } catch (TransportException e) {
+ String expectedMessageBegin = remoteUri.toString() + ": "
+ + MessageFormat.format(JGitText.get().redirectLimitExceeded,
+ "3", remoteUri.replace("/4/", "/1/") + '/', "");
+ String message = e.getMessage();
+ if (message.length() > expectedMessageBegin.length()) {
+ message = message.substring(0, expectedMessageBegin.length());
+ }
+ assertEquals(expectedMessageBegin, message);
+ }
+ }
+
+ @Test
+ public void testInitialClone_RedirectLoop() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(redirectURI, "/loop");
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should have failed (redirect loop)");
+ } catch (TransportException e) {
+ assertTrue(e.getMessage().contains("Redirected more than"));
+ }
+ }
+
+ @Test
+ public void testInitialClone_RedirectOnPostAllowed() throws Exception {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ userConfig.setString("http", null, "followRedirects", "true");
+ userConfig.save();
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(remoteURI, "/post");
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(3, requests.size());
+
+ AccessEvent info = requests.get(0);
+ assertEquals("GET", info.getMethod());
+ assertEquals(join(cloneFrom, "info/refs"), info.getPath());
+ assertEquals(1, info.getParameters().size());
+ assertEquals("git-upload-pack", info.getParameter("service"));
+ assertEquals(200, info.getStatus());
+ assertEquals("application/x-git-upload-pack-advertisement",
+ info.getResponseHeader(HDR_CONTENT_TYPE));
+ assertEquals("gzip", info.getResponseHeader(HDR_CONTENT_ENCODING));
+
+ AccessEvent redirect = requests.get(1);
+ assertEquals("POST", redirect.getMethod());
+ assertEquals(301, redirect.getStatus());
+
+ AccessEvent service = requests.get(2);
+ assertEquals("POST", service.getMethod());
+ assertEquals(join(remoteURI, "git-upload-pack"), service.getPath());
+ assertEquals(0, service.getParameters().size());
+ assertNotNull("has content-length",
+ service.getRequestHeader(HDR_CONTENT_LENGTH));
+ assertNull("not chunked",
+ service.getRequestHeader(HDR_TRANSFER_ENCODING));
+
+ assertEquals(200, service.getStatus());
+ assertEquals("application/x-git-upload-pack-result",
+ service.getResponseHeader(HDR_CONTENT_TYPE));
+ }
+
+ @Test
+ public void testInitialClone_RedirectOnPostForbidden() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(remoteURI, "/post");
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should have failed (redirect on POST)");
+ } catch (TransportException e) {
+ assertTrue(e.getMessage().contains("301"));
+ }
+ }
+
+ @Test
+ public void testInitialClone_RedirectForbidden() throws Exception {
+ FileBasedConfig userConfig = SystemReader.getInstance()
+ .openUserConfig(null, FS.DETECTED);
+ userConfig.setString("http", null, "followRedirects", "false");
+ userConfig.save();
+
Repository dst = createBareRepository();
assertFalse(dst.hasObject(A_txt));
try (Transport t = Transport.open(dst, redirectURI)) {
t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should have failed (redirects forbidden)");
+ } catch (TransportException e) {
+ assertTrue(
+ e.getMessage().contains("http.followRedirects is false"));
+ }
+ }
+
+ @Test
+ public void testInitialClone_WithAuthentication() throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ try (Transport t = Transport.open(dst, authURI)) {
+ t.setCredentialsProvider(testCredentials);
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(3, requests.size());
+
+ AccessEvent info = requests.get(0);
+ assertEquals("GET", info.getMethod());
+ assertEquals(401, info.getStatus());
+
+ info = requests.get(1);
+ assertEquals("GET", info.getMethod());
+ assertEquals(join(authURI, "info/refs"), info.getPath());
+ assertEquals(1, info.getParameters().size());
+ assertEquals("git-upload-pack", info.getParameter("service"));
+ assertEquals(200, info.getStatus());
+ assertEquals("application/x-git-upload-pack-advertisement",
+ info.getResponseHeader(HDR_CONTENT_TYPE));
+ assertEquals("gzip", info.getResponseHeader(HDR_CONTENT_ENCODING));
+
+ AccessEvent service = requests.get(2);
+ assertEquals("POST", service.getMethod());
+ assertEquals(join(authURI, "git-upload-pack"), service.getPath());
+ assertEquals(0, service.getParameters().size());
+ assertNotNull("has content-length",
+ service.getRequestHeader(HDR_CONTENT_LENGTH));
+ assertNull("not chunked",
+ service.getRequestHeader(HDR_TRANSFER_ENCODING));
+
+ assertEquals(200, service.getStatus());
+ assertEquals("application/x-git-upload-pack-result",
+ service.getResponseHeader(HDR_CONTENT_TYPE));
+ }
+
+ @Test
+ public void testInitialClone_WithAuthenticationNoCredentials()
+ throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ try (Transport t = Transport.open(dst, authURI)) {
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should not have succeeded -- no authentication");
+ } catch (TransportException e) {
+ String msg = e.getMessage();
+ assertTrue("Unexpected exception message: " + msg,
+ msg.contains("no CredentialsProvider"));
+ }
+ List<AccessEvent> requests = getRequests();
+ assertEquals(1, requests.size());
+
+ AccessEvent info = requests.get(0);
+ assertEquals("GET", info.getMethod());
+ assertEquals(401, info.getStatus());
+ }
+
+ @Test
+ public void testInitialClone_WithAuthenticationWrongCredentials()
+ throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ try (Transport t = Transport.open(dst, authURI)) {
+ t.setCredentialsProvider(new UsernamePasswordCredentialsProvider(
+ AppServer.username, "wrongpassword"));
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ fail("Should not have succeeded -- wrong password");
+ } catch (TransportException e) {
+ String msg = e.getMessage();
+ assertTrue("Unexpected exception message: " + msg,
+ msg.contains("auth"));
+ }
+ List<AccessEvent> requests = getRequests();
+ // Once without authentication plus three re-tries with authentication
+ assertEquals(4, requests.size());
+
+ for (AccessEvent event : requests) {
+ assertEquals("GET", event.getMethod());
+ assertEquals(401, event.getStatus());
+ }
+ }
+
+ @Test
+ public void testInitialClone_WithAuthenticationAfterRedirect()
+ throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ URIish cloneFrom = extendPath(redirectURI, "/target/auth");
+ CredentialsProvider uriSpecificCredentialsProvider = new UsernamePasswordCredentialsProvider(
+ "unknown", "none") {
+ @Override
+ public boolean get(URIish uri, CredentialItem... items)
+ throws UnsupportedCredentialItem {
+				// Only return the real credentials if the URI path starts
+				// with /auth. This ensures that we provide the correct
+				// credentials only for the URI reached after the redirect,
+				// so the test fails if we are asked for the credentials
+				// for the original URI.
+ if (uri.getPath().startsWith("/auth")) {
+ return testCredentials.get(uri, items);
+ }
+ return super.get(uri, items);
+ }
+ };
+ try (Transport t = Transport.open(dst, cloneFrom)) {
+ t.setCredentialsProvider(uriSpecificCredentialsProvider);
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
}
assertTrue(dst.hasObject(A_txt));
@@ -389,12 +830,19 @@ public void testInitialClone_RedirectSmall() throws Exception {
List<AccessEvent> requests = getRequests();
assertEquals(4, requests.size());
- AccessEvent firstRedirect = requests.get(0);
- assertEquals(301, firstRedirect.getStatus());
+ AccessEvent redirect = requests.get(0);
+ assertEquals("GET", redirect.getMethod());
+ assertEquals(join(cloneFrom, "info/refs"), redirect.getPath());
+ assertEquals(301, redirect.getStatus());
AccessEvent info = requests.get(1);
assertEquals("GET", info.getMethod());
- assertEquals(join(remoteURI, "info/refs"), info.getPath());
+ assertEquals(join(authURI, "info/refs"), info.getPath());
+ assertEquals(401, info.getStatus());
+
+ info = requests.get(2);
+ assertEquals("GET", info.getMethod());
+ assertEquals(join(authURI, "info/refs"), info.getPath());
assertEquals(1, info.getParameters().size());
assertEquals("git-upload-pack", info.getParameter("service"));
assertEquals(200, info.getStatus());
@@ -402,12 +850,56 @@ public void testInitialClone_RedirectSmall() throws Exception {
info.getResponseHeader(HDR_CONTENT_TYPE));
assertEquals("gzip", info.getResponseHeader(HDR_CONTENT_ENCODING));
- AccessEvent secondRedirect = requests.get(2);
- assertEquals(301, secondRedirect.getStatus());
-
AccessEvent service = requests.get(3);
assertEquals("POST", service.getMethod());
- assertEquals(join(remoteURI, "git-upload-pack"), service.getPath());
+ assertEquals(join(authURI, "git-upload-pack"), service.getPath());
+ assertEquals(0, service.getParameters().size());
+ assertNotNull("has content-length",
+ service.getRequestHeader(HDR_CONTENT_LENGTH));
+ assertNull("not chunked",
+ service.getRequestHeader(HDR_TRANSFER_ENCODING));
+
+ assertEquals(200, service.getStatus());
+ assertEquals("application/x-git-upload-pack-result",
+ service.getResponseHeader(HDR_CONTENT_TYPE));
+ }
+
+ @Test
+ public void testInitialClone_WithAuthenticationOnPostOnly()
+ throws Exception {
+ Repository dst = createBareRepository();
+ assertFalse(dst.hasObject(A_txt));
+
+ try (Transport t = Transport.open(dst, authOnPostURI)) {
+ t.setCredentialsProvider(testCredentials);
+ t.fetch(NullProgressMonitor.INSTANCE, mirror(master));
+ }
+
+ assertTrue(dst.hasObject(A_txt));
+ assertEquals(B, dst.exactRef(master).getObjectId());
+ fsck(dst, B);
+
+ List<AccessEvent> requests = getRequests();
+ assertEquals(3, requests.size());
+
+ AccessEvent info = requests.get(0);
+ assertEquals("GET", info.getMethod());
+ assertEquals(join(authOnPostURI, "info/refs"), info.getPath());
+ assertEquals(1, info.getParameters().size());
+ assertEquals("git-upload-pack", info.getParameter("service"));
+ assertEquals(200, info.getStatus());
+ assertEquals("application/x-git-upload-pack-advertisement",
+ info.getResponseHeader(HDR_CONTENT_TYPE));
+ assertEquals("gzip", info.getResponseHeader(HDR_CONTENT_ENCODING));
+
+ AccessEvent service = requests.get(1);
+ assertEquals("POST", service.getMethod());
+ assertEquals(join(authOnPostURI, "git-upload-pack"), service.getPath());
+ assertEquals(401, service.getStatus());
+
+ service = requests.get(2);
+ assertEquals("POST", service.getMethod());
+ assertEquals(join(authOnPostURI, "git-upload-pack"), service.getPath());
assertEquals(0, service.getParameters().size());
assertNotNull("has content-length",
service.getRequestHeader(HDR_CONTENT_LENGTH));
@@ -619,7 +1111,7 @@ public void testFetch_RefsUnreadableOnUpload() throws Exception {
ServletContextHandler app = noRefServer.addContext("/git");
GitServlet gs = new GitServlet();
- gs.setRepositoryResolver(new TestRepoResolver(repo, repoName));
+ gs.setRepositoryResolver(new TestRepositoryResolver(repo, repoName));
app.addServlet(new ServletHolder(gs), "/*");
noRefServer.setUp();
@@ -822,28 +1314,4 @@ private void enableReceivePack() throws IOException {
cfg.save();
}
- private final class TestRepoResolver
- implements RepositoryResolver<HttpServletRequest> {
-
- private final TestRepository<Repository> repo;
-
- private final String repoName;
-
- private TestRepoResolver(TestRepository<Repository> repo,
- String repoName) {
- this.repo = repo;
- this.repoName = repoName;
- }
-
- @Override
- public Repository open(HttpServletRequest req, String name)
- throws RepositoryNotFoundException, ServiceNotEnabledException {
- if (!name.equals(repoName))
- throw new RepositoryNotFoundException(name);
-
- Repository db = repo.getRepository();
- db.incrementOpen();
- return db;
- }
- }
}
diff --git a/org.eclipse.jgit.junit.http/META-INF/MANIFEST.MF b/org.eclipse.jgit.junit.http/META-INF/MANIFEST.MF
index dd6cb48..f5e3033 100644
--- a/org.eclipse.jgit.junit.http/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.junit.http/META-INF/MANIFEST.MF
@@ -20,6 +20,7 @@
org.eclipse.jetty.util.component;version="[9.4.5,10.0.0)",
org.eclipse.jetty.util.log;version="[9.4.5,10.0.0)",
org.eclipse.jetty.util.security;version="[9.4.5,10.0.0)",
+ org.eclipse.jetty.util.ssl;version="[9.4.5,10.0.0)",
org.eclipse.jgit.errors;version="[4.9.0,4.10.0)",
org.eclipse.jgit.http.server;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.file;version="[4.9.0,4.10.0)",
diff --git a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/AppServer.java b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/AppServer.java
index 28c0f21..e257cf6 100644
--- a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/AppServer.java
+++ b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/AppServer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010, 2012 Google Inc.
+ * Copyright (C) 2010, 2017 Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -46,15 +46,20 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
+import java.io.File;
+import java.io.IOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
+import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
+import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
+import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.security.AbstractLoginService;
import org.eclipse.jetty.security.Authenticator;
import org.eclipse.jetty.security.ConstraintMapping;
@@ -65,10 +70,12 @@
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.util.security.Constraint;
import org.eclipse.jetty.util.security.Password;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jgit.transport.URIish;
/**
@@ -88,6 +95,12 @@ public class AppServer {
/** Password for {@link #username} in secured access areas. */
public static final String password = "letmein";
+ /** SSL keystore password; must have at least 6 characters. */
+ private static final String keyPassword = "mykeys";
+
+ /** Role for authentication. */
+ private static final String authRole = "can-access";
+
static {
// Install a logger that throws warning messages.
//
@@ -97,48 +110,141 @@ public class AppServer {
private final Server server;
+ private final HttpConfiguration config;
+
private final ServerConnector connector;
+ private final HttpConfiguration secureConfig;
+
+ private final ServerConnector secureConnector;
+
private final ContextHandlerCollection contexts;
private final TestRequestLog log;
+ private List<File> filesToDelete = new ArrayList<>();
+
public AppServer() {
- this(0);
+ this(0, -1);
}
/**
* @param port
- * the http port number
+ * the http port number; may be zero to allocate a port
+ * dynamically
* @since 4.2
*/
public AppServer(int port) {
+ this(port, -1);
+ }
+
+ /**
+ * @param port
+ * for http, may be zero to allocate a port dynamically
+ * @param sslPort
+	 *            for https, may be zero to allocate a port dynamically. If
+ * negative, the server will be set up without https support.
+ * @since 4.9
+ */
+ public AppServer(int port, int sslPort) {
server = new Server();
- HttpConfiguration http_config = new HttpConfiguration();
- http_config.setSecureScheme("https");
- http_config.setSecurePort(8443);
- http_config.setOutputBufferSize(32768);
+ config = new HttpConfiguration();
+ config.setSecureScheme("https");
+ config.setSecurePort(0);
+ config.setOutputBufferSize(32768);
connector = new ServerConnector(server,
- new HttpConnectionFactory(http_config));
+ new HttpConnectionFactory(config));
connector.setPort(port);
+ String ip;
+ String hostName;
try {
final InetAddress me = InetAddress.getByName("localhost");
- connector.setHost(me.getHostAddress());
+ ip = me.getHostAddress();
+ connector.setHost(ip);
+ hostName = InetAddress.getLocalHost().getCanonicalHostName();
} catch (UnknownHostException e) {
throw new RuntimeException("Cannot find localhost", e);
}
+ if (sslPort >= 0) {
+ SslContextFactory sslContextFactory = createTestSslContextFactory(
+ hostName);
+ secureConfig = new HttpConfiguration(config);
+ secureConnector = new ServerConnector(server,
+ new SslConnectionFactory(sslContextFactory,
+ HttpVersion.HTTP_1_1.asString()),
+ new HttpConnectionFactory(secureConfig));
+ secureConnector.setPort(sslPort);
+ secureConnector.setHost(ip);
+ } else {
+ secureConfig = null;
+ secureConnector = null;
+ }
+
contexts = new ContextHandlerCollection();
log = new TestRequestLog();
log.setHandler(contexts);
- server.setConnectors(new Connector[] { connector });
+ if (secureConnector == null) {
+ server.setConnectors(new Connector[] { connector });
+ } else {
+ server.setConnectors(
+ new Connector[] { connector, secureConnector });
+ }
server.setHandler(log);
}
+ private SslContextFactory createTestSslContextFactory(String hostName) {
+ SslContextFactory factory = new SslContextFactory(true);
+
+ String dName = "CN=,OU=,O=,ST=,L=,C=";
+
+ try {
+ File tmpDir = Files.createTempDirectory("jks").toFile();
+ tmpDir.deleteOnExit();
+ makePrivate(tmpDir);
+ File keyStore = new File(tmpDir, "keystore.jks");
+ Runtime.getRuntime().exec(
+ new String[] {
+ "keytool", //
+ "-keystore", keyStore.getAbsolutePath(), //
+ "-storepass", keyPassword,
+ "-alias", hostName, //
+ "-genkeypair", //
+ "-keyalg", "RSA", //
+ "-keypass", keyPassword, //
+ "-dname", dName, //
+ "-validity", "2" //
+ }).waitFor();
+ keyStore.deleteOnExit();
+ makePrivate(keyStore);
+ filesToDelete.add(keyStore);
+ filesToDelete.add(tmpDir);
+ factory.setKeyStorePath(keyStore.getAbsolutePath());
+ factory.setKeyStorePassword(keyPassword);
+ factory.setKeyManagerPassword(keyPassword);
+ factory.setTrustStorePath(keyStore.getAbsolutePath());
+ factory.setTrustStorePassword(keyPassword);
+ } catch (InterruptedException | IOException e) {
+ throw new RuntimeException("Cannot create ssl key/certificate", e);
+ }
+ return factory;
+ }
+
+ private void makePrivate(File file) {
+ file.setReadable(false);
+ file.setWritable(false);
+ file.setExecutable(false);
+ file.setReadable(true, true);
+ file.setWritable(true, true);
+ if (file.isDirectory()) {
+ file.setExecutable(true, true);
+ }
+ }
+
/**
* Create a new servlet context within the server.
* <p>
@@ -162,9 +268,10 @@ public ServletContextHandler addContext(String path) {
return ctx;
}
- public ServletContextHandler authBasic(ServletContextHandler ctx) {
+ public ServletContextHandler authBasic(ServletContextHandler ctx,
+ String... methods) {
assertNotYetSetUp();
- auth(ctx, new BasicAuthenticator());
+ auth(ctx, new BasicAuthenticator(), methods);
return ctx;
}
@@ -199,22 +306,36 @@ protected UserPrincipal loadUserInfo(String user) {
}
}
- private void auth(ServletContextHandler ctx, Authenticator authType) {
- final String role = "can-access";
-
- AbstractLoginService users = new TestMappedLoginService(role);
+ private ConstraintMapping createConstraintMapping() {
ConstraintMapping cm = new ConstraintMapping();
cm.setConstraint(new Constraint());
cm.getConstraint().setAuthenticate(true);
cm.getConstraint().setDataConstraint(Constraint.DC_NONE);
- cm.getConstraint().setRoles(new String[] { role });
+ cm.getConstraint().setRoles(new String[] { authRole });
cm.setPathSpec("/*");
+ return cm;
+ }
+
+ private void auth(ServletContextHandler ctx, Authenticator authType,
+ String... methods) {
+ AbstractLoginService users = new TestMappedLoginService(authRole);
+ List<ConstraintMapping> mappings = new ArrayList<>();
+ if (methods == null || methods.length == 0) {
+ mappings.add(createConstraintMapping());
+ } else {
+ for (String method : methods) {
+ ConstraintMapping cm = createConstraintMapping();
+ cm.setMethod(method.toUpperCase(Locale.ROOT));
+ mappings.add(cm);
+ }
+ }
ConstraintSecurityHandler sec = new ConstraintSecurityHandler();
sec.setRealmName(realm);
sec.setAuthenticator(authType);
sec.setLoginService(users);
- sec.setConstraintMappings(new ConstraintMapping[] { cm });
+ sec.setConstraintMappings(
+ mappings.toArray(new ConstraintMapping[mappings.size()]));
sec.setHandler(ctx);
contexts.removeHandler(ctx);
@@ -231,6 +352,10 @@ public void setUp() throws Exception {
RecordingLogger.clear();
log.clear();
server.start();
+ config.setSecurePort(getSecurePort());
+ if (secureConfig != null) {
+ secureConfig.setSecurePort(getSecurePort());
+ }
}
/**
@@ -243,6 +368,10 @@ public void tearDown() throws Exception {
RecordingLogger.clear();
log.clear();
server.stop();
+ for (File f : filesToDelete) {
+ f.delete();
+ }
+ filesToDelete.clear();
}
/**
@@ -272,6 +401,12 @@ public int getPort() {
return connector.getLocalPort();
}
+ /** @return the HTTPS port or -1 if not configured. */
+ public int getSecurePort() {
+ assertAlreadySetUp();
+ return secureConnector != null ? secureConnector.getLocalPort() : -1;
+ }
+
/** @return all requests since the server was started. */
public List<AccessEvent> getRequests() {
return new ArrayList<>(log.getEvents());
diff --git a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/HttpTestCase.java b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/HttpTestCase.java
index 1b94e02..eabb0f2 100644
--- a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/HttpTestCase.java
+++ b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/HttpTestCase.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009-2010, Google Inc.
+ * Copyright (C) 2009-2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -77,7 +77,7 @@ public abstract class HttpTestCase extends LocalDiskRepositoryTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
- server = new AppServer();
+ server = createServer();
}
@Override
@@ -86,6 +86,20 @@ public void tearDown() throws Exception {
super.tearDown();
}
+ /**
+	 * Creates the {@link AppServer}. This default implementation creates a server
+	 * without SSL support, listening for HTTP connections on a dynamically chosen
+	 * port, which can be obtained once the server has been started via its
+ * {@link AppServer#getPort()} method. Subclasses may override if they need
+ * a more specialized server.
+ *
+ * @return the {@link AppServer}.
+ * @since 4.9
+ */
+ protected AppServer createServer() {
+ return new AppServer();
+ }
+
protected TestRepository<Repository> createTestRepository()
throws IOException {
return new TestRepository<>(createBareRepository());
@@ -165,4 +179,37 @@ public static String join(URIish base, String path) {
dir += "/";
return dir + path;
}
+
+ protected static String rewriteUrl(String url, String newProtocol,
+ int newPort) {
+ String newUrl = url;
+ if (newProtocol != null && !newProtocol.isEmpty()) {
+ int schemeEnd = newUrl.indexOf("://");
+ if (schemeEnd >= 0) {
+ newUrl = newProtocol + newUrl.substring(schemeEnd);
+ }
+ }
+ if (newPort > 0) {
+ newUrl = newUrl.replaceFirst(":\\d+/", ":" + newPort + "/");
+ } else {
+ // Remove the port, if any
+ newUrl = newUrl.replaceFirst(":\\d+/", "/");
+ }
+ return newUrl;
+ }
+
+ protected static URIish extendPath(URIish uri, String pathComponents)
+ throws URISyntaxException {
+ String raw = uri.toString();
+ String newComponents = pathComponents;
+ if (!newComponents.startsWith("/")) {
+ newComponents = '/' + newComponents;
+ }
+ if (!newComponents.endsWith("/")) {
+ newComponents += '/';
+ }
+ int i = raw.lastIndexOf('/');
+ raw = raw.substring(0, i) + newComponents + raw.substring(i + 1);
+ return new URIish(raw);
+ }
}
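
As an illustration of how the new HttpTestCase helpers above behave, the following sketch traces rewriteUrl and extendPath on an assumed base URI; the host, ports, and repository name are invented for the example and are not taken from the patch.

    // Sketch only; assumes a repository served at http://127.0.0.1:8080/git/test.git
    // and is meant to run inside a test method of an HttpTestCase subclass.
    URIish base = new URIish("http://127.0.0.1:8080/git/test.git");

    // rewriteUrl swaps the scheme and port, e.g. to address an HTTPS connector:
    String https = rewriteUrl(base.toString(), "https", 8443);
    // -> "https://127.0.0.1:8443/git/test.git"

    // extendPath inserts path components before the last segment; the redirect
    // and auth tests use this to route a request through a filter context:
    URIish redirected = extendPath(base, "/response/3/302");
    // -> "http://127.0.0.1:8080/git/response/3/302/test.git"
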
diff --git a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/MockServletConfig.java b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/MockServletConfig.java
index 9defcd9..03c0816 100644
--- a/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/MockServletConfig.java
+++ b/org.eclipse.jgit.junit.http/src/org/eclipse/jgit/junit/http/MockServletConfig.java
@@ -88,4 +88,4 @@ public String getServletName() {
public ServletContext getServletContext() {
return null;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/JGitTestUtil.java b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/JGitTestUtil.java
index 2962e71..5bf61f0 100644
--- a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/JGitTestUtil.java
+++ b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/JGitTestUtil.java
@@ -258,4 +258,27 @@ public static Path writeLink(Repository db, String link,
target);
}
+ /**
+ * Concatenate byte arrays.
+ *
+ * @param b
+ * byte arrays to combine together.
+ * @return a single byte array that contains all bytes copied from input
+ * byte arrays.
+ * @since 4.9
+ */
+ public static byte[] concat(byte[]... b) {
+ int n = 0;
+ for (byte[] a : b) {
+ n += a.length;
+ }
+
+ byte[] data = new byte[n];
+ n = 0;
+ for (byte[] a : b) {
+ System.arraycopy(a, 0, data, n, a.length);
+ n += a.length;
+ }
+ return data;
+ }
}
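
A brief usage sketch for the new JGitTestUtil.concat helper; the fragment contents below are arbitrary and only illustrate that the arrays are copied in argument order.

    // Sketch only: assembling a test fixture from byte-array fragments.
    byte[] head = "0008".getBytes(StandardCharsets.US_ASCII);
    byte[] body = "done".getBytes(StandardCharsets.US_ASCII);
    byte[] framed = JGitTestUtil.concat(head, body);
    // framed now holds the 8 bytes of "0008done", head first, then body.
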
diff --git a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/Repeat.java b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/Repeat.java
index 22b5007..a3c869f 100644
--- a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/Repeat.java
+++ b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/Repeat.java
@@ -50,4 +50,4 @@
@Target({ java.lang.annotation.ElementType.METHOD })
public @interface Repeat {
public abstract int n();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepeatRule.java b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepeatRule.java
index 75e1a67..4230073 100644
--- a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepeatRule.java
+++ b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/RepeatRule.java
@@ -128,4 +128,4 @@ public Statement apply(Statement statement, Description description) {
}
return result;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/StrictWorkMonitor.java b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/StrictWorkMonitor.java
new file mode 100644
index 0000000..22b69a3
--- /dev/null
+++ b/org.eclipse.jgit.junit/src/org/eclipse/jgit/junit/StrictWorkMonitor.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2017 Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.junit;
+
+import static org.junit.Assert.assertEquals;
+
+import org.eclipse.jgit.lib.ProgressMonitor;
+
+public final class StrictWorkMonitor implements ProgressMonitor {
+ private int lastWork, totalWork;
+
+ @Override
+ public void start(int totalTasks) {
+ // empty
+ }
+
+ @Override
+ public void beginTask(String title, int total) {
+ this.totalWork = total;
+ lastWork = 0;
+ }
+
+ @Override
+ public void update(int completed) {
+ lastWork += completed;
+ }
+
+ @Override
+ public void endTask() {
+ assertEquals("Units of work recorded", totalWork, lastWork);
+ }
+
+ @Override
+ public boolean isCancelled() {
+ return false;
+ }
+}
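
The new StrictWorkMonitor turns mismatched progress reporting into a test failure. A minimal sketch of the contract it enforces (the task title and totals are arbitrary):

    // Sketch only: endTask() asserts that the updates sum to the declared total.
    ProgressMonitor pm = new StrictWorkMonitor();
    pm.beginTask("indexing", 3); // arbitrary title and total
    pm.update(2);
    pm.update(1);
    pm.endTask(); // passes: 2 + 1 == 3; any other sum fails the test
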
diff --git a/org.eclipse.jgit.lfs.server.test/tst/org/eclipse/jgit/lfs/server/fs/LfsServerTest.java b/org.eclipse.jgit.lfs.server.test/tst/org/eclipse/jgit/lfs/server/fs/LfsServerTest.java
index e10660d..5da502e 100644
--- a/org.eclipse.jgit.lfs.server.test/tst/org/eclipse/jgit/lfs/server/fs/LfsServerTest.java
+++ b/org.eclipse.jgit.lfs.server.test/tst/org/eclipse/jgit/lfs/server/fs/LfsServerTest.java
@@ -265,4 +265,4 @@ protected long createPseudoRandomContentFile(Path f, long size)
}
return Files.size(f);
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/TransferHandler.java b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/TransferHandler.java
index 86ca2d3..4fea92e 100644
--- a/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/TransferHandler.java
+++ b/org.eclipse.jgit.lfs.server/src/org/eclipse/jgit/lfs/server/TransferHandler.java
@@ -164,4 +164,4 @@ private void addObjectInfo(Response.Body body, LfsObject o)
}
abstract Response.Body process() throws IOException;
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.lfs.test/tst/org/eclipse/jgit/lfs/lib/LongObjectIdTest.java b/org.eclipse.jgit.lfs.test/tst/org/eclipse/jgit/lfs/lib/LongObjectIdTest.java
index e754d6f..31ab783 100644
--- a/org.eclipse.jgit.lfs.test/tst/org/eclipse/jgit/lfs/lib/LongObjectIdTest.java
+++ b/org.eclipse.jgit.lfs.test/tst/org/eclipse/jgit/lfs/lib/LongObjectIdTest.java
@@ -291,6 +291,8 @@ public void testCompareTo() {
"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
assertEquals(0, id1.compareTo(LongObjectId.fromString(
"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef")));
+ AnyLongObjectId self = id1;
+ assertEquals(0, id1.compareTo(self));
assertEquals(-1, id1.compareTo(LongObjectId.fromString(
"1123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef")));
diff --git a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
index 867cca5..1598b9e 100644
--- a/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
+++ b/org.eclipse.jgit.lfs/src/org/eclipse/jgit/lfs/internal/AtomicObjectOutputStream.java
@@ -146,4 +146,4 @@ public void abort() {
locked.unlock();
aborted = true;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.5.target b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.5.target
index b2099ae..8051080 100644
--- a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.5.target
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.5.target
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?pde?>
<!-- generated with https://github.com/mbarbero/fr.obeo.releng.targetplatform -->
-<target name="jgit-4.5" sequenceNumber="1496008880">
+<target name="jgit-4.5" sequenceNumber="1502749391">
<locations>
<location includeMode="slicer" includeAllPlatforms="false" includeSource="true" includeConfigurePhase="true" type="InstallableUnit">
<unit id="org.eclipse.jetty.client" version="9.4.5.v20170502"/>
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.6.target b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.6.target
index e4baa5d..b6bbcda 100644
--- a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.6.target
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.6.target
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?pde?>
<!-- generated with https://github.com/mbarbero/fr.obeo.releng.targetplatform -->
-<target name="jgit-4.6" sequenceNumber="1496008884">
+<target name="jgit-4.6" sequenceNumber="1502749371">
<locations>
<location includeMode="slicer" includeAllPlatforms="false" includeSource="true" includeConfigurePhase="true" type="InstallableUnit">
<unit id="org.eclipse.jetty.client" version="9.4.5.v20170502"/>
diff --git a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.7.target b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.7.target
index 9a15741..6071c8f 100644
--- a/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.7.target
+++ b/org.eclipse.jgit.packaging/org.eclipse.jgit.target/jgit-4.7.target
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?pde?>
<!-- generated with https://github.com/mbarbero/fr.obeo.releng.targetplatform -->
-<target name="jgit-4.7" sequenceNumber="1496008862">
+<target name="jgit-4.7" sequenceNumber="1502749365">
<locations>
<location includeMode="slicer" includeAllPlatforms="false" includeSource="true" includeConfigurePhase="true" type="InstallableUnit">
<unit id="org.eclipse.jetty.client" version="9.4.5.v20170502"/>
diff --git a/org.eclipse.jgit.pgm.test/tst/org/eclipse/jgit/pgm/ReflogTest.java b/org.eclipse.jgit.pgm.test/tst/org/eclipse/jgit/pgm/ReflogTest.java
index 7330ee9..bf6bacb 100644
--- a/org.eclipse.jgit.pgm.test/tst/org/eclipse/jgit/pgm/ReflogTest.java
+++ b/org.eclipse.jgit.pgm.test/tst/org/eclipse/jgit/pgm/ReflogTest.java
@@ -80,4 +80,4 @@ public void testBranch() throws Exception {
"" }, execute("git reflog refs/heads/side"));
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.pgm/META-INF/MANIFEST.MF b/org.eclipse.jgit.pgm/META-INF/MANIFEST.MF
index e73b352..c59f636 100644
--- a/org.eclipse.jgit.pgm/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.pgm/META-INF/MANIFEST.MF
@@ -37,8 +37,11 @@
org.eclipse.jgit.errors;version="[4.9.0,4.10.0)",
org.eclipse.jgit.gitrepo;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.ketch;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.dfs;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.file;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.io;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.pack;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.reftable;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.reftree;version="[4.9.0,4.10.0)",
org.eclipse.jgit.lfs;version="[4.9.0,4.10.0)",
org.eclipse.jgit.lfs.lib;version="[4.9.0,4.10.0)",
diff --git a/org.eclipse.jgit.pgm/META-INF/services/org.eclipse.jgit.pgm.TextBuiltin b/org.eclipse.jgit.pgm/META-INF/services/org.eclipse.jgit.pgm.TextBuiltin
index 5495be6..9025473 100644
--- a/org.eclipse.jgit.pgm/META-INF/services/org.eclipse.jgit.pgm.TextBuiltin
+++ b/org.eclipse.jgit.pgm/META-INF/services/org.eclipse.jgit.pgm.TextBuiltin
@@ -38,10 +38,12 @@
org.eclipse.jgit.pgm.UploadPack
org.eclipse.jgit.pgm.Version
+org.eclipse.jgit.pgm.debug.BenchmarkReftable
org.eclipse.jgit.pgm.debug.DiffAlgorithms
org.eclipse.jgit.pgm.debug.LfsStore
org.eclipse.jgit.pgm.debug.MakeCacheTree
org.eclipse.jgit.pgm.debug.ReadDirCache
+org.eclipse.jgit.pgm.debug.ReadReftable
org.eclipse.jgit.pgm.debug.RebuildCommitGraph
org.eclipse.jgit.pgm.debug.RebuildRefTree
org.eclipse.jgit.pgm.debug.ShowCacheTree
@@ -49,5 +51,6 @@
org.eclipse.jgit.pgm.debug.ShowDirCache
org.eclipse.jgit.pgm.debug.ShowPackDelta
org.eclipse.jgit.pgm.debug.TextHashFunctions
-org.eclipse.jgit.pgm.debug.WriteDirCache
-
+org.eclipse.jgit.pgm.debug.VerifyReftable
+org.eclipse.jgit.pgm.debug.WriteDirCache
+org.eclipse.jgit.pgm.debug.WriteReftable
diff --git a/org.eclipse.jgit.pgm/resources/org/eclipse/jgit/pgm/internal/CLIText.properties b/org.eclipse.jgit.pgm/resources/org/eclipse/jgit/pgm/internal/CLIText.properties
index 8666c34..7c9816c 100644
--- a/org.eclipse.jgit.pgm/resources/org/eclipse/jgit/pgm/internal/CLIText.properties
+++ b/org.eclipse.jgit.pgm/resources/org/eclipse/jgit/pgm/internal/CLIText.properties
@@ -257,7 +257,7 @@
usage_ReadDirCache= Read the DirCache 100 times
usage_RebuildCommitGraph=Recreate a repository from another one's commit graph
usage_RebuildRefTree=Copy references into a RefTree
-usage_RebuildRefTreeEnable=set extensions.refsStorage = reftree
+usage_RebuildRefTreeEnable=set extensions.refStorage = reftree
usage_Remote=Manage set of tracked repositories
usage_RepositoryToReadFrom=Repository to read from
usage_RepositoryToReceiveInto=Repository to receive into
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Clone.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Clone.java
index ca5205a..a8eb474 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Clone.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Clone.java
@@ -101,6 +101,9 @@ protected void run() throws Exception {
if (localName == null) {
try {
localName = uri.getHumanishName();
+ if (isBare) {
+ localName = localName + Constants.DOT_GIT_EXT;
+ }
localNameF = new File(SystemReader.getInstance().getProperty(
Constants.OS_USER_DIR), localName);
} catch (IllegalArgumentException e) {
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
index faae13a..f5c3f9a 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Config.java
@@ -122,4 +122,4 @@ private void list(StoredConfig config) throws IOException,
}
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Daemon.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Daemon.java
index 1008593..ceabe93 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Daemon.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/Daemon.java
@@ -204,4 +204,4 @@ public ReceivePack create(DaemonClient req, Repository repo)
}
});
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/BenchmarkReftable.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/BenchmarkReftable.java
new file mode 100644
index 0000000..71c8db8
--- /dev/null
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/BenchmarkReftable.java
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.pgm.debug;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.eclipse.jgit.lib.Constants.MASTER;
+import static org.eclipse.jgit.lib.Constants.R_HEADS;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.RefCursor;
+import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.SymbolicRef;
+import org.eclipse.jgit.pgm.Command;
+import org.eclipse.jgit.pgm.TextBuiltin;
+import org.eclipse.jgit.util.RefList;
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.Option;
+
+@Command
+class BenchmarkReftable extends TextBuiltin {
+ enum Test {
+ SCAN,
+ SEEK_COLD, SEEK_HOT,
+ BY_ID_COLD, BY_ID_HOT;
+ }
+
+ @Option(name = "--tries")
+ private int tries = 10;
+
+ @Option(name = "--test")
+ private Test test = Test.SCAN;
+
+ @Option(name = "--ref")
+ private String ref;
+
+ @Option(name = "--object-id")
+ private String objectId;
+
+ @Argument(index = 0)
+ private String lsRemotePath;
+
+ @Argument(index = 1)
+ private String reftablePath;
+
+ @Override
+ protected void run() throws Exception {
+ switch (test) {
+ case SCAN:
+ scan();
+ break;
+
+ case SEEK_COLD:
+ seekCold(ref);
+ break;
+ case SEEK_HOT:
+ seekHot(ref);
+ break;
+
+ case BY_ID_COLD:
+ byIdCold(ObjectId.fromString(objectId));
+ break;
+ case BY_ID_HOT:
+ byIdHot(ObjectId.fromString(objectId));
+ break;
+ }
+ }
+
+ private void printf(String fmt, Object... args) throws IOException {
+ errw.println(String.format(fmt, args));
+ }
+
+ @SuppressWarnings({ "nls", "boxing" })
+ private void scan() throws Exception {
+ long start, tot;
+
+ start = System.currentTimeMillis();
+ for (int i = 0; i < tries; i++) {
+ readLsRemote();
+ }
+ tot = System.currentTimeMillis() - start;
+ printf("%12s %10d ms %6d ms/run", "packed-refs", tot, tot / tries);
+
+ start = System.currentTimeMillis();
+ for (int i = 0; i < tries; i++) {
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ try (RefCursor rc = reader.allRefs()) {
+ while (rc.next()) {
+ rc.getRef();
+ }
+ }
+ }
+ }
+ tot = System.currentTimeMillis() - start;
+ printf("%12s %10d ms %6d ms/run", "reftable", tot, tot / tries);
+ }
+
+ private RefList<Ref> readLsRemote()
+ throws IOException, FileNotFoundException {
+ RefList.Builder<Ref> list = new RefList.Builder<>();
+ try (BufferedReader br = new BufferedReader(new InputStreamReader(
+ new FileInputStream(lsRemotePath), UTF_8))) {
+ Ref last = null;
+ String line;
+ while ((line = br.readLine()) != null) {
+ ObjectId id = ObjectId.fromString(line.substring(0, 40));
+ String name = line.substring(41, line.length());
+ if (last != null && name.endsWith("^{}")) { //$NON-NLS-1$
+ last = new ObjectIdRef.PeeledTag(PACKED, last.getName(),
+ last.getObjectId(), id);
+ list.set(list.size() - 1, last);
+ continue;
+ }
+
+ if (name.equals(HEAD)) {
+ last = new SymbolicRef(name, new ObjectIdRef.Unpeeled(NEW,
+ R_HEADS + MASTER, null));
+ } else {
+ last = new ObjectIdRef.PeeledNonTag(PACKED, name, id);
+ }
+ list.add(last);
+ }
+ }
+ list.sort();
+ return list.toRefList();
+ }
+
+ @SuppressWarnings({ "nls", "boxing" })
+ private void seekCold(String refName) throws Exception {
+ long start, tot;
+
+ int lsTries = Math.min(tries, 64);
+ start = System.nanoTime();
+ for (int i = 0; i < lsTries; i++) {
+ readLsRemote().get(refName);
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "packed-refs",
+ tot / 1000,
+ (((double) tot) / lsTries) / 1000,
+ lsTries);
+
+ start = System.nanoTime();
+ for (int i = 0; i < tries; i++) {
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ try (RefCursor rc = reader.seekRef(refName)) {
+ while (rc.next()) {
+ rc.getRef();
+ }
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "reftable",
+ tot / 1000,
+ (((double) tot) / tries) / 1000,
+ tries);
+ }
+
+ @SuppressWarnings({ "nls", "boxing" })
+ private void seekHot(String refName) throws Exception {
+ long start, tot;
+
+ int lsTries = Math.min(tries, 64);
+ start = System.nanoTime();
+ RefList<Ref> lsRemote = readLsRemote();
+ for (int i = 0; i < lsTries; i++) {
+ lsRemote.get(refName);
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "packed-refs",
+ tot / 1000, (((double) tot) / lsTries) / 1000, lsTries);
+
+ start = System.nanoTime();
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ for (int i = 0; i < tries; i++) {
+ try (RefCursor rc = reader.seekRef(refName)) {
+ while (rc.next()) {
+ rc.getRef();
+ }
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "reftable",
+ tot / 1000, (((double) tot) / tries) / 1000, tries);
+ }
+
+ @SuppressWarnings({ "nls", "boxing" })
+ private void byIdCold(ObjectId id) throws Exception {
+ long start, tot;
+
+ int lsTries = Math.min(tries, 64);
+ start = System.nanoTime();
+ for (int i = 0; i < lsTries; i++) {
+ for (Ref r : readLsRemote()) {
+ if (id.equals(r.getObjectId())) {
+ continue;
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "packed-refs",
+ tot / 1000, (((double) tot) / lsTries) / 1000, lsTries);
+
+ start = System.nanoTime();
+ for (int i = 0; i < tries; i++) {
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ try (RefCursor rc = reader.byObjectId(id)) {
+ while (rc.next()) {
+ rc.getRef();
+ }
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "reftable",
+ tot / 1000, (((double) tot) / tries) / 1000, tries);
+ }
+
+ @SuppressWarnings({ "nls", "boxing" })
+ private void byIdHot(ObjectId id) throws Exception {
+ long start, tot;
+
+ int lsTries = Math.min(tries, 64);
+ start = System.nanoTime();
+ RefList<Ref> lsRemote = readLsRemote();
+ for (int i = 0; i < lsTries; i++) {
+ for (Ref r : lsRemote) {
+ if (id.equals(r.getObjectId())) {
+ continue;
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "packed-refs",
+ tot / 1000, (((double) tot) / lsTries) / 1000, lsTries);
+
+ start = System.nanoTime();
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ for (int i = 0; i < tries; i++) {
+ try (RefCursor rc = reader.byObjectId(id)) {
+ while (rc.next()) {
+ rc.getRef();
+ }
+ }
+ }
+ }
+ tot = System.nanoTime() - start;
+ printf("%12s %10d usec %9.1f usec/run %5d runs", "reftable",
+ tot / 1000, (((double) tot) / tries) / 1000, tries);
+ }
+}
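The read path exercised by this benchmark is the same three-object chain used by the other debug commands below: a BlockSource wrapping a FileInputStream, a ReftableReader over that source, and a RefCursor for iteration. A minimal sketch, assuming the imports already shown in BenchmarkReftable above and a reftable file on disk (the path and ref name are placeholders; error handling omitted):

    try (FileInputStream in = new FileInputStream("/tmp/refs.reftable");
            BlockSource src = BlockSource.from(in);
            ReftableReader reader = new ReftableReader(src)) {
        // seekRef() positions on a single reference;
        // reader.allRefs() or reader.byObjectId(id) scan instead.
        try (RefCursor rc = reader.seekRef("refs/heads/master")) {
            while (rc.next()) {
                Ref r = rc.getRef();
                // use r.getObjectId(), r.isSymbolic(), ... as needed
            }
        }
    }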
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/ReadReftable.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/ReadReftable.java
new file mode 100644
index 0000000..9b8db3e
--- /dev/null
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/ReadReftable.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.pgm.debug;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.RefCursor;
+import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.pgm.Command;
+import org.eclipse.jgit.pgm.TextBuiltin;
+import org.kohsuke.args4j.Argument;
+
+@Command
+class ReadReftable extends TextBuiltin {
+ @Argument(index = 0)
+ private String input;
+
+ @Argument(index = 1, required = false)
+ private String ref;
+
+ @Override
+ protected void run() throws Exception {
+ try (FileInputStream in = new FileInputStream(input);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ try (RefCursor rc = ref != null
+ ? reader.seekRef(ref)
+ : reader.allRefs()) {
+ while (rc.next()) {
+ write(rc.getRef());
+ }
+ }
+ }
+ }
+
+ private void write(Ref r) throws IOException {
+ if (r.isSymbolic()) {
+ outw.println(r.getTarget().getName() + '\t' + r.getName());
+ return;
+ }
+
+ ObjectId id1 = r.getObjectId();
+ if (id1 != null) {
+ outw.println(id1.name() + '\t' + r.getName());
+ }
+
+ ObjectId id2 = r.getPeeledObjectId();
+ if (id2 != null) {
+ outw.println('^' + id2.name());
+ }
+ }
+}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/RebuildRefTree.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/RebuildRefTree.java
index 57345e2..8cde513 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/RebuildRefTree.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/RebuildRefTree.java
@@ -133,7 +133,7 @@ protected void run() throws Exception {
if (enable && !(db.getRefDatabase() instanceof RefTreeDatabase)) {
StoredConfig cfg = db.getConfig();
cfg.setInt("core", null, "repositoryformatversion", 1); //$NON-NLS-1$ //$NON-NLS-2$
- cfg.setString("extensions", null, "refsStorage", "reftree"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
+ cfg.setString("extensions", null, "refStorage", "reftree"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
cfg.save();
errw.println("Enabled reftree."); //$NON-NLS-1$
errw.flush();
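After this command enables the backend with the corrected key, the repository's .git/config should end up with roughly the following fragment (a sketch of the standard git-config rendering of the two set calls above; only the key name changes in this patch, the value stays "reftree"):

    [core]
    	repositoryformatversion = 1
    [extensions]
    	refStorage = reftree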
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/VerifyReftable.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/VerifyReftable.java
new file mode 100644
index 0000000..dffb579
--- /dev/null
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/VerifyReftable.java
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.pgm.debug;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.RefCursor;
+import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefComparator;
+import org.eclipse.jgit.lib.TextProgressMonitor;
+import org.eclipse.jgit.pgm.Command;
+import org.eclipse.jgit.pgm.TextBuiltin;
+import org.kohsuke.args4j.Argument;
+
+@Command
+class VerifyReftable extends TextBuiltin {
+ private static final long SEED1 = 0xaba8bb4de4caf86cL;
+ private static final long SEED2 = 0x28bb5c25ad43ecb5L;
+
+ @Argument(index = 0)
+ private String lsRemotePath;
+
+ @Argument(index = 1)
+ private String reftablePath;
+
+ @Override
+ protected void run() throws Exception {
+ List<Ref> refs = WriteReftable.readRefs(lsRemotePath);
+
+ try (FileInputStream in = new FileInputStream(reftablePath);
+ BlockSource src = BlockSource.from(in);
+ ReftableReader reader = new ReftableReader(src)) {
+ scan(refs, reader);
+ seek(refs, reader);
+ byId(refs, reader);
+ }
+ }
+
+ @SuppressWarnings("nls")
+ private void scan(List<Ref> refs, ReftableReader reader)
+ throws IOException {
+ errw.print(String.format("%-20s", "sequential scan..."));
+ errw.flush();
+ try (RefCursor rc = reader.allRefs()) {
+ for (Ref exp : refs) {
+ verify(exp, rc);
+ }
+ if (rc.next()) {
+ throw die("expected end of table");
+ }
+ }
+ errw.println(" OK");
+ }
+
+ @SuppressWarnings("nls")
+ private void seek(List<Ref> refs, ReftableReader reader)
+ throws IOException {
+ List<Ref> rnd = new ArrayList<>(refs);
+ Collections.shuffle(rnd, new Random(SEED1));
+
+ TextProgressMonitor pm = new TextProgressMonitor(errw);
+ pm.beginTask("random seek", rnd.size());
+ for (Ref exp : rnd) {
+ try (RefCursor rc = reader.seekRef(exp.getName())) {
+ verify(exp, rc);
+ if (rc.next()) {
+ throw die("should not have ref after " + exp.getName());
+ }
+ }
+ pm.update(1);
+ }
+ pm.endTask();
+ }
+
+ @SuppressWarnings("nls")
+ private void byId(List<Ref> refs, ReftableReader reader)
+ throws IOException {
+ Map<ObjectId, List<Ref>> want = groupById(refs);
+ List<List<Ref>> rnd = new ArrayList<>(want.values());
+ Collections.shuffle(rnd, new Random(SEED2));
+
+ TextProgressMonitor pm = new TextProgressMonitor(errw);
+ pm.beginTask("byObjectId", rnd.size());
+ for (List<Ref> exp : rnd) {
+ Collections.sort(exp, RefComparator.INSTANCE);
+ ObjectId id = exp.get(0).getObjectId();
+ try (RefCursor rc = reader.byObjectId(id)) {
+ for (Ref r : exp) {
+ verify(r, rc);
+ }
+ }
+ pm.update(1);
+ }
+ pm.endTask();
+ }
+
+ private static Map<ObjectId, List<Ref>> groupById(List<Ref> refs) {
+ Map<ObjectId, List<Ref>> m = new HashMap<>();
+ for (Ref r : refs) {
+ ObjectId id = r.getObjectId();
+ if (id != null) {
+ List<Ref> c = m.get(id);
+ if (c == null) {
+ c = new ArrayList<>(2);
+ m.put(id, c);
+ }
+ c.add(r);
+ }
+ }
+ return m;
+ }
+
+ @SuppressWarnings("nls")
+ private void verify(Ref exp, RefCursor rc) throws IOException {
+ if (!rc.next()) {
+ throw die("ended before " + exp.getName());
+ }
+
+ Ref act = rc.getRef();
+ if (!exp.getName().equals(act.getName())) {
+ throw die(String.format("expected %s, found %s",
+ exp.getName(),
+ act.getName()));
+ }
+
+ if (exp.isSymbolic()) {
+ if (!act.isSymbolic()) {
+ throw die("expected " + act.getName() + " to be symbolic");
+ }
+ if (!exp.getTarget().getName().equals(act.getTarget().getName())) {
+ throw die(String.format("expected %s to be %s, found %s",
+ exp.getName(),
+ exp.getLeaf().getName(),
+ act.getLeaf().getName()));
+ }
+ return;
+ }
+
+ if (!AnyObjectId.equals(exp.getObjectId(), act.getObjectId())) {
+ throw die(String.format("expected %s to be %s, found %s",
+ exp.getName(),
+ id(exp.getObjectId()),
+ id(act.getObjectId())));
+ }
+
+ if (exp.getPeeledObjectId() != null
+ && !AnyObjectId.equals(exp.getPeeledObjectId(), act.getPeeledObjectId())) {
+ throw die(String.format("expected %s to be %s, found %s",
+ exp.getName(),
+ id(exp.getPeeledObjectId()),
+ id(act.getPeeledObjectId())));
+ }
+ }
+
+ @SuppressWarnings("nls")
+ private static String id(ObjectId id) {
+ return id != null ? id.name() : "<null>";
+ }
+}
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/WriteReftable.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/WriteReftable.java
new file mode 100644
index 0000000..76ffa19
--- /dev/null
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/debug/WriteReftable.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.pgm.debug;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.eclipse.jgit.lib.Constants.MASTER;
+import static org.eclipse.jgit.lib.Constants.R_HEADS;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.eclipse.jgit.internal.storage.reftable.ReftableConfig;
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.SymbolicRef;
+import org.eclipse.jgit.pgm.Command;
+import org.eclipse.jgit.pgm.TextBuiltin;
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.Option;
+
+@Command
+class WriteReftable extends TextBuiltin {
+ private static final int KIB = 1 << 10;
+ private static final int MIB = 1 << 20;
+
+ @Option(name = "--block-size")
+ private int refBlockSize;
+
+ @Option(name = "--log-block-size")
+ private int logBlockSize;
+
+ @Option(name = "--restart-interval")
+ private int restartInterval;
+
+ @Option(name = "--index-levels")
+ private int indexLevels;
+
+ @Option(name = "--reflog-in")
+ private String reflogIn;
+
+ @Option(name = "--no-index-objects")
+ private boolean noIndexObjects;
+
+ @Argument(index = 0)
+ private String in;
+
+ @Argument(index = 1)
+ private String out;
+
+ @SuppressWarnings({ "nls", "boxing" })
+ @Override
+ protected void run() throws Exception {
+ List<Ref> refs = readRefs(in);
+ List<LogEntry> logs = readLog(reflogIn);
+
+ ReftableWriter.Stats stats;
+ try (OutputStream os = new FileOutputStream(out)) {
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setIndexObjects(!noIndexObjects);
+ if (refBlockSize > 0) {
+ cfg.setRefBlockSize(refBlockSize);
+ }
+ if (logBlockSize > 0) {
+ cfg.setLogBlockSize(logBlockSize);
+ }
+ if (restartInterval > 0) {
+ cfg.setRestartInterval(restartInterval);
+ }
+ if (indexLevels > 0) {
+ cfg.setMaxIndexLevels(indexLevels);
+ }
+
+ ReftableWriter w = new ReftableWriter(cfg);
+ w.setMinUpdateIndex(min(logs)).setMaxUpdateIndex(max(logs));
+ w.begin(os);
+ w.sortAndWriteRefs(refs);
+ for (LogEntry e : logs) {
+ w.writeLog(e.ref, e.updateIndex, e.who,
+ e.oldId, e.newId, e.message);
+ }
+ stats = w.finish().getStats();
+ }
+
+ double fileMiB = ((double) stats.totalBytes()) / MIB;
+ printf("Summary:");
+ printf(" file sz : %.1f MiB (%d bytes)", fileMiB, stats.totalBytes());
+ printf(" padding : %d KiB", stats.paddingBytes() / KIB);
+ errw.println();
+
+ printf("Refs:");
+ printf(" ref blk : %d", stats.refBlockSize());
+ printf(" restarts: %d", stats.restartInterval());
+ printf(" refs : %d", stats.refCount());
+ if (stats.refIndexLevels() > 0) {
+ int idxSize = (int) Math.round(((double) stats.refIndexSize()) / KIB);
+ printf(" idx sz : %d KiB", idxSize);
+ printf(" idx lvl : %d", stats.refIndexLevels());
+ }
+ printf(" avg ref : %d bytes", stats.refBytes() / refs.size());
+ errw.println();
+
+ if (stats.objCount() > 0) {
+ int objMiB = (int) Math.round(((double) stats.objBytes()) / MIB);
+ int idLen = stats.objIdLength();
+ printf("Objects:");
+ printf(" obj blk : %d", stats.refBlockSize());
+ printf(" restarts: %d", stats.restartInterval());
+ printf(" objects : %d", stats.objCount());
+ printf(" obj sz : %d MiB (%d bytes)", objMiB, stats.objBytes());
+ if (stats.objIndexSize() > 0) {
+ int s = (int) Math.round(((double) stats.objIndexSize()) / KIB);
+ printf(" idx sz : %d KiB", s);
+ printf(" idx lvl : %d", stats.objIndexLevels());
+ }
+ printf(" id len : %d bytes (%d hex digits)", idLen, 2 * idLen);
+ printf(" avg obj : %d bytes", stats.objBytes() / stats.objCount());
+ errw.println();
+ }
+ if (stats.logCount() > 0) {
+ int logMiB = (int) Math.round(((double) stats.logBytes()) / MIB);
+ printf("Log:");
+ printf(" log blk : %d", stats.logBlockSize());
+ printf(" logs : %d", stats.logCount());
+ printf(" log sz : %d MiB (%d bytes)", logMiB, stats.logBytes());
+ printf(" avg log : %d bytes", stats.logBytes() / logs.size());
+ errw.println();
+ }
+ }
+
+ private void printf(String fmt, Object... args) throws IOException {
+ errw.println(String.format(fmt, args));
+ }
+
+ static List<Ref> readRefs(String inputFile) throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ try (BufferedReader br = new BufferedReader(
+ new InputStreamReader(new FileInputStream(inputFile), UTF_8))) {
+ String line;
+ while ((line = br.readLine()) != null) {
+ ObjectId id = ObjectId.fromString(line.substring(0, 40));
+ String name = line.substring(41, line.length());
+ if (name.endsWith("^{}")) { //$NON-NLS-1$
+ int lastIdx = refs.size() - 1;
+ Ref last = refs.get(lastIdx);
+ refs.set(lastIdx, new ObjectIdRef.PeeledTag(PACKED,
+ last.getName(), last.getObjectId(), id));
+ continue;
+ }
+
+ Ref ref;
+ if (name.equals(HEAD)) {
+ ref = new SymbolicRef(name, new ObjectIdRef.Unpeeled(NEW,
+ R_HEADS + MASTER, null));
+ } else {
+ ref = new ObjectIdRef.PeeledNonTag(PACKED, name, id);
+ }
+ refs.add(ref);
+ }
+ }
+ Collections.sort(refs, (a, b) -> a.getName().compareTo(b.getName()));
+ return refs;
+ }
+
+ private static List<LogEntry> readLog(String logPath)
+ throws FileNotFoundException, IOException {
+ if (logPath == null) {
+ return Collections.emptyList();
+ }
+
+ List<LogEntry> log = new ArrayList<>();
+ try (BufferedReader br = new BufferedReader(
+ new InputStreamReader(new FileInputStream(logPath), UTF_8))) {
+ @SuppressWarnings("nls")
+ Pattern pattern = Pattern.compile("([^,]+)" // 1: ref
+ + ",([0-9]+(?:[.][0-9]+)?)" // 2: time
+ + ",([^,]+)" // 3: who
+ + ",([^,]+)" // 4: old
+ + ",([^,]+)" // 5: new
+ + ",(.*)"); // 6: msg
+ String line;
+ while ((line = br.readLine()) != null) {
+ Matcher m = pattern.matcher(line);
+ if (!m.matches()) {
+ throw new IOException("unparsed line: " + line); //$NON-NLS-1$
+ }
+ String ref = m.group(1);
+ double t = Double.parseDouble(m.group(2));
+ long time = ((long) t) * 1000L;
+ long index = (long) (t * 1e6);
+ String user = m.group(3);
+ ObjectId oldId = parseId(m.group(4));
+ ObjectId newId = parseId(m.group(5));
+ String msg = m.group(6);
+ String email = user + "@gerrit"; //$NON-NLS-1$
+ PersonIdent who = new PersonIdent(user, email, time, -480);
+ log.add(new LogEntry(ref, index, who, oldId, newId, msg));
+ }
+ }
+ Collections.sort(log, LogEntry::compare);
+ return log;
+ }
+
+ private static long min(List<LogEntry> log) {
+ return log.stream().mapToLong(e -> e.updateIndex).min().orElse(0);
+ }
+
+ private static long max(List<LogEntry> log) {
+ return log.stream().mapToLong(e -> e.updateIndex).max().orElse(0);
+ }
+
+ private static ObjectId parseId(String s) {
+ if ("NULL".equals(s)) { //$NON-NLS-1$
+ return ObjectId.zeroId();
+ }
+ return ObjectId.fromString(s);
+ }
+
+ private static class LogEntry {
+ static int compare(LogEntry a, LogEntry b) {
+ int cmp = a.ref.compareTo(b.ref);
+ if (cmp == 0) {
+ cmp = Long.signum(b.updateIndex - a.updateIndex);
+ }
+ return cmp;
+ }
+
+ final String ref;
+ final long updateIndex;
+ final PersonIdent who;
+ final ObjectId oldId;
+ final ObjectId newId;
+ final String message;
+
+ LogEntry(String ref, long updateIndex, PersonIdent who,
+ ObjectId oldId, ObjectId newId, String message) {
+ this.ref = ref;
+ this.updateIndex = updateIndex;
+ this.who = who;
+ this.oldId = oldId;
+ this.newId = newId;
+ this.message = message;
+ }
+ }
+}
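The write path in WriteReftable is the mirror image of the read path: build a ReftableConfig, stream sorted refs (and optionally log entries) through a ReftableWriter, then pull the Stats out of finish(). A minimal sketch, assuming a List<Ref> named refs such as the one returned by readRefs() above (the output path, block size, and update indexes are placeholder values; error handling omitted):

    try (OutputStream os = new FileOutputStream("/tmp/refs.reftable")) {
        ReftableConfig cfg = new ReftableConfig();
        cfg.setRefBlockSize(4 << 10); // optional tuning, like --block-size above
        ReftableWriter w = new ReftableWriter(cfg);
        w.setMinUpdateIndex(1).setMaxUpdateIndex(1); // single-update table
        w.begin(os);
        w.sortAndWriteRefs(refs);
        ReftableWriter.Stats stats = w.finish().getStats();
        // stats.totalBytes(), stats.refCount(), stats.refIndexLevels(), ...
    }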
diff --git a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/opt/UntrackedFilesHandler.java b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/opt/UntrackedFilesHandler.java
index c4e8b05..d6ff5f0 100644
--- a/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/opt/UntrackedFilesHandler.java
+++ b/org.eclipse.jgit.pgm/src/org/eclipse/jgit/pgm/opt/UntrackedFilesHandler.java
@@ -111,4 +111,4 @@ public int parseArguments(Parameters params) throws CmdLineException {
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/BUILD b/org.eclipse.jgit.test/BUILD
index ae6f242..bbda838 100644
--- a/org.eclipse.jgit.test/BUILD
+++ b/org.eclipse.jgit.test/BUILD
@@ -33,7 +33,7 @@
tests(glob(
["tst/**/*.java"],
- exclude = HELPERS + DATA
+ exclude = HELPERS + DATA,
))
java_library(
@@ -54,8 +54,8 @@
)
genrule2(
- name = 'tst_rsrc_jar',
- cmd = 'o=$$PWD/$@ && tar cf - $(SRCS) | tar -C $$TMP --strip-components=2 -xf - && cd $$TMP && zip -qr $$o .',
- srcs = glob(['tst-rsrc/**']),
- outs = ['tst_rsrc.jar',],
+ name = "tst_rsrc_jar",
+ srcs = glob(["tst-rsrc/**"]),
+ outs = ["tst_rsrc.jar"],
+ cmd = "o=$$PWD/$@ && tar cf - $(SRCS) | tar -C $$TMP --strip-components=2 -xf - && cd $$TMP && zip -qr $$o .",
)
diff --git a/org.eclipse.jgit.test/META-INF/MANIFEST.MF b/org.eclipse.jgit.test/META-INF/MANIFEST.MF
index 0400f1e..6791020 100644
--- a/org.eclipse.jgit.test/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit.test/META-INF/MANIFEST.MF
@@ -8,6 +8,7 @@
Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: JavaSE-1.8
Import-Package: com.googlecode.javaewah;version="[1.1.6,2.0.0)",
+ com.jcraft.jsch;version="[0.1.54,0.2.0)",
org.eclipse.jgit.api;version="[4.9.0,4.10.0)",
org.eclipse.jgit.api.errors;version="[4.9.0,4.10.0)",
org.eclipse.jgit.attributes;version="[4.9.0,4.10.0)",
@@ -23,9 +24,12 @@
org.eclipse.jgit.ignore;version="[4.9.0,4.10.0)",
org.eclipse.jgit.ignore.internal;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.fsck;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.dfs;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.file;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.io;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.pack;version="[4.9.0,4.10.0)",
+ org.eclipse.jgit.internal.storage.reftable;version="[4.9.0,4.10.0)",
org.eclipse.jgit.internal.storage.reftree;version="[4.9.0,4.10.0)",
org.eclipse.jgit.junit;version="[4.9.0,4.10.0)",
org.eclipse.jgit.lfs;version="[4.9.0,4.10.0)",
diff --git a/org.eclipse.jgit.test/pom.xml b/org.eclipse.jgit.test/pom.xml
index dad1e3c..084014c 100644
--- a/org.eclipse.jgit.test/pom.xml
+++ b/org.eclipse.jgit.test/pom.xml
@@ -66,7 +66,6 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
- <scope>test</scope>
</dependency>
<!-- Optional security provider for encryption tests. -->
diff --git a/org.eclipse.jgit.test/src/org/eclipse/jgit/events/ChangeRecorder.java b/org.eclipse.jgit.test/src/org/eclipse/jgit/events/ChangeRecorder.java
new file mode 100644
index 0000000..c5582a8
--- /dev/null
+++ b/org.eclipse.jgit.test/src/org/eclipse/jgit/events/ChangeRecorder.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.events;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * A {@link WorkingTreeModifiedListener} that can be used in tests to check
+ * expected events.
+ */
+public class ChangeRecorder implements WorkingTreeModifiedListener {
+
+ public static final String[] EMPTY = new String[0];
+
+ private Set<String> modified = new HashSet<>();
+
+ private Set<String> deleted = new HashSet<>();
+
+ private int eventCount;
+
+ @Override
+ public void onWorkingTreeModified(WorkingTreeModifiedEvent event) {
+ eventCount++;
+ modified.removeAll(event.getDeleted());
+ deleted.removeAll(event.getModified());
+ modified.addAll(event.getModified());
+ deleted.addAll(event.getDeleted());
+ }
+
+ private String[] getModified() {
+ return modified.toArray(new String[modified.size()]);
+ }
+
+ private String[] getDeleted() {
+ return deleted.toArray(new String[deleted.size()]);
+ }
+
+ private void reset() {
+ eventCount = 0;
+ modified.clear();
+ deleted.clear();
+ }
+
+ public void assertNoEvent() {
+ assertEquals("Unexpected WorkingTreeModifiedEvent ", 0, eventCount);
+ }
+
+ public void assertEvent(String[] expectedModified,
+ String[] expectedDeleted) {
+ String[] actuallyModified = getModified();
+ String[] actuallyDeleted = getDeleted();
+ Arrays.sort(actuallyModified);
+ Arrays.sort(expectedModified);
+ Arrays.sort(actuallyDeleted);
+ Arrays.sort(expectedDeleted);
+ assertArrayEquals("Unexpected modifications reported", expectedModified,
+ actuallyModified);
+ assertArrayEquals("Unexpected deletions reported", expectedDeleted,
+ actuallyDeleted);
+ reset();
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/AddCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/AddCommandTest.java
index ed3907e..aafda01 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/AddCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/AddCommandTest.java
@@ -303,6 +303,21 @@ public void testAttributesWithTreeWalkFilter()
}
@Test
+ public void testAttributesConflictingMatch() throws Exception {
+ writeTrashFile(".gitattributes", "foo/** crlf=input\n*.jar binary");
+ writeTrashFile("foo/bar.jar", "\r\n");
+ // We end up with attributes [binary -diff -merge -text crlf=input].
+ // crlf should have no effect when -text is present.
+ try (Git git = new Git(db)) {
+ git.add().addFilepattern(".").call();
+ assertEquals(
+ "[.gitattributes, mode:100644, content:foo/** crlf=input\n*.jar binary]"
+ + "[foo/bar.jar, mode:100644, content:\r\n]",
+ indexState(CONTENT));
+ }
+ }
+
+ @Test
public void testCleanFilterEnvironment()
throws IOException, GitAPIException {
writeTrashFile(".gitattributes", "*.txt filter=tstFilter");
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CloneCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CloneCommandTest.java
index ae0b8dd..6ff3b25 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CloneCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CloneCommandTest.java
@@ -76,6 +76,7 @@
import org.eclipse.jgit.submodule.SubmoduleWalk;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.RemoteConfig;
+import org.eclipse.jgit.transport.URIish;
import org.eclipse.jgit.util.SystemReader;
import org.junit.Test;
@@ -145,16 +146,36 @@ public void testCloneRepositoryExplicitGitDir() throws IOException,
File directory = createTempDirectory("testCloneRepository");
CloneCommand command = Git.cloneRepository();
command.setDirectory(directory);
- command.setGitDir(new File(directory, ".git"));
+ command.setGitDir(new File(directory, Constants.DOT_GIT));
command.setURI(fileUri());
Git git2 = command.call();
addRepoToClose(git2.getRepository());
assertEquals(directory, git2.getRepository().getWorkTree());
- assertEquals(new File(directory, ".git"), git2.getRepository()
+ assertEquals(new File(directory, Constants.DOT_GIT), git2.getRepository()
.getDirectory());
}
@Test
+ public void testCloneRepositoryDefaultDirectory() throws IOException, URISyntaxException,
+ JGitInternalException, GitAPIException {
+ CloneCommand command = Git.cloneRepository().setURI(fileUri());
+
+ command.verifyDirectories(new URIish(fileUri()));
+ File directory = command.getDirectory();
+ assertEquals(git.getRepository().getWorkTree().getName(), directory.getName());
+ }
+
+ @Test
+ public void testCloneBareRepositoryDefaultDirectory() throws IOException, URISyntaxException,
+ JGitInternalException, GitAPIException {
+ CloneCommand command = Git.cloneRepository().setURI(fileUri()).setBare(true);
+
+ command.verifyDirectories(new URIish(fileUri()));
+ File directory = command.getDirectory();
+ assertEquals(git.getRepository().getWorkTree().getName() + Constants.DOT_GIT_EXT, directory.getName());
+ }
+
+ @Test
public void testCloneRepositoryExplicitGitDirNonStd() throws IOException,
JGitInternalException, GitAPIException {
File directory = createTempDirectory("testCloneRepository");
@@ -168,8 +189,8 @@ public void testCloneRepositoryExplicitGitDirNonStd() throws IOException,
assertEquals(directory, git2.getRepository().getWorkTree());
assertEquals(gDir, git2.getRepository()
.getDirectory());
- assertTrue(new File(directory, ".git").isFile());
- assertFalse(new File(gDir, ".git").exists());
+ assertTrue(new File(directory, Constants.DOT_GIT).isFile());
+ assertFalse(new File(gDir, Constants.DOT_GIT).exists());
}
@Test
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
index 7e657e6..a0834e7 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/CommitCommandTest.java
@@ -76,6 +76,7 @@
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.filter.TreeFilter;
import org.eclipse.jgit.util.FS;
+import org.junit.Ignore;
import org.junit.Test;
/**
@@ -305,6 +306,7 @@ public void commitSubmoduleUpdate() throws Exception {
}
}
+ @Ignore("very flaky when run with Hudson")
@Test
public void commitUpdatesSmudgedEntries() throws Exception {
try (Git git = new Git(db)) {
@@ -361,6 +363,7 @@ public void commitUpdatesSmudgedEntries() throws Exception {
}
}
+ @Ignore("very flaky when run with Hudson")
@Test
public void commitIgnoresSmudgedEntryWithDifferentId() throws Exception {
try (Git git = new Git(db)) {
@@ -554,6 +557,11 @@ public void commitEmptyCommits() throws Exception {
} catch (EmtpyCommitException e) {
// expect this exception
}
+
+			// Allow empty commits even when setOnly was set
+ git.commit().setAuthor("New Author", "newauthor@example.org")
+ .setMessage("again no change").setOnly("file1")
+ .setAllowEmpty(true).call();
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LogCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LogCommandTest.java
index 38178bf..bd0efad 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LogCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/LogCommandTest.java
@@ -289,4 +289,4 @@ private void setCommitsAndMerge() throws Exception {
.setMessage("merge s0 with m1").call();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/PullCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/PullCommandTest.java
index 823516b..a341284 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/PullCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/PullCommandTest.java
@@ -620,4 +620,4 @@ private static void assertFileContentsEqual(File actFile, String string)
fis.close();
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/StashApplyCommandTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/StashApplyCommandTest.java
index f2e4d5b..ad3ab7f 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/StashApplyCommandTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/api/StashApplyCommandTest.java
@@ -55,12 +55,15 @@
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.api.errors.NoHeadException;
import org.eclipse.jgit.api.errors.StashApplyFailureException;
+import org.eclipse.jgit.events.ChangeRecorder;
+import org.eclipse.jgit.events.ListenerHandle;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.util.FileUtils;
+import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -77,15 +80,31 @@ public class StashApplyCommandTest extends RepositoryTestCase {
private File committedFile;
+ private ChangeRecorder recorder;
+
+ private ListenerHandle handle;
+
@Override
@Before
public void setUp() throws Exception {
super.setUp();
git = Git.wrap(db);
+ recorder = new ChangeRecorder();
+ handle = db.getListenerList().addWorkingTreeModifiedListener(recorder);
committedFile = writeTrashFile(PATH, "content");
git.add().addFilepattern(PATH).call();
head = git.commit().setMessage("add file").call();
assertNotNull(head);
+ recorder.assertNoEvent();
+ }
+
+ @Override
+ @After
+ public void tearDown() throws Exception {
+ if (handle != null) {
+ handle.remove();
+ }
+ super.tearDown();
}
@Test
@@ -95,10 +114,12 @@ public void workingDirectoryDelete() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertFalse(committedFile.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { PATH });
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -121,11 +142,13 @@ public void indexAdd() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertFalse(addedFile.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { addedPath });
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertTrue(addedFile.exists());
assertEquals("content2", read(addedFile));
+ recorder.assertEvent(new String[] { addedPath }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getChanged().isEmpty());
@@ -142,14 +165,17 @@ public void indexAdd() throws Exception {
@Test
public void indexDelete() throws Exception {
git.rm().addFilepattern("file.txt").call();
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { "file.txt" });
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertFalse(committedFile.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { "file.txt" });
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -170,10 +196,12 @@ public void workingDirectoryModify() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertEquals("content2", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -193,16 +221,21 @@ public void workingDirectoryModifyInSubfolder() throws Exception {
File subfolderFile = writeTrashFile(path, "content");
git.add().addFilepattern(path).call();
head = git.commit().setMessage("add file").call();
+ recorder.assertNoEvent();
writeTrashFile(path, "content2");
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(subfolderFile));
+ recorder.assertEvent(new String[] { "d1/d2/f.txt" },
+ ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertEquals("content2", read(subfolderFile));
+ recorder.assertEvent(new String[] { "d1/d2/f.txt", "d1/d2", "d1" },
+ ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -225,10 +258,12 @@ public void workingDirectoryModifyIndexChanged() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertEquals("content3", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -252,10 +287,12 @@ public void workingDirectoryCleanIndexModify() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertEquals("content2", read(committedFile));
+ recorder.assertEvent(new String[] { "file.txt" }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -281,10 +318,12 @@ public void workingDirectoryDeleteIndexAdd() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertFalse(added.exists());
+ recorder.assertNoEvent();
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertEquals("content2", read(added));
+ recorder.assertEvent(new String[] { path }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getChanged().isEmpty());
@@ -308,10 +347,12 @@ public void workingDirectoryDeleteIndexEdit() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertEquals("content", read(committedFile));
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
assertFalse(committedFile.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { PATH });
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -337,9 +378,13 @@ public void multipleEdits() throws Exception {
assertNotNull(stashed);
assertTrue(committedFile.exists());
assertFalse(addedFile.exists());
+ recorder.assertEvent(new String[] { PATH },
+ new String[] { "file2.txt" });
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
+ recorder.assertEvent(new String[] { "file2.txt" },
+ new String[] { PATH });
Status status = git.status().call();
assertTrue(status.getChanged().isEmpty());
@@ -362,6 +407,7 @@ public void workingDirectoryContentConflict() throws Exception {
assertNotNull(stashed);
assertEquals("content", read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "content3");
@@ -372,6 +418,7 @@ public void workingDirectoryContentConflict() throws Exception {
// expected
}
assertEquals("content3", read(PATH));
+ recorder.assertNoEvent();
}
@Test
@@ -391,10 +438,12 @@ public void stashedContentMerge() throws Exception {
assertEquals("content\nhead change\nmore content\n",
read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "content\nmore content\ncommitted change\n");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("committed change").call();
+ recorder.assertNoEvent();
try {
git.stashApply().call();
@@ -402,6 +451,7 @@ public void stashedContentMerge() throws Exception {
} catch (StashApplyFailureException e) {
// expected
}
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
Status status = new StatusCommand(db).call();
assertEquals(1, status.getConflicting().size());
assertEquals(
@@ -426,12 +476,15 @@ public void stashedApplyOnOtherBranch() throws Exception {
writeTrashFile(PATH, "master content");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("even content").call();
+ recorder.assertNoEvent();
git.checkout().setName(otherBranch).call();
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "otherBranch content");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("even more content").call();
+ recorder.assertNoEvent();
writeTrashFile(path2, "content\nstashed change\nmore content\n");
@@ -442,12 +495,15 @@ public void stashedApplyOnOtherBranch() throws Exception {
assertEquals("otherBranch content",
read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { path2 }, ChangeRecorder.EMPTY);
git.checkout().setName("master").call();
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
git.stashApply().call();
assertEquals("content\nstashed change\nmore content\n", read(file2));
assertEquals("master content",
read(committedFile));
+ recorder.assertEvent(new String[] { path2 }, ChangeRecorder.EMPTY);
}
@Test
@@ -467,12 +523,15 @@ public void stashedApplyOnOtherBranchWithStagedChange() throws Exception {
writeTrashFile(PATH, "master content");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("even content").call();
+ recorder.assertNoEvent();
git.checkout().setName(otherBranch).call();
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "otherBranch content");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("even more content").call();
+ recorder.assertNoEvent();
writeTrashFile(path2,
"content\nstashed change in index\nmore content\n");
@@ -485,8 +544,10 @@ public void stashedApplyOnOtherBranchWithStagedChange() throws Exception {
assertEquals("content\nmore content\n", read(file2));
assertEquals("otherBranch content", read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { path2 }, ChangeRecorder.EMPTY);
git.checkout().setName("master").call();
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
git.stashApply().call();
assertEquals("content\nstashed change\nmore content\n", read(file2));
assertEquals(
@@ -494,6 +555,7 @@ public void stashedApplyOnOtherBranchWithStagedChange() throws Exception {
+ "[file2.txt, mode:100644, content:content\nstashed change in index\nmore content\n]",
indexState(CONTENT));
assertEquals("master content", read(committedFile));
+ recorder.assertEvent(new String[] { path2 }, ChangeRecorder.EMPTY);
}
@Test
@@ -501,6 +563,7 @@ public void workingDirectoryContentMerge() throws Exception {
writeTrashFile(PATH, "content\nmore content\n");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("more content").call();
+ recorder.assertNoEvent();
writeTrashFile(PATH, "content\nstashed change\nmore content\n");
@@ -508,15 +571,18 @@ public void workingDirectoryContentMerge() throws Exception {
assertNotNull(stashed);
assertEquals("content\nmore content\n", read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "content\nmore content\ncommitted change\n");
git.add().addFilepattern(PATH).call();
git.commit().setMessage("committed change").call();
+ recorder.assertNoEvent();
git.stashApply().call();
assertEquals(
"content\nstashed change\nmore content\ncommitted change\n",
read(committedFile));
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
}
@Test
@@ -527,6 +593,7 @@ public void indexContentConflict() throws Exception {
assertNotNull(stashed);
assertEquals("content", read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
writeTrashFile(PATH, "content3");
git.add().addFilepattern(PATH).call();
@@ -538,6 +605,7 @@ public void indexContentConflict() throws Exception {
} catch (StashApplyFailureException e) {
// expected
}
+ recorder.assertNoEvent();
assertEquals("content2", read(PATH));
}
@@ -549,6 +617,7 @@ public void workingDirectoryEditPreCommit() throws Exception {
assertNotNull(stashed);
assertEquals("content", read(committedFile));
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
String path2 = "file2.txt";
writeTrashFile(path2, "content3");
@@ -557,6 +626,7 @@ public void workingDirectoryEditPreCommit() throws Exception {
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getAdded().isEmpty());
@@ -583,12 +653,15 @@ public void stashChangeInANewSubdirectory() throws Exception {
RevCommit stashed = git.stashCreate().call();
assertNotNull(stashed);
assertTrue(git.status().call().isClean());
+ recorder.assertEvent(ChangeRecorder.EMPTY,
+ new String[] { subdir, path });
git.branchCreate().setName(otherBranch).call();
git.checkout().setName(otherBranch).call();
ObjectId unstashed = git.stashApply().call();
assertEquals(stashed, unstashed);
+ recorder.assertEvent(new String[] { path }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertTrue(status.getChanged().isEmpty());
@@ -643,12 +716,15 @@ public void testApplyStashWithDeletedFile() throws Exception {
git.commit().setMessage("x").call();
file.delete();
git.rm().addFilepattern("file").call();
+ recorder.assertNoEvent();
git.stashCreate().call();
+ recorder.assertEvent(new String[] { "file" }, ChangeRecorder.EMPTY);
file.delete();
git.stashApply().setStashRef("stash@{0}").call();
assertFalse(file.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { "file" });
}
@Test
@@ -660,9 +736,11 @@ public void untrackedFileNotIncluded() throws Exception {
git.add().addFilepattern(PATH).call();
git.stashCreate().call();
assertTrue(untrackedFile.exists());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
git.stashApply().setStashRef("stash@{0}").call();
assertTrue(untrackedFile.exists());
+ recorder.assertEvent(new String[] { PATH }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertEquals(1, status.getUntracked().size());
@@ -684,11 +762,14 @@ public void untrackedFileIncluded() throws Exception {
.call();
assertNotNull(stashedCommit);
assertFalse(untrackedFile.exists());
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { path });
+
deleteTrashFile("a/b"); // checkout should create parent dirs
git.stashApply().setStashRef("stash@{0}").call();
assertTrue(untrackedFile.exists());
assertEquals("content", read(path));
+ recorder.assertEvent(new String[] { path }, ChangeRecorder.EMPTY);
Status status = git.status().call();
assertEquals(1, status.getUntracked().size());
@@ -706,6 +787,7 @@ public void untrackedFileConflictsWithCommit() throws Exception {
String path = "untracked.txt";
writeTrashFile(path, "untracked");
git.stashCreate().setIncludeUntracked(true).call();
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { path });
writeTrashFile(path, "committed");
head = git.commit().setMessage("add file").call();
@@ -719,6 +801,7 @@ public void untrackedFileConflictsWithCommit() throws Exception {
assertEquals(e.getMessage(), JGitText.get().stashApplyConflict);
}
assertEquals("committed", read(path));
+ recorder.assertNoEvent();
}
@Test
@@ -727,6 +810,7 @@ public void untrackedFileConflictsWithWorkingDirectory()
String path = "untracked.txt";
writeTrashFile(path, "untracked");
git.stashCreate().setIncludeUntracked(true).call();
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { path });
writeTrashFile(path, "working-directory");
try {
@@ -736,6 +820,7 @@ public void untrackedFileConflictsWithWorkingDirectory()
assertEquals(e.getMessage(), JGitText.get().stashApplyConflict);
}
assertEquals("working-directory", read(path));
+ recorder.assertNoEvent();
}
@Test
@@ -747,11 +832,13 @@ public void untrackedAndTrackedChanges() throws Exception {
assertTrue(PATH + " should exist", check(PATH));
assertEquals(PATH + " should have been reset", "content", read(PATH));
assertFalse(path + " should not exist", check(path));
+ recorder.assertEvent(new String[] { PATH }, new String[] { path });
git.stashApply().setStashRef("stash@{0}").call();
assertTrue(PATH + " should exist", check(PATH));
assertEquals(PATH + " should have new content", "changed", read(PATH));
assertTrue(path + " should exist", check(path));
assertEquals(path + " should have new content", "untracked",
read(path));
+ recorder.assertEvent(new String[] { PATH, path }, ChangeRecorder.EMPTY);
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
index ca456b3..50d020c 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/attributes/AttributesHandlerTest.java
@@ -254,6 +254,33 @@ public void testCyclicMacros() throws Exception {
endWalk();
}
+ @Test
+ public void testRelativePaths() throws Exception {
+ setupRepo("sub/ global", "sub/** init",
+ "sub/** top_sub\n*.txt top",
+ "sub/** subsub\nsub/ subsub2\n*.txt foo");
+ // The last two sub/** and sub/ rules live in sub/.gitattributes. They
+ // must not apply to any of the files here; they would only match in a
+ // deeper subdirectory such as sub/sub.
+ walk = beginWalk();
+ assertIteration(F, ".gitattributes");
+ assertIteration(D, "sub", attrs("global"));
+ assertIteration(F, "sub/.gitattributes", attrs("init top_sub global"));
+ assertIteration(F, "sub/a.txt", attrs("init foo top top_sub global"));
+ endWalk();
+ // All right, let's see that they *do* apply in sub/sub:
+ writeTrashFile("sub/sub/b.txt", "b");
+ walk = beginWalk();
+ assertIteration(F, ".gitattributes");
+ assertIteration(D, "sub", attrs("global"));
+ assertIteration(F, "sub/.gitattributes", attrs("init top_sub global"));
+ assertIteration(F, "sub/a.txt", attrs("init foo top top_sub global"));
+ assertIteration(D, "sub/sub", attrs("init subsub2 top_sub global"));
+ assertIteration(F, "sub/sub/b.txt",
+ attrs("init foo subsub2 subsub top top_sub global"));
+ endWalk();
+ }
+
private static Collection<Attribute> attrs(String s) {
return new AttributesRule("*", s).getAttributes();
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/ignore/internal/StringsTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/ignore/internal/StringsTest.java
new file mode 100644
index 0000000..468989f
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/ignore/internal/StringsTest.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.ignore.internal;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class StringsTest {
+
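+ /**
+  * Asserts both counting modes of Strings.count(): {@code n} is the
+  * expected total number of '/' occurrences, {@code m} the expected
+  * count when a '/' at the very first or last index of the string is
+  * ignored (behavior inferred from the cases in testCount() below).
+  */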
+ private void testString(String string, int n, int m) {
+ assertEquals(string, n, Strings.count(string, '/', false));
+ assertEquals(string, m, Strings.count(string, '/', true));
+ }
+
+ @Test
+ public void testCount() {
+ testString("", 0, 0);
+ testString("/", 1, 0);
+ testString("//", 2, 0);
+ testString("///", 3, 1);
+ testString("////", 4, 2);
+ testString("foo", 0, 0);
+ testString("/foo", 1, 0);
+ testString("foo/", 1, 0);
+ testString("/foo/", 2, 0);
+ testString("foo/bar", 1, 1);
+ testString("/foo/bar/", 3, 1);
+ testString("/foo/bar//", 4, 2);
+ testString("/foo//bar/", 4, 2);
+ testString(" /foo/ ", 2, 2);
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsFsckTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsFsckTest.java
new file mode 100644
index 0000000..804d744
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/dfs/DfsFsckTest.java
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import static org.eclipse.jgit.junit.JGitTestUtil.concat;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Constants.encodeASCII;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.internal.fsck.FsckError;
+import org.eclipse.jgit.internal.fsck.FsckError.CorruptObject;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.ObjectChecker.ErrorType;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.junit.Before;
+import org.junit.Test;
+
+public class DfsFsckTest {
+ private TestRepository<InMemoryRepository> git;
+
+ private InMemoryRepository repo;
+
+ private ObjectInserter ins;
+
+ @Before
+ public void setUp() throws IOException {
+ DfsRepositoryDescription desc = new DfsRepositoryDescription("test");
+ git = new TestRepository<>(new InMemoryRepository(desc));
+ repo = git.getRepository();
+ ins = repo.newObjectInserter();
+ }
+
+ @Test
+ public void testHealthyRepo() throws Exception {
+ RevCommit commit0 = git.commit().message("0").create();
+ RevCommit commit1 = git.commit().message("1").parent(commit0).create();
+ git.update("master", commit1);
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 0);
+ assertEquals(errors.getMissingObjects().size(), 0);
+ assertEquals(errors.getCorruptIndices().size(), 0);
+ }
+
+ @Test
+ public void testCommitWithCorruptAuthor() throws Exception {
+ StringBuilder b = new StringBuilder();
+ b.append("tree be9bfa841874ccc9f2ef7c48d0c76226f89b7189\n");
+ b.append("author b <b@c> <b@c> 0 +0000\n");
+ b.append("committer <> 0 +0000\n");
+ byte[] data = encodeASCII(b.toString());
+ ObjectId id = ins.insert(Constants.OBJ_COMMIT, data);
+ ins.flush();
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 1);
+ CorruptObject o = errors.getCorruptObjects().iterator().next();
+ assertTrue(o.getId().equals(id));
+ assertEquals(o.getErrorType(), ErrorType.BAD_DATE);
+ }
+
+ @Test
+ public void testCommitWithoutTree() throws Exception {
+ StringBuilder b = new StringBuilder();
+ b.append("parent ");
+ b.append("be9bfa841874ccc9f2ef7c48d0c76226f89b7189");
+ b.append('\n');
+ byte[] data = encodeASCII(b.toString());
+ ObjectId id = ins.insert(Constants.OBJ_COMMIT, data);
+ ins.flush();
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 1);
+ CorruptObject o = errors.getCorruptObjects().iterator().next();
+ assertTrue(o.getId().equals(id));
+ assertEquals(o.getErrorType(), ErrorType.MISSING_TREE);
+ }
+
+ @Test
+ public void testTagWithoutObject() throws Exception {
+ StringBuilder b = new StringBuilder();
+ b.append("type commit\n");
+ b.append("tag test-tag\n");
+ b.append("tagger A. U. Thor <author@localhost> 1 +0000\n");
+ byte[] data = encodeASCII(b.toString());
+ ObjectId id = ins.insert(Constants.OBJ_TAG, data);
+ ins.flush();
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 1);
+ CorruptObject o = errors.getCorruptObjects().iterator().next();
+ assertTrue(o.getId().equals(id));
+ assertEquals(o.getErrorType(), ErrorType.MISSING_OBJECT);
+ }
+
+ @Test
+ public void testTreeWithNullSha() throws Exception {
+ byte[] data = concat(encodeASCII("100644 A"), new byte[] { '\0' },
+ new byte[OBJECT_ID_LENGTH]);
+ ObjectId id = ins.insert(Constants.OBJ_TREE, data);
+ ins.flush();
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 1);
+ CorruptObject o = errors.getCorruptObjects().iterator().next();
+ assertTrue(o.getId().equals(id));
+ assertEquals(o.getErrorType(), ErrorType.NULL_SHA1);
+ }
+
+ @Test
+ public void testMultipleInvalidObjects() throws Exception {
+ StringBuilder b = new StringBuilder();
+ b.append("tree ");
+ b.append("be9bfa841874ccc9f2ef7c48d0c76226f89b7189");
+ b.append('\n');
+ b.append("parent ");
+ b.append("\n");
+ byte[] data = encodeASCII(b.toString());
+ ObjectId id1 = ins.insert(Constants.OBJ_COMMIT, data);
+
+ b = new StringBuilder();
+ b.append("100644");
+ data = encodeASCII(b.toString());
+ ObjectId id2 = ins.insert(Constants.OBJ_TREE, data);
+
+ ins.flush();
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+
+ assertEquals(errors.getCorruptObjects().size(), 2);
+ for (CorruptObject o : errors.getCorruptObjects()) {
+ if (o.getId().equals(id1)) {
+ assertEquals(o.getErrorType(), ErrorType.BAD_PARENT_SHA1);
+ } else if (o.getId().equals(id2)) {
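+ // The truncated tree ("100644") is reported as corrupt, but no
+ // specific ErrorType is attached (presumably the parse fails
+ // before one can be assigned).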
+ assertNull(o.getErrorType());
+ } else {
+ fail();
+ }
+ }
+ }
+
+ @Test
+ public void testValidConnectivity() throws Exception {
+ ObjectId blobId = ins
+ .insert(Constants.OBJ_BLOB, Constants.encode("foo"));
+
+ byte[] blobIdBytes = new byte[OBJECT_ID_LENGTH];
+ blobId.copyRawTo(blobIdBytes, 0);
+ byte[] data = concat(encodeASCII("100644 regular-file\0"), blobIdBytes);
+ ObjectId treeId = ins.insert(Constants.OBJ_TREE, data);
+ ins.flush();
+
+ RevCommit commit = git.commit().message("0").setTopLevelTree(treeId)
+ .create();
+
+ git.update("master", commit);
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+ assertEquals(errors.getMissingObjects().size(), 0);
+ }
+
+ @Test
+ public void testMissingObject() throws Exception {
+ ObjectId blobId = ObjectId
+ .fromString("19102815663d23f8b75a47e7a01965dcdc96468c");
+ byte[] blobIdBytes = new byte[OBJECT_ID_LENGTH];
+ blobId.copyRawTo(blobIdBytes, 0);
+ byte[] data = concat(encodeASCII("100644 regular-file\0"), blobIdBytes);
+ ObjectId treeId = ins.insert(Constants.OBJ_TREE, data);
+ ins.flush();
+
+ RevCommit commit = git.commit().message("0").setTopLevelTree(treeId)
+ .create();
+
+ git.update("master", commit);
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+ assertEquals(errors.getMissingObjects().size(), 1);
+ assertEquals(errors.getMissingObjects().iterator().next(), blobId);
+ }
+
+ @Test
+ public void testNonCommitHead() throws Exception {
+ RevCommit commit0 = git.commit().message("0").create();
+ StringBuilder b = new StringBuilder();
+ b.append("object ");
+ b.append(commit0.getName());
+ b.append('\n');
+ b.append("type commit\n");
+ b.append("tag test-tag\n");
+ b.append("tagger A. U. Thor <author@localhost> 1 +0000\n");
+
+ byte[] data = encodeASCII(b.toString());
+ ObjectId tagId = ins.insert(Constants.OBJ_TAG, data);
+ ins.flush();
+
+ git.update("master", tagId);
+
+ DfsFsck fsck = new DfsFsck(repo);
+ FsckError errors = fsck.check(null);
+ assertEquals(errors.getCorruptObjects().size(), 0);
+ assertEquals(errors.getNonCommitHeads().size(), 1);
+ assertEquals(errors.getNonCommitHeads().iterator().next(),
+ "refs/heads/master");
+ }
+
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
new file mode 100644
index 0000000..34f6c71
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/BatchRefUpdateTest.java
@@ -0,0 +1,915 @@
+/*
+ * Copyright (C) 2017 Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.file;
+
+import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.LOCK_FAILURE;
+import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.OK;
+import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.REJECTED_MISSING_OBJECT;
+import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.REJECTED_NONFASTFORWARD;
+import static org.eclipse.jgit.internal.storage.file.BatchRefUpdateTest.Result.TRANSACTION_ABORTED;
+import static org.eclipse.jgit.lib.ObjectId.zeroId;
+import static org.eclipse.jgit.transport.ReceiveCommand.Type.CREATE;
+import static org.eclipse.jgit.transport.ReceiveCommand.Type.DELETE;
+import static org.eclipse.jgit.transport.ReceiveCommand.Type.UPDATE;
+import static org.eclipse.jgit.transport.ReceiveCommand.Type.UPDATE_NONFASTFORWARD;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Predicate;
+
+import org.eclipse.jgit.junit.LocalDiskRepositoryTestCase;
+import org.eclipse.jgit.junit.StrictWorkMonitor;
+import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.BatchRefUpdate;
+import org.eclipse.jgit.lib.CheckoutEntry;
+import org.eclipse.jgit.lib.ConfigConstants;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefDatabase;
+import org.eclipse.jgit.lib.RefUpdate;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.lib.ReflogReader;
+import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.StoredConfig;
+import org.eclipse.jgit.revwalk.RevCommit;
+import org.eclipse.jgit.revwalk.RevWalk;
+import org.eclipse.jgit.transport.ReceiveCommand;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+@SuppressWarnings("boxing")
+@RunWith(Parameterized.class)
+public class BatchRefUpdateTest extends LocalDiskRepositoryTestCase {
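+ // Each test runs twice: once with an atomic BatchRefUpdate and once
+ // with atomic updates disabled (see newBatchUpdate()).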
+ @Parameter
+ public boolean atomic;
+
+ @Parameters(name = "atomic={0}")
+ public static Collection<Object[]> data() {
+ return Arrays.asList(new Object[][]{ {Boolean.FALSE}, {Boolean.TRUE} });
+ }
+
+ private Repository diskRepo;
+ private TestRepository<Repository> repo;
+ private RefDirectory refdir;
+ private RevCommit A;
+ private RevCommit B;
+
+ @Override
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+
+ diskRepo = createBareRepository();
+ StoredConfig cfg = diskRepo.getConfig();
+ cfg.load();
+ cfg.setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null,
+ ConfigConstants.CONFIG_KEY_LOGALLREFUPDATES, true);
+ cfg.save();
+
+ refdir = (RefDirectory) diskRepo.getRefDatabase();
+ refdir.setRetrySleepMs(Arrays.asList(0, 0));
+
+ repo = new TestRepository<>(diskRepo);
+ A = repo.commit().create();
+ B = repo.commit(repo.getRevWalk().parseCommit(A));
+ }
+
+ @Test
+ public void simpleNoForce() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+ writeLooseRef("refs/heads/masters", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(B, A, "refs/heads/masters", UPDATE_NONFASTFORWARD));
+ execute(newBatchUpdate(cmds));
+
+ if (atomic) {
+ assertResults(cmds, TRANSACTION_ABORTED, REJECTED_NONFASTFORWARD);
+ assertRefs(
+ "refs/heads/master", A,
+ "refs/heads/masters", B);
+ } else {
+ assertResults(cmds, OK, REJECTED_NONFASTFORWARD);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/masters", B);
+ }
+ }
+
+ @Test
+ public void simpleForce() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+ writeLooseRef("refs/heads/masters", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(B, A, "refs/heads/masters", UPDATE_NONFASTFORWARD));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/masters", A);
+ }
+
+ @Test
+ public void nonFastForwardDoesNotDoExpensiveMergeCheck() throws IOException {
+ writeLooseRef("refs/heads/master", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(B, A, "refs/heads/master", UPDATE_NONFASTFORWARD));
+ try (RevWalk rw = new RevWalk(diskRepo) {
+ @Override
+ public boolean isMergedInto(RevCommit base, RevCommit tip) {
+ throw new AssertionError("isMergedInto() should not be called");
+ }
+ }) {
+ newBatchUpdate(cmds)
+ .setAllowNonFastForwards(true)
+ .execute(rw, new StrictWorkMonitor());
+ }
+
+ assertResults(cmds, OK);
+ assertRefs("refs/heads/master", A);
+ }
+
+ @Test
+ public void fileDirectoryConflict() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+ writeLooseRef("refs/heads/masters", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), A, "refs/heads/master/x", CREATE),
+ new ReceiveCommand(zeroId(), A, "refs/heads", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true), false);
+
+ if (atomic) {
+ // Atomic update sees that master and master/x are conflicting, then marks
+ // the first one in the list as LOCK_FAILURE and aborts the rest.
+ assertResults(cmds,
+ LOCK_FAILURE, TRANSACTION_ABORTED, TRANSACTION_ABORTED);
+ assertRefs(
+ "refs/heads/master", A,
+ "refs/heads/masters", B);
+ } else {
+ // Non-atomic updates are applied in order: master succeeds, then master/x
+ // fails due to conflict.
+ assertResults(cmds, OK, LOCK_FAILURE, LOCK_FAILURE);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/masters", B);
+ }
+ }
+
+ @Test
+ public void conflictThanksToDelete() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+ writeLooseRef("refs/heads/masters", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), A, "refs/heads/masters/x", CREATE),
+ new ReceiveCommand(B, zeroId(), "refs/heads/masters", DELETE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertResults(cmds, OK, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/masters/x", A);
+ }
+
+ @Test
+ public void updateToMissingObject() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+
+ ObjectId bad =
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, bad, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/foo2", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true), false);
+
+ if (atomic) {
+ assertResults(cmds, REJECTED_MISSING_OBJECT, TRANSACTION_ABORTED);
+ assertRefs("refs/heads/master", A);
+ } else {
+ assertResults(cmds, REJECTED_MISSING_OBJECT, OK);
+ assertRefs(
+ "refs/heads/master", A,
+ "refs/heads/foo2", B);
+ }
+ }
+
+ @Test
+ public void addMissingObject() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+
+ ObjectId bad =
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), bad, "refs/heads/foo2", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true), false);
+
+ if (atomic) {
+ assertResults(cmds, TRANSACTION_ABORTED, REJECTED_MISSING_OBJECT);
+ assertRefs("refs/heads/master", A);
+ } else {
+ assertResults(cmds, OK, REJECTED_MISSING_OBJECT);
+ assertRefs("refs/heads/master", B);
+ }
+ }
+
+ @Test
+ public void oneNonExistentRef() throws IOException {
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/foo1", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/foo2", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ if (atomic) {
+ assertResults(cmds, LOCK_FAILURE, TRANSACTION_ABORTED);
+ assertRefs();
+ } else {
+ assertResults(cmds, LOCK_FAILURE, OK);
+ assertRefs("refs/heads/foo2", B);
+ }
+ }
+
+ @Test
+ public void oneRefWrongOldValue() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(B, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/foo2", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ if (atomic) {
+ assertResults(cmds, LOCK_FAILURE, TRANSACTION_ABORTED);
+ assertRefs("refs/heads/master", A);
+ } else {
+ assertResults(cmds, LOCK_FAILURE, OK);
+ assertRefs(
+ "refs/heads/master", A,
+ "refs/heads/foo2", B);
+ }
+ }
+
+ @Test
+ public void nonExistentRef() throws IOException {
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(A, zeroId(), "refs/heads/foo2", DELETE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ if (atomic) {
+ assertResults(cmds, TRANSACTION_ABORTED, LOCK_FAILURE);
+ assertRefs("refs/heads/master", A);
+ } else {
+ assertResults(cmds, OK, LOCK_FAILURE);
+ assertRefs("refs/heads/master", B);
+ }
+ }
+
+ @Test
+ public void noRefLog() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ Map<String, ReflogEntry> oldLogs =
+ getLastReflogs("refs/heads/master", "refs/heads/branch");
+ assertEquals(Collections.singleton("refs/heads/master"), oldLogs.keySet());
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch", B);
+ assertReflogUnchanged(oldLogs, "refs/heads/master");
+ assertReflogUnchanged(oldLogs, "refs/heads/branch");
+ }
+
+ @Test
+ public void reflogDefaultIdent() throws IOException {
+ writeRef("refs/heads/master", A);
+ writeRef("refs/heads/branch2", A);
+
+ Map<String, ReflogEntry> oldLogs = getLastReflogs(
+ "refs/heads/master", "refs/heads/branch1", "refs/heads/branch2");
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch1", CREATE));
+ execute(
+ newBatchUpdate(cmds)
+ .setAllowNonFastForwards(true)
+ .setRefLogMessage("a reflog", false));
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch1", B,
+ "refs/heads/branch2", A);
+ assertReflogEquals(
+ reflog(A, B, new PersonIdent(diskRepo), "a reflog"),
+ getLastReflog("refs/heads/master"));
+ assertReflogEquals(
+ reflog(zeroId(), B, new PersonIdent(diskRepo), "a reflog"),
+ getLastReflog("refs/heads/branch1"));
+ assertReflogUnchanged(oldLogs, "refs/heads/branch2");
+ }
+
+ @Test
+ public void reflogAppendStatusNoMessage() throws IOException {
+ writeRef("refs/heads/master", A);
+ writeRef("refs/heads/branch1", B);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(B, A, "refs/heads/branch1", UPDATE_NONFASTFORWARD),
+ new ReceiveCommand(zeroId(), A, "refs/heads/branch2", CREATE));
+ execute(
+ newBatchUpdate(cmds)
+ .setAllowNonFastForwards(true)
+ .setRefLogMessage(null, true));
+
+ assertResults(cmds, OK, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch1", A,
+ "refs/heads/branch2", A);
+ assertReflogEquals(
+ // Always forced; setAllowNonFastForwards(true) bypasses the check.
+ reflog(A, B, new PersonIdent(diskRepo), "forced-update"),
+ getLastReflog("refs/heads/master"));
+ assertReflogEquals(
+ reflog(B, A, new PersonIdent(diskRepo), "forced-update"),
+ getLastReflog("refs/heads/branch1"));
+ assertReflogEquals(
+ reflog(zeroId(), A, new PersonIdent(diskRepo), "created"),
+ getLastReflog("refs/heads/branch2"));
+ }
+
+ @Test
+ public void reflogAppendStatusFastForward() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE));
+ execute(newBatchUpdate(cmds).setRefLogMessage(null, true));
+
+ assertResults(cmds, OK);
+ assertRefs("refs/heads/master", B);
+ assertReflogEquals(
+ reflog(A, B, new PersonIdent(diskRepo), "fast-forward"),
+ getLastReflog("refs/heads/master"));
+ }
+
+ @Test
+ public void reflogAppendStatusWithMessage() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), A, "refs/heads/branch", CREATE));
+ execute(newBatchUpdate(cmds).setRefLogMessage("a reflog", true));
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch", A);
+ assertReflogEquals(
+ reflog(A, B, new PersonIdent(diskRepo), "a reflog: fast-forward"),
+ getLastReflog("refs/heads/master"));
+ assertReflogEquals(
+ reflog(zeroId(), A, new PersonIdent(diskRepo), "a reflog: created"),
+ getLastReflog("refs/heads/branch"));
+ }
+
+ @Test
+ public void reflogCustomIdent() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+ PersonIdent ident = new PersonIdent("A Reflog User", "reflog@example.com");
+ execute(
+ newBatchUpdate(cmds)
+ .setRefLogMessage("a reflog", false)
+ .setRefLogIdent(ident));
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch", B);
+ assertReflogEquals(
+ reflog(A, B, ident, "a reflog"),
+ getLastReflog("refs/heads/master"),
+ true);
+ assertReflogEquals(
+ reflog(zeroId(), B, ident, "a reflog"),
+ getLastReflog("refs/heads/branch"),
+ true);
+ }
+
+ @Test
+ public void reflogDelete() throws IOException {
+ writeRef("refs/heads/master", A);
+ writeRef("refs/heads/branch", A);
+ assertEquals(
+ 2, getLastReflogs("refs/heads/master", "refs/heads/branch").size());
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, zeroId(), "refs/heads/master", DELETE),
+ new ReceiveCommand(A, B, "refs/heads/branch", UPDATE));
+ execute(newBatchUpdate(cmds).setRefLogMessage("a reflog", false));
+
+ assertResults(cmds, OK, OK);
+ assertRefs("refs/heads/branch", B);
+ assertNull(getLastReflog("refs/heads/master"));
+ assertReflogEquals(
+ reflog(A, B, new PersonIdent(diskRepo), "a reflog"),
+ getLastReflog("refs/heads/branch"));
+ }
+
+ @Test
+ public void reflogFileDirectoryConflict() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, zeroId(), "refs/heads/master", DELETE),
+ new ReceiveCommand(zeroId(), A, "refs/heads/master/x", CREATE));
+ execute(newBatchUpdate(cmds).setRefLogMessage("a reflog", false));
+
+ assertResults(cmds, OK, OK);
+ assertRefs("refs/heads/master/x", A);
+ assertNull(getLastReflog("refs/heads/master"));
+ assertReflogEquals(
+ reflog(zeroId(), A, new PersonIdent(diskRepo), "a reflog"),
+ getLastReflog("refs/heads/master/x"));
+ }
+
+ @Test
+ public void reflogOnLockFailure() throws IOException {
+ writeRef("refs/heads/master", A);
+
+ Map<String, ReflogEntry> oldLogs =
+ getLastReflogs("refs/heads/master", "refs/heads/branch");
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(A, B, "refs/heads/branch", UPDATE));
+ execute(newBatchUpdate(cmds).setRefLogMessage("a reflog", false));
+
+ if (atomic) {
+ assertResults(cmds, TRANSACTION_ABORTED, LOCK_FAILURE);
+ assertReflogUnchanged(oldLogs, "refs/heads/master");
+ assertReflogUnchanged(oldLogs, "refs/heads/branch");
+ } else {
+ assertResults(cmds, OK, LOCK_FAILURE);
+ assertReflogEquals(
+ reflog(A, B, new PersonIdent(diskRepo), "a reflog"),
+ getLastReflog("refs/heads/master"));
+ assertReflogUnchanged(oldLogs, "refs/heads/branch");
+ }
+ }
+
+ @Test
+ public void overrideRefLogMessage() throws Exception {
+ writeRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+ cmds.get(0).setRefLogMessage("custom log", false);
+ PersonIdent ident = new PersonIdent(diskRepo);
+ execute(
+ newBatchUpdate(cmds)
+ .setRefLogIdent(ident)
+ .setRefLogMessage("a reflog", true));
+
+ assertResults(cmds, OK, OK);
+ assertReflogEquals(
+ reflog(A, B, ident, "custom log"),
+ getLastReflog("refs/heads/master"),
+ true);
+ assertReflogEquals(
+ reflog(zeroId(), B, ident, "a reflog: created"),
+ getLastReflog("refs/heads/branch"),
+ true);
+ }
+
+ @Test
+ public void overrideDisableRefLog() throws Exception {
+ writeRef("refs/heads/master", A);
+
+ Map<String, ReflogEntry> oldLogs =
+ getLastReflogs("refs/heads/master", "refs/heads/branch");
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+ cmds.get(0).disableRefLog();
+ execute(newBatchUpdate(cmds).setRefLogMessage("a reflog", true));
+
+ assertResults(cmds, OK, OK);
+ assertReflogUnchanged(oldLogs, "refs/heads/master");
+ assertReflogEquals(
+ reflog(zeroId(), B, new PersonIdent(diskRepo), "a reflog: created"),
+ getLastReflog("refs/heads/branch"));
+ }
+
+ @Test
+ public void packedRefsLockFailure() throws Exception {
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+
+ LockFile myLock = refdir.lockPackedRefs();
+ try {
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertFalse(getLockFile("refs/heads/master").exists());
+ assertFalse(getLockFile("refs/heads/branch").exists());
+
+ if (atomic) {
+ assertResults(cmds, LOCK_FAILURE, TRANSACTION_ABORTED);
+ assertRefs("refs/heads/master", A);
+ } else {
+ // Only operates on loose refs, doesn't care that packed-refs is locked.
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch", B);
+ }
+ } finally {
+ myLock.unlock();
+ }
+ }
+
+ @Test
+ public void oneRefLockFailure() throws Exception {
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE),
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE));
+
+ LockFile myLock = new LockFile(refdir.fileFor("refs/heads/master"));
+ assertTrue(myLock.lock());
+ try {
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertFalse(LockFile.getLockFile(refdir.packedRefsFile).exists());
+ assertFalse(getLockFile("refs/heads/branch").exists());
+
+ if (atomic) {
+ assertResults(cmds, TRANSACTION_ABORTED, LOCK_FAILURE);
+ assertRefs("refs/heads/master", A);
+ } else {
+ assertResults(cmds, OK, LOCK_FAILURE);
+ assertRefs(
+ "refs/heads/branch", B,
+ "refs/heads/master", A);
+ }
+ } finally {
+ myLock.unlock();
+ }
+ }
+
+ @Test
+ public void singleRefUpdateDoesNotRequirePackedRefsLock() throws Exception {
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE));
+
+ LockFile myLock = refdir.lockPackedRefs();
+ try {
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+
+ assertFalse(getLockFile("refs/heads/master").exists());
+ assertResults(cmds, OK);
+ assertRefs("refs/heads/master", B);
+ } finally {
+ myLock.unlock();
+ }
+ }
+
+ @Test
+ public void atomicUpdateRespectsInProcessLock() throws Exception {
+ assumeTrue(atomic);
+
+ writeLooseRef("refs/heads/master", A);
+
+ List<ReceiveCommand> cmds = Arrays.asList(
+ new ReceiveCommand(A, B, "refs/heads/master", UPDATE),
+ new ReceiveCommand(zeroId(), B, "refs/heads/branch", CREATE));
+
+ Thread t = new Thread(() -> {
+ try {
+ execute(newBatchUpdate(cmds).setAllowNonFastForwards(true));
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ });
+
+ ReentrantLock l = refdir.inProcessPackedRefsLock;
+ l.lock();
+ try {
+ t.start();
+ long timeoutSecs = 10;
+ long startNanos = System.nanoTime();
+
+ // Hold onto the lock until we observe the worker thread has attempted to
+ // acquire it.
+ while (l.getQueueLength() == 0) {
+ long elapsedNanos = System.nanoTime() - startNanos;
+ assertTrue(
+ "timed out waiting for work thread to attempt to acquire lock",
+ NANOSECONDS.toSeconds(elapsedNanos) < timeoutSecs);
+ Thread.sleep(3);
+ }
+
+ // Once we unlock, the worker thread should finish the update promptly.
+ l.unlock();
+ t.join(SECONDS.toMillis(timeoutSecs));
+ assertFalse(t.isAlive());
+ } finally {
+ if (l.isHeldByCurrentThread()) {
+ l.unlock();
+ }
+ }
+
+ assertResults(cmds, OK, OK);
+ assertRefs(
+ "refs/heads/master", B,
+ "refs/heads/branch", B);
+ }
+
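+ /**
+  * Writes a loose ref by creating the file directly, bypassing RefUpdate
+  * and thus any reflog entry, in contrast to writeRef() below.
+  */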
+ private void writeLooseRef(String name, AnyObjectId id) throws IOException {
+ write(new File(diskRepo.getDirectory(), name), id.name() + "\n");
+ }
+
+ private void writeRef(String name, AnyObjectId id) throws IOException {
+ RefUpdate u = diskRepo.updateRef(name);
+ u.setRefLogMessage(getClass().getSimpleName(), false);
+ u.setForceUpdate(true);
+ u.setNewObjectId(id);
+ RefUpdate.Result r = u.update();
+ switch (r) {
+ case NEW:
+ case FORCED:
+ return;
+ default:
+ throw new IOException("Got " + r + " while updating " + name);
+ }
+ }
+
+ private BatchRefUpdate newBatchUpdate(List<ReceiveCommand> cmds) {
+ BatchRefUpdate u = refdir.newBatchUpdate();
+ if (atomic) {
+ assertTrue(u.isAtomic());
+ } else {
+ u.setAtomic(false);
+ }
+ u.addCommand(cmds);
+ return u;
+ }
+
+ private void execute(BatchRefUpdate u) throws IOException {
+ execute(u, false);
+ }
+
+ private void execute(BatchRefUpdate u, boolean strictWork) throws IOException {
+ try (RevWalk rw = new RevWalk(diskRepo)) {
+ u.execute(rw,
+ strictWork ? new StrictWorkMonitor() : NullProgressMonitor.INSTANCE);
+ }
+ }
+
+ private void assertRefs(Object... args) throws IOException {
+ if (args.length % 2 != 0) {
+ throw new IllegalArgumentException(
+ "expected even number of args: " + Arrays.toString(args));
+ }
+
+ Map<String, AnyObjectId> expected = new LinkedHashMap<>();
+ for (int i = 0; i < args.length; i += 2) {
+ expected.put((String) args[i], (AnyObjectId) args[i + 1]);
+ }
+
+ Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
+ Ref actualHead = refs.remove(Constants.HEAD);
+ if (actualHead != null) {
+ String actualLeafName = actualHead.getLeaf().getName();
+ assertEquals(
+ "expected HEAD to point to refs/heads/master, got: " + actualLeafName,
+ "refs/heads/master", actualLeafName);
+ AnyObjectId expectedMaster = expected.get("refs/heads/master");
+ assertNotNull("expected master ref since HEAD exists", expectedMaster);
+ assertEquals(expectedMaster, actualHead.getObjectId());
+ }
+
+ Map<String, AnyObjectId> actual = new LinkedHashMap<>();
+ refs.forEach((n, r) -> actual.put(n, r.getObjectId()));
+
+ assertEquals(expected.keySet(), actual.keySet());
+ actual.forEach((n, a) -> assertEquals(n, expected.get(n), a));
+ }
+
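+ /**
+  * Expected command outcomes, expressed as predicates over the resulting
+  * ReceiveCommand so that TRANSACTION_ABORTED can be matched via
+  * ReceiveCommand.isTransactionAborted() rather than by a single
+  * ReceiveCommand.Result value.
+  */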
+ enum Result {
+ OK(ReceiveCommand.Result.OK),
+ LOCK_FAILURE(ReceiveCommand.Result.LOCK_FAILURE),
+ REJECTED_NONFASTFORWARD(ReceiveCommand.Result.REJECTED_NONFASTFORWARD),
+ REJECTED_MISSING_OBJECT(ReceiveCommand.Result.REJECTED_MISSING_OBJECT),
+ TRANSACTION_ABORTED(ReceiveCommand::isTransactionAborted);
+
+ final Predicate<? super ReceiveCommand> p;
+
+ private Result(Predicate<? super ReceiveCommand> p) {
+ this.p = p;
+ }
+
+ private Result(ReceiveCommand.Result result) {
+ this(c -> c.getResult() == result);
+ }
+ }
+
+ private void assertResults(
+ List<ReceiveCommand> cmds, Result... expected) {
+ if (expected.length != cmds.size()) {
+ throw new IllegalArgumentException(
+ "expected " + cmds.size() + " result args");
+ }
+ for (int i = 0; i < cmds.size(); i++) {
+ ReceiveCommand c = cmds.get(i);
+ Result r = expected[i];
+ assertTrue(
+ String.format(
+ "result of command (%d) should be %s: %s %s%s",
+ Integer.valueOf(i), r, c,
+ c.getResult(),
+ c.getMessage() != null ? " (" + c.getMessage() + ")" : ""),
+ r.p.test(c));
+ }
+ }
+
+ private Map<String, ReflogEntry> getLastReflogs(String... names)
+ throws IOException {
+ Map<String, ReflogEntry> result = new LinkedHashMap<>();
+ for (String name : names) {
+ ReflogEntry e = getLastReflog(name);
+ if (e != null) {
+ result.put(name, e);
+ }
+ }
+ return result;
+ }
+
+ private ReflogEntry getLastReflog(String name) throws IOException {
+ ReflogReader r = diskRepo.getReflogReader(name);
+ if (r == null) {
+ return null;
+ }
+ return r.getLastEntry();
+ }
+
+ private File getLockFile(String refName) {
+ return LockFile.getLockFile(refdir.fileFor(refName));
+ }
+
+ private void assertReflogUnchanged(
+ Map<String, ReflogEntry> old, String name) throws IOException {
+ assertReflogEquals(old.get(name), getLastReflog(name), true);
+ }
+
+ private static void assertReflogEquals(
+ ReflogEntry expected, ReflogEntry actual) {
+ assertReflogEquals(expected, actual, false);
+ }
+
+ private static void assertReflogEquals(
+ ReflogEntry expected, ReflogEntry actual, boolean strictTime) {
+ if (expected == null) {
+ assertNull(actual);
+ return;
+ }
+ assertNotNull(actual);
+ assertEquals(expected.getOldId(), actual.getOldId());
+ assertEquals(expected.getNewId(), actual.getNewId());
+ if (strictTime) {
+ assertEquals(expected.getWho(), actual.getWho());
+ } else {
+ assertEquals(expected.getWho().getName(), actual.getWho().getName());
+ assertEquals(
+ expected.getWho().getEmailAddress(),
+ actual.getWho().getEmailAddress());
+ }
+ assertEquals(expected.getComment(), actual.getComment());
+ }
+
+ private static ReflogEntry reflog(ObjectId oldId, ObjectId newId,
+ PersonIdent who, String comment) {
+ return new ReflogEntry() {
+ @Override
+ public ObjectId getOldId() {
+ return oldId;
+ }
+
+ @Override
+ public ObjectId getNewId() {
+ return newId;
+ }
+
+ @Override
+ public PersonIdent getWho() {
+ return who;
+ }
+
+ @Override
+ public String getComment() {
+ return comment;
+ }
+
+ @Override
+ public CheckoutEntry parseCheckout() {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcConcurrentTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcConcurrentTest.java
index ebb5a4f..643bb49 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcConcurrentTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/GcConcurrentTest.java
@@ -45,8 +45,12 @@
import static java.lang.Integer.valueOf;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
@@ -56,8 +60,14 @@
import java.util.concurrent.TimeUnit;
import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.pack.PackWriter;
+import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.EmptyProgressMonitor;
+import org.eclipse.jgit.lib.NullProgressMonitor;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Sets;
import org.eclipse.jgit.revwalk.RevBlob;
+import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Test;
public class GcConcurrentTest extends GcTestCase {
@@ -118,4 +128,97 @@ public Integer call() throws Exception {
pool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
}
}
+
+ @Test
+ public void repackAndGetStats() throws Exception {
+ TestRepository<FileRepository>.BranchBuilder test = tr.branch("test");
+ test.commit().add("a", "a").create();
+ GC gc1 = new GC(tr.getRepository());
+ gc1.setPackExpireAgeMillis(0);
+ gc1.gc();
+ test.commit().add("b", "b").create();
+
+ // Create a new Repository instance and trigger a gc
+ // from that instance. Reusing the existing repo instance
+ // tr.getRepository() would not show the problem.
+ FileRepository r2 = new FileRepository(
+ tr.getRepository().getDirectory());
+ GC gc2 = new GC(r2);
+ gc2.setPackExpireAgeMillis(0);
+ gc2.gc();
+
+ new GC(tr.getRepository()).getStatistics();
+ }
+
+ @Test
+ public void repackAndUploadPack() throws Exception {
+ TestRepository<FileRepository>.BranchBuilder test = tr.branch("test");
+ // RevCommit a = test.commit().add("a", "a").create();
+ test.commit().add("a", "a").create();
+
+ GC gc1 = new GC(tr.getRepository());
+ gc1.setPackExpireAgeMillis(0);
+ gc1.gc();
+
+ RevCommit b = test.commit().add("b", "b").create();
+
+ FileRepository r2 = new FileRepository(
+ tr.getRepository().getDirectory());
+ GC gc2 = new GC(r2);
+ gc2.setPackExpireAgeMillis(0);
+ gc2.gc();
+
+ // Simulate parts of an UploadPack. This is the situation on the
+ // server side (e.g. Gerrit) when clients are cloning/fetching while
+ // the server-side repos are gc'ed by an external process (e.g. a
+ // scheduled native git gc).
+ try (PackWriter pw = new PackWriter(tr.getRepository())) {
+ pw.setUseBitmaps(true);
+ pw.preparePack(NullProgressMonitor.INSTANCE, Sets.of(b),
+ Collections.<ObjectId> emptySet());
+ new GC(tr.getRepository()).getStatistics();
+ }
+ }
+
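+ /** Returns the only packfile of the repository, asserting there is exactly one. */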
+ PackFile getSinglePack(FileRepository r) {
+ Collection<PackFile> packs = r.getObjectDatabase().getPacks();
+ assertEquals(1, packs.size());
+ return packs.iterator().next();
+ }
+
+ @Test
+ public void repackAndCheckBitmapUsage() throws Exception {
+ // Create a test repository with one commit and pack all objects. After
+ // packing, create loose objects to trigger creation of a new packfile
+ // on the next gc.
+ TestRepository<FileRepository>.BranchBuilder test = tr.branch("test");
+ test.commit().add("a", "a").create();
+ FileRepository repository = tr.getRepository();
+ GC gc1 = new GC(repository);
+ gc1.setPackExpireAgeMillis(0);
+ gc1.gc();
+ String oldPackName = getSinglePack(repository).getPackName();
+ RevCommit b = test.commit().add("b", "b").create();
+
+ // Start the garbage collection on a new repository instance.
+ FileRepository repository2 = new FileRepository(repository.getDirectory());
+ GC gc2 = new GC(repository2);
+ gc2.setPackExpireAgeMillis(0);
+ gc2.gc();
+ String newPackName = getSinglePack(repository2).getPackName();
+ // make sure gc() has caused creation of a new packfile
+ assertNotEquals(oldPackName, newPackName);
+
+ // Even when asking again for the set of packfiles, outdated data will
+ // be returned. As long as the repository can work on cached data it
+ // will do so and will not detect that a new packfile exists.
+ assertNotEquals(getSinglePack(repository).getPackName(), newPackName);
+
+ // Only when object content is accessed does the pack directory get
+ // rescanned, and only then is the new packfile detected.
+ repository.getObjectDatabase().open(b).getSize();
+ assertEquals(getSinglePack(repository).getPackName(), newPackName);
+ assertNotNull(getSinglePack(repository).getBitmapIndex());
+ }
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
index 97130f2..fefccf3 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefDirectoryTest.java
@@ -46,7 +46,6 @@
import static org.eclipse.jgit.lib.Constants.HEAD;
import static org.eclipse.jgit.lib.Constants.R_HEADS;
import static org.eclipse.jgit.lib.Constants.R_TAGS;
-import static org.eclipse.jgit.lib.ObjectId.zeroId;
import static org.eclipse.jgit.lib.Ref.Storage.LOOSE;
import static org.eclipse.jgit.lib.Ref.Storage.NEW;
import static org.junit.Assert.assertEquals;
@@ -62,31 +61,27 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
+import org.eclipse.jgit.errors.LockFailedException;
import org.eclipse.jgit.events.ListenerHandle;
import org.eclipse.jgit.events.RefsChangedEvent;
import org.eclipse.jgit.events.RefsChangedListener;
import org.eclipse.jgit.junit.LocalDiskRepositoryTestCase;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.AnyObjectId;
-import org.eclipse.jgit.lib.BatchRefUpdate;
-import org.eclipse.jgit.lib.NullProgressMonitor;
-import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Ref.Storage;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTag;
-import org.eclipse.jgit.revwalk.RevWalk;
-import org.eclipse.jgit.transport.ReceiveCommand;
import org.junit.Before;
import org.junit.Test;
+@SuppressWarnings("boxing")
public class RefDirectoryTest extends LocalDiskRepositoryTestCase {
private Repository diskRepo;
@@ -1293,120 +1288,20 @@ public void onRefsChanged(RefsChangedEvent event) {
}
@Test
- public void testBatchRefUpdateSimpleNoForce() throws IOException {
+ public void testPackedRefsLockFailure() throws Exception {
writeLooseRef("refs/heads/master", A);
- writeLooseRef("refs/heads/masters", B);
- List<ReceiveCommand> commands = Arrays.asList(
- new ReceiveCommand(A, B, "refs/heads/master",
- ReceiveCommand.Type.UPDATE),
- new ReceiveCommand(B, A, "refs/heads/masters",
- ReceiveCommand.Type.UPDATE_NONFASTFORWARD));
- BatchRefUpdate batchUpdate = refdir.newBatchUpdate();
- batchUpdate.addCommand(commands);
- batchUpdate.execute(new RevWalk(diskRepo), new StrictWorkMonitor());
- Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
- assertEquals(ReceiveCommand.Result.OK, commands.get(0).getResult());
- assertEquals(ReceiveCommand.Result.REJECTED_NONFASTFORWARD, commands
- .get(1).getResult());
- assertEquals("[HEAD, refs/heads/master, refs/heads/masters]", refs
- .keySet().toString());
- assertEquals(B.getId(), refs.get("refs/heads/master").getObjectId());
- assertEquals(B.getId(), refs.get("refs/heads/masters").getObjectId());
- }
-
- @Test
- public void testBatchRefUpdateSimpleForce() throws IOException {
- writeLooseRef("refs/heads/master", A);
- writeLooseRef("refs/heads/masters", B);
- List<ReceiveCommand> commands = Arrays.asList(
- new ReceiveCommand(A, B, "refs/heads/master",
- ReceiveCommand.Type.UPDATE),
- new ReceiveCommand(B, A, "refs/heads/masters",
- ReceiveCommand.Type.UPDATE_NONFASTFORWARD));
- BatchRefUpdate batchUpdate = refdir.newBatchUpdate();
- batchUpdate.setAllowNonFastForwards(true);
- batchUpdate.addCommand(commands);
- batchUpdate.execute(new RevWalk(diskRepo), new StrictWorkMonitor());
- Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
- assertEquals(ReceiveCommand.Result.OK, commands.get(0).getResult());
- assertEquals(ReceiveCommand.Result.OK, commands.get(1).getResult());
- assertEquals("[HEAD, refs/heads/master, refs/heads/masters]", refs
- .keySet().toString());
- assertEquals(B.getId(), refs.get("refs/heads/master").getObjectId());
- assertEquals(A.getId(), refs.get("refs/heads/masters").getObjectId());
- }
-
- @Test
- public void testBatchRefUpdateNonFastForwardDoesNotDoExpensiveMergeCheck()
- throws IOException {
- writeLooseRef("refs/heads/master", B);
- List<ReceiveCommand> commands = Arrays.asList(
- new ReceiveCommand(B, A, "refs/heads/master",
- ReceiveCommand.Type.UPDATE_NONFASTFORWARD));
- BatchRefUpdate batchUpdate = refdir.newBatchUpdate();
- batchUpdate.setAllowNonFastForwards(true);
- batchUpdate.addCommand(commands);
- batchUpdate.execute(new RevWalk(diskRepo) {
- @Override
- public boolean isMergedInto(RevCommit base, RevCommit tip) {
- throw new AssertionError("isMergedInto() should not be called");
- }
- }, new StrictWorkMonitor());
- Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
- assertEquals(ReceiveCommand.Result.OK, commands.get(0).getResult());
- assertEquals(A.getId(), refs.get("refs/heads/master").getObjectId());
- }
-
- @Test
- public void testBatchRefUpdateConflict() throws IOException {
- writeLooseRef("refs/heads/master", A);
- writeLooseRef("refs/heads/masters", B);
- List<ReceiveCommand> commands = Arrays.asList(
- new ReceiveCommand(A, B, "refs/heads/master",
- ReceiveCommand.Type.UPDATE),
- new ReceiveCommand(zeroId(), A, "refs/heads/master/x",
- ReceiveCommand.Type.CREATE),
- new ReceiveCommand(zeroId(), A, "refs/heads",
- ReceiveCommand.Type.CREATE));
- BatchRefUpdate batchUpdate = refdir.newBatchUpdate();
- batchUpdate.setAllowNonFastForwards(true);
- batchUpdate.addCommand(commands);
- batchUpdate
- .execute(new RevWalk(diskRepo), NullProgressMonitor.INSTANCE);
- Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
- assertEquals(ReceiveCommand.Result.OK, commands.get(0).getResult());
- assertEquals(ReceiveCommand.Result.LOCK_FAILURE, commands.get(1)
- .getResult());
- assertEquals(ReceiveCommand.Result.LOCK_FAILURE, commands.get(2)
- .getResult());
- assertEquals("[HEAD, refs/heads/master, refs/heads/masters]", refs
- .keySet().toString());
- assertEquals(B.getId(), refs.get("refs/heads/master").getObjectId());
- assertEquals(B.getId(), refs.get("refs/heads/masters").getObjectId());
- }
-
- @Test
- public void testBatchRefUpdateConflictThanksToDelete() throws IOException {
- writeLooseRef("refs/heads/master", A);
- writeLooseRef("refs/heads/masters", B);
- List<ReceiveCommand> commands = Arrays.asList(
- new ReceiveCommand(A, B, "refs/heads/master",
- ReceiveCommand.Type.UPDATE),
- new ReceiveCommand(zeroId(), A, "refs/heads/masters/x",
- ReceiveCommand.Type.CREATE),
- new ReceiveCommand(B, zeroId(), "refs/heads/masters",
- ReceiveCommand.Type.DELETE));
- BatchRefUpdate batchUpdate = refdir.newBatchUpdate();
- batchUpdate.setAllowNonFastForwards(true);
- batchUpdate.addCommand(commands);
- batchUpdate.execute(new RevWalk(diskRepo), new StrictWorkMonitor());
- Map<String, Ref> refs = refdir.getRefs(RefDatabase.ALL);
- assertEquals(ReceiveCommand.Result.OK, commands.get(0).getResult());
- assertEquals(ReceiveCommand.Result.OK, commands.get(1).getResult());
- assertEquals(ReceiveCommand.Result.OK, commands.get(2).getResult());
- assertEquals("[HEAD, refs/heads/master, refs/heads/masters/x]", refs
- .keySet().toString());
- assertEquals(A.getId(), refs.get("refs/heads/masters/x").getObjectId());
+ refdir.setRetrySleepMs(Arrays.asList(0, 0));
+ LockFile myLock = refdir.lockPackedRefs();
+ try {
+ refdir.pack(Arrays.asList("refs/heads/master"));
+ fail("expected LockFailedException");
+ } catch (LockFailedException e) {
+ assertEquals(refdir.packedRefsFile.getPath(), e.getFile().getPath());
+ } finally {
+ myLock.unlock();
+ }
+ Ref ref = refdir.getRef("refs/heads/master");
+ assertEquals(Storage.LOOSE, ref.getStorage());
}
private void writeLooseRef(String name, AnyObjectId id) throws IOException {
@@ -1434,34 +1329,4 @@ private void deleteLooseRef(String name) {
File path = new File(diskRepo.getDirectory(), name);
assertTrue("deleted " + name, path.delete());
}
-
- private static final class StrictWorkMonitor implements ProgressMonitor {
- private int lastWork, totalWork;
-
- @Override
- public void start(int totalTasks) {
- // empty
- }
-
- @Override
- public void beginTask(String title, int total) {
- this.totalWork = total;
- lastWork = 0;
- }
-
- @Override
- public void update(int completed) {
- lastWork += completed;
- }
-
- @Override
- public void endTask() {
- assertEquals("Units of work recorded", totalWork, lastWork);
- }
-
- @Override
- public boolean isCancelled() {
- return false;
- }
- }
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefUpdateTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefUpdateTest.java
index 1203e83..34f9eb9 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefUpdateTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/RefUpdateTest.java
@@ -58,12 +58,10 @@
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
-import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
@@ -969,27 +967,10 @@ public void testCreateMissingObject() throws IOException {
RefUpdate ru = db.updateRef(name);
ru.setNewObjectId(bad);
Result update = ru.update();
- assertEquals(Result.NEW, update);
+ assertEquals(Result.REJECTED_MISSING_OBJECT, update);
Ref ref = db.exactRef(name);
- assertNotNull(ref);
- assertFalse(ref.isPeeled());
- assertEquals(bad, ref.getObjectId());
-
- try (RevWalk rw = new RevWalk(db)) {
- rw.parseAny(ref.getObjectId());
- fail("Expected MissingObjectException");
- } catch (MissingObjectException expected) {
- assertEquals(bad, expected.getObjectId());
- }
-
- RefDirectory refdir = (RefDirectory) db.getRefDatabase();
- try {
- // Packing requires peeling, which fails.
- refdir.pack(Arrays.asList(name));
- } catch (MissingObjectException expected) {
- assertEquals(bad, expected.getObjectId());
- }
+ assertNull(ref);
}
@Test
@@ -1005,7 +986,7 @@ public void testUpdateMissingObject() throws IOException {
ru = db.updateRef(name);
ru.setNewObjectId(bad);
update = ru.update();
- assertEquals(Result.REJECTED, update);
+ assertEquals(Result.REJECTED_MISSING_OBJECT, update);
Ref ref = db.exactRef(name);
assertNotNull(ref);
@@ -1018,33 +999,18 @@ public void testForceUpdateMissingObject() throws IOException {
RefUpdate ru = updateRef(name);
Result update = ru.update();
assertEquals(Result.NEW, update);
+ ObjectId oldId = ru.getNewObjectId();
ObjectId bad =
ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
ru = db.updateRef(name);
ru.setNewObjectId(bad);
update = ru.forceUpdate();
- assertEquals(Result.FORCED, update);
+ assertEquals(Result.REJECTED_MISSING_OBJECT, update);
Ref ref = db.exactRef(name);
assertNotNull(ref);
- assertFalse(ref.isPeeled());
- assertEquals(bad, ref.getObjectId());
-
- try (RevWalk rw = new RevWalk(db)) {
- rw.parseAny(ref.getObjectId());
- fail("Expected MissingObjectException");
- } catch (MissingObjectException expected) {
- assertEquals(bad, expected.getObjectId());
- }
-
- RefDirectory refdir = (RefDirectory) db.getRefDatabase();
- try {
- // Packing requires peeling, which fails.
- refdir.pack(Arrays.asList(name));
- } catch (MissingObjectException expected) {
- assertEquals(bad, expected.getObjectId());
- }
+ assertEquals(oldId, ref.getObjectId());
}
private static void writeReflog(Repository db, ObjectId newId, String msg,
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/T0003_BasicTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/T0003_BasicTest.java
index ae1e531..9d23d83 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/T0003_BasicTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/file/T0003_BasicTest.java
@@ -661,33 +661,39 @@ public void test027_UnpackedRefHigherPriorityThanPacked()
@Test
public void test028_LockPackedRef() throws IOException {
+ ObjectId id1;
+ ObjectId id2;
+ try (ObjectInserter ins = db.newObjectInserter()) {
+ id1 = ins.insert(
+ Constants.OBJ_BLOB, "contents1".getBytes(Constants.CHARSET));
+ id2 = ins.insert(
+ Constants.OBJ_BLOB, "contents2".getBytes(Constants.CHARSET));
+ ins.flush();
+ }
+
writeTrashFile(".git/packed-refs",
- "7f822839a2fe9760f386cbbbcb3f92c5fe81def7 refs/heads/foobar");
+ id1.name() + " refs/heads/foobar");
writeTrashFile(".git/HEAD", "ref: refs/heads/foobar\n");
BUG_WorkAroundRacyGitIssues("packed-refs");
BUG_WorkAroundRacyGitIssues("HEAD");
ObjectId resolve = db.resolve("HEAD");
- assertEquals("7f822839a2fe9760f386cbbbcb3f92c5fe81def7", resolve.name());
+ assertEquals(id1, resolve);
RefUpdate lockRef = db.updateRef("HEAD");
- ObjectId newId = ObjectId
- .fromString("07f822839a2fe9760f386cbbbcb3f92c5fe81def");
- lockRef.setNewObjectId(newId);
+ lockRef.setNewObjectId(id2);
assertEquals(RefUpdate.Result.FORCED, lockRef.forceUpdate());
assertTrue(new File(db.getDirectory(), "refs/heads/foobar").exists());
- assertEquals(newId, db.resolve("refs/heads/foobar"));
+ assertEquals(id2, db.resolve("refs/heads/foobar"));
// Again. The ref already exists
RefUpdate lockRef2 = db.updateRef("HEAD");
- ObjectId newId2 = ObjectId
- .fromString("7f822839a2fe9760f386cbbbcb3f92c5fe81def7");
- lockRef2.setNewObjectId(newId2);
+ lockRef2.setNewObjectId(id1);
assertEquals(RefUpdate.Result.FORCED, lockRef2.forceUpdate());
assertTrue(new File(db.getDirectory(), "refs/heads/foobar").exists());
- assertEquals(newId2, db.resolve("refs/heads/foobar"));
+ assertEquals(id1, db.resolve("refs/heads/foobar"));
}
@Test
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/MergedReftableTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/MergedReftableTest.java
new file mode 100644
index 0000000..f9ebaf6
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/MergedReftableTest.java
@@ -0,0 +1,351 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Constants.R_HEADS;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefComparator;
+import org.junit.Test;
+
+public class MergedReftableTest {
+ @Test
+ public void noTables() throws IOException {
+ MergedReftable mr = merge(new byte[0][]);
+ try (RefCursor rc = mr.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(HEAD)) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(R_HEADS)) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void oneEmptyTable() throws IOException {
+ MergedReftable mr = merge(write());
+ try (RefCursor rc = mr.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(HEAD)) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(R_HEADS)) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void twoEmptyTables() throws IOException {
+ MergedReftable mr = merge(write(), write());
+ try (RefCursor rc = mr.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(HEAD)) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = mr.seekRef(R_HEADS)) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void oneTableScan() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 567; i++) {
+ refs.add(ref(String.format("refs/heads/%03d", i), i));
+ }
+
+ MergedReftable mr = merge(write(refs));
+ try (RefCursor rc = mr.allRefs()) {
+ for (Ref exp : refs) {
+ assertTrue("has " + exp.getName(), rc.next());
+ Ref act = rc.getRef();
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ }
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void deleteIsHidden() throws IOException {
+ List<Ref> delta1 = Arrays.asList(
+ ref("refs/heads/apple", 1),
+ ref("refs/heads/master", 2));
+ List<Ref> delta2 = Arrays.asList(delete("refs/heads/apple"));
+
+ MergedReftable mr = merge(write(delta1), write(delta2));
+ try (RefCursor rc = mr.allRefs()) {
+ assertTrue(rc.next());
+ assertEquals("refs/heads/master", rc.getRef().getName());
+ assertEquals(id(2), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void twoTableSeek() throws IOException {
+ List<Ref> delta1 = Arrays.asList(
+ ref("refs/heads/apple", 1),
+ ref("refs/heads/master", 2));
+ List<Ref> delta2 = Arrays.asList(ref("refs/heads/banana", 3));
+
+ MergedReftable mr = merge(write(delta1), write(delta2));
+ try (RefCursor rc = mr.seekRef("refs/heads/master")) {
+ assertTrue(rc.next());
+ assertEquals("refs/heads/master", rc.getRef().getName());
+ assertEquals(id(2), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void twoTableById() throws IOException {
+ List<Ref> delta1 = Arrays.asList(
+ ref("refs/heads/apple", 1),
+ ref("refs/heads/master", 2));
+ List<Ref> delta2 = Arrays.asList(ref("refs/heads/banana", 3));
+
+ MergedReftable mr = merge(write(delta1), write(delta2));
+ try (RefCursor rc = mr.byObjectId(id(2))) {
+ assertTrue(rc.next());
+ assertEquals("refs/heads/master", rc.getRef().getName());
+ assertEquals(id(2), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void fourTableScan() throws IOException {
+ List<Ref> base = new ArrayList<>();
+ for (int i = 1; i <= 567; i++) {
+ base.add(ref(String.format("refs/heads/%03d", i), i));
+ }
+
+ List<Ref> delta1 = Arrays.asList(
+ ref("refs/heads/next", 4),
+ ref(String.format("refs/heads/%03d", 55), 4096));
+ List<Ref> delta2 = Arrays.asList(
+ delete("refs/heads/next"),
+ ref(String.format("refs/heads/%03d", 55), 8192));
+ List<Ref> delta3 = Arrays.asList(
+ ref("refs/heads/master", 4242),
+ ref(String.format("refs/heads/%03d", 42), 5120),
+ ref(String.format("refs/heads/%03d", 98), 6120));
+
+ List<Ref> expected = merge(base, delta1, delta2, delta3);
+ MergedReftable mr = merge(
+ write(base),
+ write(delta1),
+ write(delta2),
+ write(delta3));
+ try (RefCursor rc = mr.allRefs()) {
+ for (Ref exp : expected) {
+ assertTrue("has " + exp.getName(), rc.next());
+ Ref act = rc.getRef();
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ }
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void scanIncludeDeletes() throws IOException {
+ List<Ref> delta1 = Arrays.asList(ref("refs/heads/next", 4));
+ List<Ref> delta2 = Arrays.asList(delete("refs/heads/next"));
+ List<Ref> delta3 = Arrays.asList(ref("refs/heads/master", 8));
+
+ MergedReftable mr = merge(write(delta1), write(delta2), write(delta3));
+ mr.setIncludeDeletes(true);
+ try (RefCursor rc = mr.allRefs()) {
+ assertTrue(rc.next());
+ Ref r = rc.getRef();
+ assertEquals("refs/heads/master", r.getName());
+ assertEquals(id(8), r.getObjectId());
+
+ assertTrue(rc.next());
+ r = rc.getRef();
+ assertEquals("refs/heads/next", r.getName());
+ assertEquals(NEW, r.getStorage());
+ assertNull(r.getObjectId());
+
+ assertFalse(rc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void oneTableSeek() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 567; i++) {
+ refs.add(ref(String.format("refs/heads/%03d", i), i));
+ }
+
+ MergedReftable mr = merge(write(refs));
+ for (Ref exp : refs) {
+ try (RefCursor rc = mr.seekRef(exp.getName())) {
+ assertTrue("has " + exp.getName(), rc.next());
+ Ref act = rc.getRef();
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+ }
+
+ @Test
+ public void compaction() throws IOException {
+ List<Ref> delta1 = Arrays.asList(
+ ref("refs/heads/next", 4),
+ ref("refs/heads/master", 1));
+ List<Ref> delta2 = Arrays.asList(delete("refs/heads/next"));
+ List<Ref> delta3 = Arrays.asList(ref("refs/heads/master", 8));
+
+ ReftableCompactor compactor = new ReftableCompactor();
+ compactor.addAll(Arrays.asList(
+ read(write(delta1)),
+ read(write(delta2)),
+ read(write(delta3))));
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+ compactor.compact(out);
+ byte[] table = out.toByteArray();
+
+ ReftableReader reader = read(table);
+ try (RefCursor rc = reader.allRefs()) {
+ assertTrue(rc.next());
+ Ref r = rc.getRef();
+ assertEquals("refs/heads/master", r.getName());
+ assertEquals(id(8), r.getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+
+ private static MergedReftable merge(byte[]... table) {
+ List<Reftable> stack = new ArrayList<>(table.length);
+ for (byte[] b : table) {
+ stack.add(read(b));
+ }
+ return new MergedReftable(stack);
+ }
+
+ private static ReftableReader read(byte[] table) {
+ return new ReftableReader(BlockSource.from(table));
+ }
+
+ private static Ref ref(String name, int id) {
+ return new ObjectIdRef.PeeledNonTag(PACKED, name, id(id));
+ }
+
+ private static Ref delete(String name) {
+ return new ObjectIdRef.Unpeeled(NEW, name, null);
+ }
+
+ private static ObjectId id(int i) {
+ byte[] buf = new byte[OBJECT_ID_LENGTH];
+ buf[0] = (byte) (i & 0xff);
+ buf[1] = (byte) ((i >>> 8) & 0xff);
+ buf[2] = (byte) ((i >>> 16) & 0xff);
+ buf[3] = (byte) (i >>> 24);
+ return ObjectId.fromRaw(buf);
+ }
+
+ private byte[] write(Ref... refs) throws IOException {
+ return write(Arrays.asList(refs));
+ }
+
+ private byte[] write(Collection<Ref> refs) throws IOException {
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ ReftableWriter writer = new ReftableWriter().begin(buffer);
+ for (Ref r : RefComparator.sort(refs)) {
+ writer.writeRef(r);
+ }
+ writer.finish();
+ return buffer.toByteArray();
+ }
+
+ @SafeVarargs
+ private static List<Ref> merge(List<Ref>... tables) {
+ Map<String, Ref> expect = new HashMap<>();
+ for (List<Ref> t : tables) {
+ for (Ref r : t) {
+ if (r.getStorage() == NEW && r.getObjectId() == null) {
+ expect.remove(r.getName());
+ } else {
+ expect.put(r.getName(), r);
+ }
+ }
+ }
+
+ List<Ref> expected = new ArrayList<>(expect.values());
+ Collections.sort(expected, RefComparator.INSTANCE);
+ return expected;
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/ReftableTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/ReftableTest.java
new file mode 100644
index 0000000..6809d7b
--- /dev/null
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftable/ReftableTest.java
@@ -0,0 +1,725 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.lib.Constants.HEAD;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Constants.R_HEADS;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter.Stats;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.lib.SymbolicRef;
+import org.junit.Test;
+
+public class ReftableTest {
+ private static final String MASTER = "refs/heads/master";
+ private static final String NEXT = "refs/heads/next";
+ private static final String V1_0 = "refs/tags/v1.0";
+
+ private Stats stats;
+
+ @Test
+ public void emptyTable() throws IOException {
+ byte[] table = write();
+ assertEquals(92 /* header, footer */, table.length);
+ assertEquals('R', table[0]);
+ assertEquals('E', table[1]);
+ assertEquals('F', table[2]);
+ assertEquals('T', table[3]);
+ assertEquals(0x01, table[4]);
+ assertTrue(ReftableConstants.isFileHeaderMagic(table, 0, 8));
+ assertTrue(ReftableConstants.isFileHeaderMagic(table, 24, 92));
+
+ Reftable t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef(HEAD)) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef(R_HEADS)) {
+ assertFalse(rc.next());
+ }
+ try (LogCursor rc = t.allLogs()) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void emptyVirtualTableFromRefs() throws IOException {
+ Reftable t = Reftable.from(Collections.emptyList());
+ try (RefCursor rc = t.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef(HEAD)) {
+ assertFalse(rc.next());
+ }
+ try (LogCursor rc = t.allLogs()) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void estimateCurrentBytesOneRef() throws IOException {
+ Ref exp = ref(MASTER, 1);
+ int expBytes = 24 + 4 + 5 + 3 + MASTER.length() + 20 + 68;
+
+ byte[] table;
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setIndexObjects(false);
+ ReftableWriter writer = new ReftableWriter().setConfig(cfg);
+ try (ByteArrayOutputStream buf = new ByteArrayOutputStream()) {
+ writer.begin(buf);
+ assertEquals(92, writer.estimateTotalBytes());
+ writer.writeRef(exp);
+ assertEquals(expBytes, writer.estimateTotalBytes());
+ writer.finish();
+ table = buf.toByteArray();
+ }
+ assertEquals(expBytes, table.length);
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void estimateCurrentBytesWithIndex() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 5670; i++) {
+ refs.add(ref(String.format("refs/heads/%04d", i), i));
+ }
+
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setIndexObjects(false);
+ cfg.setMaxIndexLevels(1);
+
+ int expBytes = 139654;
+ byte[] table;
+ ReftableWriter writer = new ReftableWriter().setConfig(cfg);
+ try (ByteArrayOutputStream buf = new ByteArrayOutputStream()) {
+ writer.begin(buf);
+ writer.sortAndWriteRefs(refs);
+ assertEquals(expBytes, writer.estimateTotalBytes());
+ writer.finish();
+ stats = writer.getStats();
+ table = buf.toByteArray();
+ }
+ assertEquals(1, stats.refIndexLevels());
+ assertEquals(expBytes, table.length);
+ }
+
+ @Test
+ public void oneIdRef() throws IOException {
+ Ref exp = ref(MASTER, 1);
+ byte[] table = write(exp);
+ assertEquals(24 + 4 + 5 + 3 + MASTER.length() + 20 + 68, table.length);
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertTrue(rc.next());
+ Ref act = rc.getRef();
+ assertNotNull(act);
+ assertEquals(PACKED, act.getStorage());
+ assertTrue(act.isPeeled());
+ assertFalse(act.isSymbolic());
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ assertNull(act.getPeeledObjectId());
+ assertFalse(rc.wasDeleted());
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef(MASTER)) {
+ assertTrue(rc.next());
+ Ref act = rc.getRef();
+ assertNotNull(act);
+ assertEquals(exp.getName(), act.getName());
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void oneTagRef() throws IOException {
+ Ref exp = tag(V1_0, 1, 2);
+ byte[] table = write(exp);
+ assertEquals(24 + 4 + 5 + 2 + V1_0.length() + 40 + 68, table.length);
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertTrue(rc.next());
+ Ref act = rc.getRef();
+ assertNotNull(act);
+ assertEquals(PACKED, act.getStorage());
+ assertTrue(act.isPeeled());
+ assertFalse(act.isSymbolic());
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ assertEquals(exp.getPeeledObjectId(), act.getPeeledObjectId());
+ }
+ }
+
+ @Test
+ public void oneSymbolicRef() throws IOException {
+ Ref exp = sym(HEAD, MASTER);
+ byte[] table = write(exp);
+ assertEquals(
+ 24 + 4 + 5 + 2 + HEAD.length() + 1 + MASTER.length() + 68,
+ table.length);
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertTrue(rc.next());
+ Ref act = rc.getRef();
+ assertNotNull(act);
+ assertTrue(act.isSymbolic());
+ assertEquals(exp.getName(), act.getName());
+ assertNotNull(act.getLeaf());
+ assertEquals(MASTER, act.getTarget().getName());
+ assertNull(act.getObjectId());
+ }
+ }
+
+ @Test
+ public void resolveSymbolicRef() throws IOException {
+ Reftable t = read(write(
+ sym(HEAD, "refs/heads/tmp"),
+ sym("refs/heads/tmp", MASTER),
+ ref(MASTER, 1)));
+
+ Ref head = t.exactRef(HEAD);
+ assertNull(head.getObjectId());
+ assertEquals("refs/heads/tmp", head.getTarget().getName());
+
+ head = t.resolve(head);
+ assertNotNull(head);
+ assertEquals(id(1), head.getObjectId());
+
+ Ref master = t.exactRef(MASTER);
+ assertNotNull(master);
+ assertSame(master, t.resolve(master));
+ }
+
+ @Test
+ public void failDeepChainOfSymbolicRef() throws IOException {
+ Reftable t = read(write(
+ sym(HEAD, "refs/heads/1"),
+ sym("refs/heads/1", "refs/heads/2"),
+ sym("refs/heads/2", "refs/heads/3"),
+ sym("refs/heads/3", "refs/heads/4"),
+ sym("refs/heads/4", "refs/heads/5"),
+ sym("refs/heads/5", MASTER),
+ ref(MASTER, 1)));
+
+ Ref head = t.exactRef(HEAD);
+ assertNull(head.getObjectId());
+ assertNull(t.resolve(head));
+ }
+
+ @Test
+ public void oneDeletedRef() throws IOException {
+ String name = "refs/heads/gone";
+ Ref exp = newRef(name);
+ byte[] table = write(exp);
+ assertEquals(24 + 4 + 5 + 2 + name.length() + 68, table.length);
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertFalse(rc.next());
+ }
+
+ t.setIncludeDeletes(true);
+ try (RefCursor rc = t.allRefs()) {
+ assertTrue(rc.next());
+ Ref act = rc.getRef();
+ assertNotNull(act);
+ assertFalse(act.isSymbolic());
+ assertEquals(name, act.getName());
+ assertEquals(NEW, act.getStorage());
+ assertNull(act.getObjectId());
+ assertTrue(rc.wasDeleted());
+ }
+ }
+
+ @Test
+ public void seekNotFound() throws IOException {
+ Ref exp = ref(MASTER, 1);
+ ReftableReader t = read(write(exp));
+ try (RefCursor rc = t.seekRef("refs/heads/a")) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef("refs/heads/n")) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void namespaceNotFound() throws IOException {
+ Ref exp = ref(MASTER, 1);
+ ReftableReader t = read(write(exp));
+ try (RefCursor rc = t.seekRef("refs/changes/")) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef("refs/tags/")) {
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void namespaceHeads() throws IOException {
+ Ref master = ref(MASTER, 1);
+ Ref next = ref(NEXT, 2);
+ Ref v1 = tag(V1_0, 3, 4);
+
+ ReftableReader t = read(write(master, next, v1));
+ try (RefCursor rc = t.seekRef("refs/tags/")) {
+ assertTrue(rc.next());
+ assertEquals(V1_0, rc.getRef().getName());
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef("refs/heads/")) {
+ assertTrue(rc.next());
+ assertEquals(MASTER, rc.getRef().getName());
+
+ assertTrue(rc.next());
+ assertEquals(NEXT, rc.getRef().getName());
+
+ assertFalse(rc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void indexScan() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 5670; i++) {
+ refs.add(ref(String.format("refs/heads/%04d", i), i));
+ }
+
+ byte[] table = write(refs);
+ assertTrue(stats.refIndexLevels() > 0);
+ assertTrue(stats.refIndexSize() > 0);
+ assertScan(refs, read(table));
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void indexSeek() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 5670; i++) {
+ refs.add(ref(String.format("refs/heads/%04d", i), i));
+ }
+
+ byte[] table = write(refs);
+ assertTrue(stats.refIndexLevels() > 0);
+ assertTrue(stats.refIndexSize() > 0);
+ assertSeek(refs, read(table));
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void noIndexScan() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 567; i++) {
+ refs.add(ref(String.format("refs/heads/%03d", i), i));
+ }
+
+ byte[] table = write(refs);
+ assertEquals(0, stats.refIndexLevels());
+ assertEquals(0, stats.refIndexSize());
+ assertEquals(table.length, stats.totalBytes());
+ assertScan(refs, read(table));
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void noIndexSeek() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 567; i++) {
+ refs.add(ref(String.format("refs/heads/%03d", i), i));
+ }
+
+ byte[] table = write(refs);
+ assertEquals(0, stats.refIndexLevels());
+ assertSeek(refs, read(table));
+ }
+
+ @Test
+ public void withReflog() throws IOException {
+ Ref master = ref(MASTER, 1);
+ Ref next = ref(NEXT, 2);
+ PersonIdent who = new PersonIdent("Log", "Ger", 1500079709, -8 * 60);
+ String msg = "test";
+
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ ReftableWriter writer = new ReftableWriter()
+ .setMinUpdateIndex(1)
+ .setMaxUpdateIndex(1)
+ .begin(buffer);
+
+ writer.writeRef(master);
+ writer.writeRef(next);
+
+ writer.writeLog(MASTER, 1, who, ObjectId.zeroId(), id(1), msg);
+ writer.writeLog(NEXT, 1, who, ObjectId.zeroId(), id(2), msg);
+
+ writer.finish();
+ byte[] table = buffer.toByteArray();
+ assertEquals(245, table.length);
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertTrue(rc.next());
+ assertEquals(MASTER, rc.getRef().getName());
+ assertEquals(id(1), rc.getRef().getObjectId());
+
+ assertTrue(rc.next());
+ assertEquals(NEXT, rc.getRef().getName());
+ assertEquals(id(2), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ try (LogCursor lc = t.allLogs()) {
+ assertTrue(lc.next());
+ assertEquals(MASTER, lc.getRefName());
+ assertEquals(1, lc.getUpdateIndex());
+ assertEquals(ObjectId.zeroId(), lc.getReflogEntry().getOldId());
+ assertEquals(id(1), lc.getReflogEntry().getNewId());
+ assertEquals(who, lc.getReflogEntry().getWho());
+ assertEquals(msg, lc.getReflogEntry().getComment());
+
+ assertTrue(lc.next());
+ assertEquals(NEXT, lc.getRefName());
+ assertEquals(1, lc.getUpdateIndex());
+ assertEquals(ObjectId.zeroId(), lc.getReflogEntry().getOldId());
+ assertEquals(id(2), lc.getReflogEntry().getNewId());
+ assertEquals(who, lc.getReflogEntry().getWho());
+ assertEquals(msg, lc.getReflogEntry().getComment());
+
+ assertFalse(lc.next());
+ }
+ }
+
+ @Test
+ public void onlyReflog() throws IOException {
+ PersonIdent who = new PersonIdent("Log", "Ger", 1500079709, -8 * 60);
+ String msg = "test";
+
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ ReftableWriter writer = new ReftableWriter()
+ .setMinUpdateIndex(1)
+ .setMaxUpdateIndex(1)
+ .begin(buffer);
+ writer.writeLog(MASTER, 1, who, ObjectId.zeroId(), id(1), msg);
+ writer.writeLog(NEXT, 1, who, ObjectId.zeroId(), id(2), msg);
+ writer.finish();
+ byte[] table = buffer.toByteArray();
+ stats = writer.getStats();
+ assertEquals(170, table.length);
+ assertEquals(0, stats.refCount());
+ assertEquals(0, stats.refBytes());
+ assertEquals(0, stats.refIndexLevels());
+
+ ReftableReader t = read(table);
+ try (RefCursor rc = t.allRefs()) {
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.seekRef("refs/heads/")) {
+ assertFalse(rc.next());
+ }
+ try (LogCursor lc = t.allLogs()) {
+ assertTrue(lc.next());
+ assertEquals(MASTER, lc.getRefName());
+ assertEquals(1, lc.getUpdateIndex());
+ assertEquals(ObjectId.zeroId(), lc.getReflogEntry().getOldId());
+ assertEquals(id(1), lc.getReflogEntry().getNewId());
+ assertEquals(who, lc.getReflogEntry().getWho());
+ assertEquals(msg, lc.getReflogEntry().getComment());
+
+ assertTrue(lc.next());
+ assertEquals(NEXT, lc.getRefName());
+ assertEquals(1, lc.getUpdateIndex());
+ assertEquals(ObjectId.zeroId(), lc.getReflogEntry().getOldId());
+ assertEquals(id(2), lc.getReflogEntry().getNewId());
+ assertEquals(who, lc.getReflogEntry().getWho());
+ assertEquals(msg, lc.getReflogEntry().getComment());
+
+ assertFalse(lc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void logScan() throws IOException {
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setRefBlockSize(256);
+ cfg.setLogBlockSize(2048);
+
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ ReftableWriter writer = new ReftableWriter(cfg);
+ writer.setMinUpdateIndex(1).setMaxUpdateIndex(1).begin(buffer);
+
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 5670; i++) {
+ Ref ref = ref(String.format("refs/heads/%03d", i), i);
+ refs.add(ref);
+ writer.writeRef(ref);
+ }
+
+ PersonIdent who = new PersonIdent("Log", "Ger", 1500079709, -8 * 60);
+ for (Ref ref : refs) {
+ writer.writeLog(ref.getName(), 1, who,
+ ObjectId.zeroId(), ref.getObjectId(),
+ "create " + ref.getName());
+ }
+ writer.finish();
+ stats = writer.getStats();
+ assertTrue(stats.logBytes() > 4096);
+ byte[] table = buffer.toByteArray();
+
+ ReftableReader t = read(table);
+ try (LogCursor lc = t.allLogs()) {
+ for (Ref exp : refs) {
+ assertTrue("has " + exp.getName(), lc.next());
+ assertEquals(exp.getName(), lc.getRefName());
+ ReflogEntry entry = lc.getReflogEntry();
+ assertNotNull(entry);
+ assertEquals(who, entry.getWho());
+ assertEquals(ObjectId.zeroId(), entry.getOldId());
+ assertEquals(exp.getObjectId(), entry.getNewId());
+ assertEquals("create " + exp.getName(), entry.getComment());
+ }
+ assertFalse(lc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void byObjectIdOneRefNoIndex() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 200; i++) {
+ refs.add(ref(String.format("refs/heads/%02d", i), i));
+ }
+ refs.add(ref("refs/heads/master", 100));
+
+ ReftableReader t = read(write(refs));
+ assertEquals(0, stats.objIndexSize());
+
+ try (RefCursor rc = t.byObjectId(id(42))) {
+ assertTrue("has 42", rc.next());
+ assertEquals("refs/heads/42", rc.getRef().getName());
+ assertEquals(id(42), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.byObjectId(id(100))) {
+ assertTrue("has 100", rc.next());
+ assertEquals("refs/heads/100", rc.getRef().getName());
+ assertEquals(id(100), rc.getRef().getObjectId());
+
+ assertTrue("has master", rc.next());
+ assertEquals("refs/heads/master", rc.getRef().getName());
+ assertEquals(id(100), rc.getRef().getObjectId());
+
+ assertFalse(rc.next());
+ }
+ }
+
+ @SuppressWarnings("boxing")
+ @Test
+ public void byObjectIdOneRefWithIndex() throws IOException {
+ List<Ref> refs = new ArrayList<>();
+ for (int i = 1; i <= 5200; i++) {
+ refs.add(ref(String.format("refs/heads/%02d", i), i));
+ }
+ refs.add(ref("refs/heads/master", 100));
+
+ ReftableReader t = read(write(refs));
+ assertTrue(stats.objIndexSize() > 0);
+
+ try (RefCursor rc = t.byObjectId(id(42))) {
+ assertTrue("has 42", rc.next());
+ assertEquals("refs/heads/42", rc.getRef().getName());
+ assertEquals(id(42), rc.getRef().getObjectId());
+ assertFalse(rc.next());
+ }
+ try (RefCursor rc = t.byObjectId(id(100))) {
+ assertTrue("has 100", rc.next());
+ assertEquals("refs/heads/100", rc.getRef().getName());
+ assertEquals(id(100), rc.getRef().getObjectId());
+
+ assertTrue("has master", rc.next());
+ assertEquals("refs/heads/master", rc.getRef().getName());
+ assertEquals(id(100), rc.getRef().getObjectId());
+
+ assertFalse(rc.next());
+ }
+ }
+
+ @Test
+ public void unpeeledDoesNotWrite() {
+ try {
+ write(new ObjectIdRef.Unpeeled(PACKED, MASTER, id(1)));
+ fail("expected IOException");
+ } catch (IOException e) {
+ assertEquals(JGitText.get().peeledRefIsRequired, e.getMessage());
+ }
+ }
+
+ @Test
+ public void nameTooLongDoesNotWrite() throws IOException {
+ try {
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setRefBlockSize(64);
+
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ ReftableWriter writer = new ReftableWriter(cfg).begin(buffer);
+ writer.writeRef(ref("refs/heads/i-am-not-a-teapot", 1));
+ writer.finish();
+ fail("expected BlockSizeTooSmallException");
+ } catch (BlockSizeTooSmallException e) {
+ assertEquals(84, e.getMinimumBlockSize());
+ }
+ }
+
+ @Test
+ public void badCrc32() throws IOException {
+ byte[] table = write();
+ table[table.length - 1] = 0x42;
+
+ try {
+ read(table).seekRef(HEAD);
+ fail("expected IOException");
+ } catch (IOException e) {
+ assertEquals(JGitText.get().invalidReftableCRC, e.getMessage());
+ }
+ }
+
+ private static void assertScan(List<Ref> refs, Reftable t)
+ throws IOException {
+ try (RefCursor rc = t.allRefs()) {
+ for (Ref exp : refs) {
+ assertTrue("has " + exp.getName(), rc.next());
+ Ref act = rc.getRef();
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ }
+ assertFalse(rc.next());
+ }
+ }
+
+ private static void assertSeek(List<Ref> refs, Reftable t)
+ throws IOException {
+ for (Ref exp : refs) {
+ try (RefCursor rc = t.seekRef(exp.getName())) {
+ assertTrue("has " + exp.getName(), rc.next());
+ Ref act = rc.getRef();
+ assertEquals(exp.getName(), act.getName());
+ assertEquals(exp.getObjectId(), act.getObjectId());
+ assertFalse(rc.next());
+ }
+ }
+ }
+
+ private static Ref ref(String name, int id) {
+ return new ObjectIdRef.PeeledNonTag(PACKED, name, id(id));
+ }
+
+ private static Ref tag(String name, int id1, int id2) {
+ return new ObjectIdRef.PeeledTag(PACKED, name, id(id1), id(id2));
+ }
+
+ private static Ref sym(String name, String target) {
+ return new SymbolicRef(name, newRef(target));
+ }
+
+ private static Ref newRef(String name) {
+ return new ObjectIdRef.Unpeeled(NEW, name, null);
+ }
+
+ private static ObjectId id(int i) {
+ byte[] buf = new byte[OBJECT_ID_LENGTH];
+ buf[0] = (byte) (i & 0xff);
+ buf[1] = (byte) ((i >>> 8) & 0xff);
+ buf[2] = (byte) ((i >>> 16) & 0xff);
+ buf[3] = (byte) (i >>> 24);
+ return ObjectId.fromRaw(buf);
+ }
+
+ private static ReftableReader read(byte[] table) {
+ return new ReftableReader(BlockSource.from(table));
+ }
+
+ private byte[] write(Ref... refs) throws IOException {
+ return write(Arrays.asList(refs));
+ }
+
+ private byte[] write(Collection<Ref> refs) throws IOException {
+ ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+ stats = new ReftableWriter()
+ .begin(buffer)
+ .sortAndWriteRefs(refs)
+ .finish()
+ .getStats();
+ return buffer.toByteArray();
+ }
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftree/LocalDiskRefTreeDatabaseTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftree/LocalDiskRefTreeDatabaseTest.java
index 67a7819..d5a07e0 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftree/LocalDiskRefTreeDatabaseTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/internal/storage/reftree/LocalDiskRefTreeDatabaseTest.java
@@ -83,7 +83,7 @@ public void setUp() throws Exception {
FileRepository init = createWorkRepository();
FileBasedConfig cfg = init.getConfig();
cfg.setInt("core", null, "repositoryformatversion", 1);
- cfg.setString("extensions", null, "refsStorage", "reftree");
+ cfg.setString("extensions", null, "refStorage", "reftree");
cfg.save();
repo = (FileRepository) new FileRepositoryBuilder()
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
index 6529d9e..30a9626 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/AbbreviatedObjectIdTest.java
@@ -86,7 +86,6 @@ public void testFull_FromByteArray() {
final ObjectId f = i.toObjectId();
assertNotNull(f);
assertEquals(ObjectId.fromString(s), f);
- assertEquals(f.hashCode(), i.hashCode());
}
@Test
@@ -101,7 +100,6 @@ public void testFull_FromString() {
final ObjectId f = i.toObjectId();
assertNotNull(f);
assertEquals(ObjectId.fromString(s), f);
- assertEquals(f.hashCode(), i.hashCode());
}
@Test
@@ -215,7 +213,7 @@ public void test17_FromString() {
}
@Test
- public void testEquals_Short() {
+ public void testEquals_Short8() {
final String s = "7b6e8067";
final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
final AbbreviatedObjectId b = AbbreviatedObjectId.fromString(s);
@@ -226,6 +224,18 @@ public void testEquals_Short() {
}
@Test
+ public void testEquals_Short4() {
+ final String s = "7b6e";
+ final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
+ final AbbreviatedObjectId b = AbbreviatedObjectId.fromString(s);
+ assertNotSame(a, b);
+ assertTrue(a.hashCode() != 0);
+ assertTrue(a.hashCode() == b.hashCode());
+ assertEquals(b, a);
+ assertEquals(a, b);
+ }
+
+ @Test
public void testEquals_Full() {
final String s = "7b6e8067ec96acef9a4184b43210d583b6d2f99a";
final AbbreviatedObjectId a = AbbreviatedObjectId.fromString(s);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/DirCacheCheckoutTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/DirCacheCheckoutTest.java
index f8c2d45..05573b9 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/DirCacheCheckoutTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/DirCacheCheckoutTest.java
@@ -72,6 +72,8 @@
import org.eclipse.jgit.errors.CheckoutConflictException;
import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.NoWorkTreeException;
+import org.eclipse.jgit.events.ChangeRecorder;
+import org.eclipse.jgit.events.ListenerHandle;
import org.eclipse.jgit.junit.RepositoryTestCase;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.junit.TestRepository.BranchBuilder;
@@ -141,14 +143,19 @@ private static HashMap<String, String> mkmap(String... args) {
@Test
public void testResetHard() throws IOException, NoFilepatternException,
GitAPIException {
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
writeTrashFile("f", "f()");
writeTrashFile("D/g", "g()");
git.add().addFilepattern(".").call();
git.commit().setMessage("inital").call();
assertIndex(mkmap("f", "f()", "D/g", "g()"));
-
+ recorder.assertNoEvent();
git.branchCreate().setName("topic").call();
+ recorder.assertNoEvent();
writeTrashFile("f", "f()\nmaster");
writeTrashFile("D/g", "g()\ng2()");
@@ -156,9 +163,12 @@ public void testResetHard() throws IOException, NoFilepatternException,
git.add().addFilepattern(".").call();
RevCommit master = git.commit().setMessage("master-1").call();
assertIndex(mkmap("f", "f()\nmaster", "D/g", "g()\ng2()", "E/h", "h()"));
+ recorder.assertNoEvent();
checkoutBranch("refs/heads/topic");
assertIndex(mkmap("f", "f()", "D/g", "g()"));
+ recorder.assertEvent(new String[] { "f", "D/g" },
+ new String[] { "E/h" });
writeTrashFile("f", "f()\nside");
assertTrue(new File(db.getWorkTree(), "D/g").delete());
@@ -167,26 +177,41 @@ public void testResetHard() throws IOException, NoFilepatternException,
git.add().addFilepattern(".").setUpdate(true).call();
RevCommit topic = git.commit().setMessage("topic-1").call();
assertIndex(mkmap("f", "f()\nside", "G/i", "i()"));
+ recorder.assertNoEvent();
writeTrashFile("untracked", "untracked");
resetHard(master);
assertIndex(mkmap("f", "f()\nmaster", "D/g", "g()\ng2()", "E/h", "h()"));
+ recorder.assertEvent(new String[] { "f", "D/g", "E/h" },
+ new String[] { "G", "G/i" });
+
resetHard(topic);
assertIndex(mkmap("f", "f()\nside", "G/i", "i()"));
assertWorkDir(mkmap("f", "f()\nside", "G/i", "i()", "untracked",
"untracked"));
+ recorder.assertEvent(new String[] { "f", "G/i" },
+ new String[] { "D", "D/g", "E", "E/h" });
assertEquals(MergeStatus.CONFLICTING, git.merge().include(master)
.call().getMergeStatus());
assertEquals(
"[D/g, mode:100644, stage:1][D/g, mode:100644, stage:3][E/h, mode:100644][G/i, mode:100644][f, mode:100644, stage:1][f, mode:100644, stage:2][f, mode:100644, stage:3]",
indexState(0));
+ recorder.assertEvent(new String[] { "f", "D/g", "E/h" },
+ ChangeRecorder.EMPTY);
resetHard(master);
assertIndex(mkmap("f", "f()\nmaster", "D/g", "g()\ng2()", "E/h", "h()"));
assertWorkDir(mkmap("f", "f()\nmaster", "D/g", "g()\ng2()", "E/h",
"h()", "untracked", "untracked"));
+ recorder.assertEvent(new String[] { "f", "D/g" },
+ new String[] { "G", "G/i" });
+
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -202,13 +227,18 @@ public void testResetHard() throws IOException, NoFilepatternException,
@Test
public void testResetHardFromIndexEntryWithoutFileToTreeWithoutFile()
throws Exception {
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
writeTrashFile("x", "x");
git.add().addFilepattern("x").call();
RevCommit id1 = git.commit().setMessage("c1").call();
writeTrashFile("f/g", "f/g");
git.rm().addFilepattern("x").call();
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { "x" });
git.add().addFilepattern("f/g").call();
git.commit().setMessage("c2").call();
deleteTrashFile("f/g");
@@ -217,6 +247,11 @@ public void testResetHardFromIndexEntryWithoutFileToTreeWithoutFile()
// The actual test
git.reset().setMode(ResetType.HARD).setRef(id1.getName()).call();
assertIndex(mkmap("x", "x"));
+ recorder.assertEvent(new String[] { "x" }, ChangeRecorder.EMPTY);
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -227,13 +262,22 @@ public void testResetHardFromIndexEntryWithoutFileToTreeWithoutFile()
*/
@Test
public void testInitialCheckout() throws Exception {
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
TestRepository<Repository> db_t = new TestRepository<>(db);
BranchBuilder master = db_t.branch("master");
master.commit().add("f", "1").message("m0").create();
assertFalse(new File(db.getWorkTree(), "f").exists());
git.checkout().setName("master").call();
assertTrue(new File(db.getWorkTree(), "f").exists());
+ recorder.assertEvent(new String[] { "f" }, ChangeRecorder.EMPTY);
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -930,120 +974,154 @@ public void testCheckoutOutChanges() throws IOException {
public void testCheckoutChangeLinkToEmptyDir() throws Exception {
Assume.assumeTrue(FS.DETECTED.supportsSymlinks());
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
+ // Add a link to file
+ String linkName = "link";
+ File link = writeLink(linkName, fname).toFile();
+ git.add().addFilepattern(linkName).call();
+ git.commit().setMessage("Added file and link").call();
- // Add a link to file
- String linkName = "link";
- File link = writeLink(linkName, fname).toFile();
- git.add().addFilepattern(linkName).call();
- git.commit().setMessage("Added file and link").call();
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ // replace link with empty directory
+ FileUtils.delete(link);
+ FileUtils.mkdir(link);
+ assertTrue("Link must be a directory now", link.isDirectory());
- // replace link with empty directory
- FileUtils.delete(link);
- FileUtils.mkdir(link);
- assertTrue("Link must be a directory now", link.isDirectory());
+ // modify file
+ writeTrashFile(fname, "b");
+ assertWorkDir(mkmap(fname, "b", linkName, "/"));
+ recorder.assertNoEvent();
- // modify file
- writeTrashFile(fname, "b");
- assertWorkDir(mkmap(fname, "b", linkName, "/"));
+ // revert both paths to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname)
+ .addPath(linkName).call();
- // revert both paths to HEAD state
- git.checkout().setStartPoint(Constants.HEAD)
- .addPath(fname).addPath(linkName).call();
+ assertWorkDir(mkmap(fname, "a", linkName, "a"));
+ recorder.assertEvent(new String[] { fname, linkName },
+ ChangeRecorder.EMPTY);
- assertWorkDir(mkmap(fname, "a", linkName, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeLinkToEmptyDirs() throws Exception {
Assume.assumeTrue(FS.DETECTED.supportsSymlinks());
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
+ // Add a link to file
+ String linkName = "link";
+ File link = writeLink(linkName, fname).toFile();
+ git.add().addFilepattern(linkName).call();
+ git.commit().setMessage("Added file and link").call();
- // Add a link to file
- String linkName = "link";
- File link = writeLink(linkName, fname).toFile();
- git.add().addFilepattern(linkName).call();
- git.commit().setMessage("Added file and link").call();
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ // replace link with directory containing only directories, no files
+ FileUtils.delete(link);
+ FileUtils.mkdirs(new File(link, "dummyDir"));
+ assertTrue("Link must be a directory now", link.isDirectory());
- // replace link with directory containing only directories, no files
- FileUtils.delete(link);
- FileUtils.mkdirs(new File(link, "dummyDir"));
- assertTrue("Link must be a directory now", link.isDirectory());
+ assertFalse("Must not delete non empty directory", link.delete());
- assertFalse("Must not delete non empty directory", link.delete());
+ // modify file
+ writeTrashFile(fname, "b");
+ assertWorkDir(mkmap(fname, "b", linkName + "/dummyDir", "/"));
+ recorder.assertNoEvent();
- // modify file
- writeTrashFile(fname, "b");
- assertWorkDir(mkmap(fname, "b", linkName + "/dummyDir", "/"));
+ // revert both paths to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname)
+ .addPath(linkName).call();
- // revert both paths to HEAD state
- git.checkout().setStartPoint(Constants.HEAD)
- .addPath(fname).addPath(linkName).call();
+ assertWorkDir(mkmap(fname, "a", linkName, "a"));
+ recorder.assertEvent(new String[] { fname, linkName },
+ ChangeRecorder.EMPTY);
- assertWorkDir(mkmap(fname, "a", linkName, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeLinkToNonEmptyDirs() throws Exception {
Assume.assumeTrue(FS.DETECTED.supportsSymlinks());
String fname = "file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
+ // Add a link to file
+ String linkName = "link";
+ File link = writeLink(linkName, fname).toFile();
+ git.add().addFilepattern(linkName).call();
+ git.commit().setMessage("Added file and link").call();
- // Add a link to file
- String linkName = "link";
- File link = writeLink(linkName, fname).toFile();
- git.add().addFilepattern(linkName).call();
- git.commit().setMessage("Added file and link").call();
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ // replace link with directory containing only directories, no files
+ FileUtils.delete(link);
- // replace link with directory containing only directories, no files
- FileUtils.delete(link);
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir1", "file1", "c");
- // create but do not add a file in the new directory to the index
- writeTrashFile(linkName + "/dir1", "file1", "c");
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir2", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(linkName + "/dir2", "file2", "d");
+ assertTrue("File must be a directory now", link.isDirectory());
+ assertFalse("Must not delete non empty directory", link.delete());
- assertTrue("File must be a directory now", link.isDirectory());
- assertFalse("Must not delete non empty directory", link.delete());
+ // 2 extra files are created
+ assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
+ linkName + "/dir2/file2", "d"));
+ recorder.assertNoEvent();
- // 2 extra files are created
- assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
- linkName + "/dir2/file2", "d"));
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(linkName)
+ .call();
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(linkName).call();
+ // expect only the one added to the index
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ recorder.assertEvent(new String[] { linkName },
+ ChangeRecorder.EMPTY);
- // expect only the one added to the index
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
@@ -1051,174 +1129,222 @@ public void testCheckoutChangeLinkToNonEmptyDirsAndNewIndexEntry()
throws Exception {
Assume.assumeTrue(FS.DETECTED.supportsSymlinks());
String fname = "file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
+ // Add a link to file
+ String linkName = "link";
+ File link = writeLink(linkName, fname).toFile();
+ git.add().addFilepattern(linkName).call();
+ git.commit().setMessage("Added file and link").call();
- // Add a link to file
- String linkName = "link";
- File link = writeLink(linkName, fname).toFile();
- git.add().addFilepattern(linkName).call();
- git.commit().setMessage("Added file and link").call();
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ // replace link with directory containing only directories, no files
+ FileUtils.delete(link);
- // replace link with directory containing only directories, no files
- FileUtils.delete(link);
+ // create and add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir1", "file1", "c");
+ git.add().addFilepattern(linkName + "/dir1/file1").call();
- // create and add a file in the new directory to the index
- writeTrashFile(linkName + "/dir1", "file1", "c");
- git.add().addFilepattern(linkName + "/dir1/file1").call();
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir2", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(linkName + "/dir2", "file2", "d");
+ assertTrue("File must be a directory now", link.isDirectory());
+ assertFalse("Must not delete non empty directory", link.delete());
- assertTrue("File must be a directory now", link.isDirectory());
- assertFalse("Must not delete non empty directory", link.delete());
+ // 2 extra files are created
+ assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
+ linkName + "/dir2/file2", "d"));
+ recorder.assertNoEvent();
- // 2 extra files are created
- assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
- linkName + "/dir2/file2", "d"));
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(linkName)
+ .call();
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(linkName).call();
+ // original file and link
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ recorder.assertEvent(new String[] { linkName },
+ ChangeRecorder.EMPTY);
- // original file and link
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeFileToEmptyDir() throws Exception {
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ File file = writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("Added file").call();
- // Add a file
- File file = writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("Added file").call();
+ // replace file with empty directory
+ FileUtils.delete(file);
+ FileUtils.mkdir(file);
+ assertTrue("File must be a directory now", file.isDirectory());
+ assertWorkDir(mkmap(fname, "/"));
+ recorder.assertNoEvent();
- // replace file with empty directory
- FileUtils.delete(file);
- FileUtils.mkdir(file);
- assertTrue("File must be a directory now", file.isDirectory());
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
+ assertWorkDir(mkmap(fname, "a"));
+ recorder.assertEvent(new String[] { fname }, ChangeRecorder.EMPTY);
- assertWorkDir(mkmap(fname, "/"));
-
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
-
- assertWorkDir(mkmap(fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeFileToEmptyDirs() throws Exception {
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ File file = writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("Added file").call();
- // Add a file
- File file = writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("Added file").call();
+ // replace file with directory containing only directories, no files
+ FileUtils.delete(file);
+ FileUtils.mkdirs(new File(file, "dummyDir"));
+ assertTrue("File must be a directory now", file.isDirectory());
+ assertFalse("Must not delete non empty directory", file.delete());
- // replace file with directory containing only directories, no files
- FileUtils.delete(file);
- FileUtils.mkdirs(new File(file, "dummyDir"));
- assertTrue("File must be a directory now", file.isDirectory());
- assertFalse("Must not delete non empty directory", file.delete());
+ assertWorkDir(mkmap(fname + "/dummyDir", "/"));
+ recorder.assertNoEvent();
- assertWorkDir(mkmap(fname + "/dummyDir", "/"));
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
+ assertWorkDir(mkmap(fname, "a"));
+ recorder.assertEvent(new String[] { fname }, ChangeRecorder.EMPTY);
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
-
- assertWorkDir(mkmap(fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeFileToNonEmptyDirs() throws Exception {
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ File file = writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("Added file").call();
- // Add a file
- File file = writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("Added file").call();
+ assertWorkDir(mkmap(fname, "a"));
- assertWorkDir(mkmap(fname, "a"));
+ // replace file with directory containing only directories, no files
+ FileUtils.delete(file);
- // replace file with directory containing only directories, no files
- FileUtils.delete(file);
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(fname + "/dir1", "file1", "c");
- // create but do not add a file in the new directory to the index
- writeTrashFile(fname + "/dir1", "file1", "c");
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(fname + "/dir2", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(fname + "/dir2", "file2", "d");
+ assertTrue("File must be a directory now", file.isDirectory());
+ assertFalse("Must not delete non empty directory", file.delete());
- assertTrue("File must be a directory now", file.isDirectory());
- assertFalse("Must not delete non empty directory", file.delete());
+ // 2 extra files are created
+ assertWorkDir(mkmap(fname + "/dir1/file1", "c",
+ fname + "/dir2/file2", "d"));
+ recorder.assertNoEvent();
- // 2 extra files are created
- assertWorkDir(
- mkmap(fname + "/dir1/file1", "c", fname + "/dir2/file2", "d"));
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
+ // expect only the one added to the index
+ assertWorkDir(mkmap(fname, "a"));
+ recorder.assertEvent(new String[] { fname }, ChangeRecorder.EMPTY);
- // expect only the one added to the index
- assertWorkDir(mkmap(fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testCheckoutChangeFileToNonEmptyDirsAndNewIndexEntry()
throws Exception {
String fname = "was_file";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ File file = writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("Added file").call();
- // Add a file
- File file = writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("Added file").call();
+ assertWorkDir(mkmap(fname, "a"));
- assertWorkDir(mkmap(fname, "a"));
+ // replace file with directory containing only directories, no files
+ FileUtils.delete(file);
- // replace file with directory containing only directories, no files
- FileUtils.delete(file);
+ // create and add a file in the new directory to the index
+ writeTrashFile(fname + "/dir", "file1", "c");
+ git.add().addFilepattern(fname + "/dir/file1").call();
- // create and add a file in the new directory to the index
- writeTrashFile(fname + "/dir", "file1", "c");
- git.add().addFilepattern(fname + "/dir/file1").call();
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(fname + "/dir", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(fname + "/dir", "file2", "d");
+ assertTrue("File must be a directory now", file.isDirectory());
+ assertFalse("Must not delete non empty directory", file.delete());
- assertTrue("File must be a directory now", file.isDirectory());
- assertFalse("Must not delete non empty directory", file.delete());
+ // 2 extra files are created
+ assertWorkDir(mkmap(fname + "/dir/file1", "c", fname + "/dir/file2",
+ "d"));
+ recorder.assertNoEvent();
- // 2 extra files are created
- assertWorkDir(
- mkmap(fname + "/dir/file1", "c", fname + "/dir/file2", "d"));
-
- // revert path to HEAD state
- git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
- assertWorkDir(mkmap(fname, "a"));
-
- Status st = git.status().call();
- assertTrue(st.isClean());
+ // revert path to HEAD state
+ git.checkout().setStartPoint(Constants.HEAD).addPath(fname).call();
+ assertWorkDir(mkmap(fname, "a"));
+ recorder.assertEvent(new String[] { fname }, ChangeRecorder.EMPTY);
+ Status st = git.status().call();
+ assertTrue(st.isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
@@ -1293,76 +1419,100 @@ public void testDontOverwriteEmptyFolder() throws IOException {
public void testOverwriteUntrackedIgnoredFile() throws IOException,
GitAPIException {
String fname="file.txt";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("create file").call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("create file").call();
+ // Create branch
+ git.branchCreate().setName("side").call();
- // Create branch
- git.branchCreate().setName("side").call();
+ // Modify file
+ writeTrashFile(fname, "b");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("modify file").call();
+ recorder.assertNoEvent();
- // Modify file
- writeTrashFile(fname, "b");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("modify file").call();
+ // Switch branches
+ git.checkout().setName("side").call();
+ recorder.assertEvent(new String[] { fname }, ChangeRecorder.EMPTY);
+ git.rm().addFilepattern(fname).call();
+ recorder.assertEvent(ChangeRecorder.EMPTY, new String[] { fname });
+ writeTrashFile(".gitignore", fname);
+ git.add().addFilepattern(".gitignore").call();
+ git.commit().setMessage("delete and ignore file").call();
- // Switch branches
- git.checkout().setName("side").call();
- git.rm().addFilepattern(fname).call();
- writeTrashFile(".gitignore", fname);
- git.add().addFilepattern(".gitignore").call();
- git.commit().setMessage("delete and ignore file").call();
-
- writeTrashFile(fname, "Something different");
- git.checkout().setName("master").call();
- assertWorkDir(mkmap(fname, "b"));
- assertTrue(git.status().call().isClean());
+ writeTrashFile(fname, "Something different");
+ recorder.assertNoEvent();
+ git.checkout().setName("master").call();
+ assertWorkDir(mkmap(fname, "b"));
+ recorder.assertEvent(new String[] { fname },
+ new String[] { ".gitignore" });
+ assertTrue(git.status().call().isClean());
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
public void testOverwriteUntrackedFileModeChange()
throws IOException, GitAPIException {
String fname = "file.txt";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ File file = writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
+ git.commit().setMessage("create file").call();
+ assertWorkDir(mkmap(fname, "a"));
- // Add a file
- File file = writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
- git.commit().setMessage("create file").call();
- assertWorkDir(mkmap(fname, "a"));
+ // Create branch
+ git.branchCreate().setName("side").call();
- // Create branch
- git.branchCreate().setName("side").call();
+ // Switch branches
+ git.checkout().setName("side").call();
+ recorder.assertNoEvent();
- // Switch branches
- git.checkout().setName("side").call();
+ // replace file with directory containing files
+ FileUtils.delete(file);
- // replace file with directory containing files
- FileUtils.delete(file);
+ // create and add a file in the new directory to the index
+ writeTrashFile(fname + "/dir1", "file1", "c");
+ git.add().addFilepattern(fname + "/dir1/file1").call();
- // create and add a file in the new directory to the index
- writeTrashFile(fname + "/dir1", "file1", "c");
- git.add().addFilepattern(fname + "/dir1/file1").call();
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(fname + "/dir2", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(fname + "/dir2", "file2", "d");
+ assertTrue("File must be a directory now", file.isDirectory());
+ assertFalse("Must not delete non empty directory", file.delete());
- assertTrue("File must be a directory now", file.isDirectory());
- assertFalse("Must not delete non empty directory", file.delete());
-
- // 2 extra files are created
- assertWorkDir(
- mkmap(fname + "/dir1/file1", "c", fname + "/dir2/file2", "d"));
-
- try {
- git.checkout().setName("master").call();
- fail("did not throw exception");
- } catch (Exception e) {
- // 2 extra files are still there
+ // 2 extra files are created
assertWorkDir(mkmap(fname + "/dir1/file1", "c",
fname + "/dir2/file2", "d"));
+
+ try {
+ git.checkout().setName("master").call();
+ fail("did not throw exception");
+ } catch (Exception e) {
+ // 2 extra files are still there
+ assertWorkDir(mkmap(fname + "/dir1/file1", "c",
+ fname + "/dir2/file2", "d"));
+ }
+ recorder.assertNoEvent();
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -1371,50 +1521,60 @@ public void testOverwriteUntrackedLinkModeChange()
throws Exception {
Assume.assumeTrue(FS.DETECTED.supportsSymlinks());
String fname = "file.txt";
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add a file
+ writeTrashFile(fname, "a");
+ git.add().addFilepattern(fname).call();
- // Add a file
- writeTrashFile(fname, "a");
- git.add().addFilepattern(fname).call();
+ // Add a link to file
+ String linkName = "link";
+ File link = writeLink(linkName, fname).toFile();
+ git.add().addFilepattern(linkName).call();
+ git.commit().setMessage("Added file and link").call();
- // Add a link to file
- String linkName = "link";
- File link = writeLink(linkName, fname).toFile();
- git.add().addFilepattern(linkName).call();
- git.commit().setMessage("Added file and link").call();
+ assertWorkDir(mkmap(linkName, "a", fname, "a"));
- assertWorkDir(mkmap(linkName, "a", fname, "a"));
+ // Create branch
+ git.branchCreate().setName("side").call();
- // Create branch
- git.branchCreate().setName("side").call();
+ // Switch branches
+ git.checkout().setName("side").call();
+ recorder.assertNoEvent();
- // Switch branches
- git.checkout().setName("side").call();
+ // replace link with directory containing files
+ FileUtils.delete(link);
- // replace link with directory containing files
- FileUtils.delete(link);
+ // create and add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir1", "file1", "c");
+ git.add().addFilepattern(linkName + "/dir1/file1").call();
- // create and add a file in the new directory to the index
- writeTrashFile(linkName + "/dir1", "file1", "c");
- git.add().addFilepattern(linkName + "/dir1/file1").call();
+ // create but do not add a file in the new directory to the index
+ writeTrashFile(linkName + "/dir2", "file2", "d");
- // create but do not add a file in the new directory to the index
- writeTrashFile(linkName + "/dir2", "file2", "d");
+ assertTrue("Link must be a directory now", link.isDirectory());
+ assertFalse("Must not delete non empty directory", link.delete());
- assertTrue("Link must be a directory now", link.isDirectory());
- assertFalse("Must not delete non empty directory", link.delete());
-
- // 2 extra files are created
- assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
- linkName + "/dir2/file2", "d"));
-
- try {
- git.checkout().setName("master").call();
- fail("did not throw exception");
- } catch (Exception e) {
- // 2 extra files are still there
+ // 2 extra files are created
assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
linkName + "/dir2/file2", "d"));
+
+ try {
+ git.checkout().setName("master").call();
+ fail("did not throw exception");
+ } catch (Exception e) {
+ // 2 extra files are still there
+ assertWorkDir(mkmap(fname, "a", linkName + "/dir1/file1", "c",
+ linkName + "/dir2/file2", "d"));
+ }
+ recorder.assertNoEvent();
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -1423,36 +1583,47 @@ public void testFileModeChangeWithNoContentChangeUpdate() throws Exception {
if (!FS.DETECTED.supportsExecute())
return;
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add non-executable file
+ File file = writeTrashFile("file.txt", "a");
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit1").call();
+ assertFalse(db.getFS().canExecute(file));
- // Add non-executable file
- File file = writeTrashFile("file.txt", "a");
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit1").call();
- assertFalse(db.getFS().canExecute(file));
+ // Create branch
+ git.branchCreate().setName("b1").call();
- // Create branch
- git.branchCreate().setName("b1").call();
+ // Make file executable
+ db.getFS().setExecute(file, true);
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit2").call();
+ recorder.assertNoEvent();
- // Make file executable
- db.getFS().setExecute(file, true);
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit2").call();
+ // Verify executable and working directory is clean
+ Status status = git.status().call();
+ assertTrue(status.getModified().isEmpty());
+ assertTrue(status.getChanged().isEmpty());
+ assertTrue(db.getFS().canExecute(file));
- // Verify executable and working directory is clean
- Status status = git.status().call();
- assertTrue(status.getModified().isEmpty());
- assertTrue(status.getChanged().isEmpty());
- assertTrue(db.getFS().canExecute(file));
+ // Switch branches
+ git.checkout().setName("b1").call();
- // Switch branches
- git.checkout().setName("b1").call();
-
- // Verify not executable and working directory is clean
- status = git.status().call();
- assertTrue(status.getModified().isEmpty());
- assertTrue(status.getChanged().isEmpty());
- assertFalse(db.getFS().canExecute(file));
+ // Verify not executable and working directory is clean
+ status = git.status().call();
+ assertTrue(status.getModified().isEmpty());
+ assertTrue(status.getChanged().isEmpty());
+ assertFalse(db.getFS().canExecute(file));
+ recorder.assertEvent(new String[] { "file.txt" },
+ ChangeRecorder.EMPTY);
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
@@ -1460,41 +1631,50 @@ public void testFileModeChangeAndContentChangeConflict() throws Exception {
if (!FS.DETECTED.supportsExecute())
return;
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add non-executable file
+ File file = writeTrashFile("file.txt", "a");
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit1").call();
+ assertFalse(db.getFS().canExecute(file));
- // Add non-executable file
- File file = writeTrashFile("file.txt", "a");
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit1").call();
- assertFalse(db.getFS().canExecute(file));
+ // Create branch
+ git.branchCreate().setName("b1").call();
- // Create branch
- git.branchCreate().setName("b1").call();
+ // Make file executable
+ db.getFS().setExecute(file, true);
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit2").call();
- // Make file executable
- db.getFS().setExecute(file, true);
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit2").call();
+ // Verify executable and working directory is clean
+ Status status = git.status().call();
+ assertTrue(status.getModified().isEmpty());
+ assertTrue(status.getChanged().isEmpty());
+ assertTrue(db.getFS().canExecute(file));
- // Verify executable and working directory is clean
- Status status = git.status().call();
- assertTrue(status.getModified().isEmpty());
- assertTrue(status.getChanged().isEmpty());
- assertTrue(db.getFS().canExecute(file));
+ writeTrashFile("file.txt", "b");
- writeTrashFile("file.txt", "b");
-
- // Switch branches
- CheckoutCommand checkout = git.checkout().setName("b1");
- try {
- checkout.call();
- fail("Checkout exception not thrown");
- } catch (org.eclipse.jgit.api.errors.CheckoutConflictException e) {
- CheckoutResult result = checkout.getResult();
- assertNotNull(result);
- assertNotNull(result.getConflictList());
- assertEquals(1, result.getConflictList().size());
- assertTrue(result.getConflictList().contains("file.txt"));
+ // Switch branches
+ CheckoutCommand checkout = git.checkout().setName("b1");
+ try {
+ checkout.call();
+ fail("Checkout exception not thrown");
+ } catch (org.eclipse.jgit.api.errors.CheckoutConflictException e) {
+ CheckoutResult result = checkout.getResult();
+ assertNotNull(result);
+ assertNotNull(result.getConflictList());
+ assertEquals(1, result.getConflictList().size());
+ assertTrue(result.getConflictList().contains("file.txt"));
+ }
+ recorder.assertNoEvent();
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
}
}
@@ -1504,40 +1684,52 @@ public void testDirtyFileModeEqualHeadMerge()
if (!FS.DETECTED.supportsExecute())
return;
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add non-executable file
+ File file = writeTrashFile("file.txt", "a");
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit1").call();
+ assertFalse(db.getFS().canExecute(file));
- // Add non-executable file
- File file = writeTrashFile("file.txt", "a");
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit1").call();
- assertFalse(db.getFS().canExecute(file));
+ // Create branch
+ git.branchCreate().setName("b1").call();
- // Create branch
- git.branchCreate().setName("b1").call();
+ // Create second commit and don't touch file
+ writeTrashFile("file2.txt", "");
+ git.add().addFilepattern("file2.txt").call();
+ git.commit().setMessage("commit2").call();
- // Create second commit and don't touch file
- writeTrashFile("file2.txt", "");
- git.add().addFilepattern("file2.txt").call();
- git.commit().setMessage("commit2").call();
+ // stage a mode change
+ writeTrashFile("file.txt", "a");
+ db.getFS().setExecute(file, true);
+ git.add().addFilepattern("file.txt").call();
- // stage a mode change
- writeTrashFile("file.txt", "a");
- db.getFS().setExecute(file, true);
- git.add().addFilepattern("file.txt").call();
+ // dirty the file
+ writeTrashFile("file.txt", "b");
- // dirty the file
- writeTrashFile("file.txt", "b");
+ assertEquals(
+ "[file.txt, mode:100755, content:a][file2.txt, mode:100644, content:]",
+ indexState(CONTENT));
+ assertWorkDir(mkmap("file.txt", "b", "file2.txt", ""));
+ recorder.assertNoEvent();
- assertEquals(
- "[file.txt, mode:100755, content:a][file2.txt, mode:100644, content:]",
- indexState(CONTENT));
- assertWorkDir(mkmap("file.txt", "b", "file2.txt", ""));
-
- // Switch branches and check that the dirty file survived in worktree
- // and index
- git.checkout().setName("b1").call();
- assertEquals("[file.txt, mode:100755, content:a]", indexState(CONTENT));
- assertWorkDir(mkmap("file.txt", "b"));
+ // Switch branches and check that the dirty file survived in
+ // worktree and index
+ git.checkout().setName("b1").call();
+ assertEquals("[file.txt, mode:100755, content:a]",
+ indexState(CONTENT));
+ assertWorkDir(mkmap("file.txt", "b"));
+ recorder.assertEvent(ChangeRecorder.EMPTY,
+ new String[] { "file2.txt" });
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
@@ -1546,40 +1738,53 @@ public void testDirtyFileModeEqualIndexMerge()
if (!FS.DETECTED.supportsExecute())
return;
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add non-executable file
+ File file = writeTrashFile("file.txt", "a");
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit1").call();
+ assertFalse(db.getFS().canExecute(file));
- // Add non-executable file
- File file = writeTrashFile("file.txt", "a");
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit1").call();
- assertFalse(db.getFS().canExecute(file));
+ // Create branch
+ git.branchCreate().setName("b1").call();
- // Create branch
- git.branchCreate().setName("b1").call();
+ // Create second commit with executable file
+ file = writeTrashFile("file.txt", "b");
+ db.getFS().setExecute(file, true);
+ git.add().addFilepattern("file.txt").call();
+ git.commit().setMessage("commit2").call();
- // Create second commit with executable file
- file = writeTrashFile("file.txt", "b");
- db.getFS().setExecute(file, true);
- git.add().addFilepattern("file.txt").call();
- git.commit().setMessage("commit2").call();
+ // stage the same content as in the branch we want to switch to
+ writeTrashFile("file.txt", "a");
+ db.getFS().setExecute(file, false);
+ git.add().addFilepattern("file.txt").call();
- // stage the same content as in the branch we want to switch to
- writeTrashFile("file.txt", "a");
- db.getFS().setExecute(file, false);
- git.add().addFilepattern("file.txt").call();
+ // dirty the file
+ writeTrashFile("file.txt", "c");
+ db.getFS().setExecute(file, true);
- // dirty the file
- writeTrashFile("file.txt", "c");
- db.getFS().setExecute(file, true);
+ assertEquals("[file.txt, mode:100644, content:a]",
+ indexState(CONTENT));
+ assertWorkDir(mkmap("file.txt", "c"));
+ recorder.assertNoEvent();
- assertEquals("[file.txt, mode:100644, content:a]", indexState(CONTENT));
- assertWorkDir(mkmap("file.txt", "c"));
-
- // Switch branches and check that the dirty file survived in worktree
- // and index
- git.checkout().setName("b1").call();
- assertEquals("[file.txt, mode:100644, content:a]", indexState(CONTENT));
- assertWorkDir(mkmap("file.txt", "c"));
+ // Switch branches and check that the dirty file survived in
+ // worktree and index
+ git.checkout().setName("b1").call();
+ assertEquals("[file.txt, mode:100644, content:a]",
+ indexState(CONTENT));
+ assertWorkDir(mkmap("file.txt", "c"));
+ recorder.assertNoEvent();
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test
@@ -1587,31 +1792,44 @@ public void testFileModeChangeAndContentChangeNoConflict() throws Exception {
if (!FS.DETECTED.supportsExecute())
return;
- Git git = Git.wrap(db);
+ ChangeRecorder recorder = new ChangeRecorder();
+ ListenerHandle handle = null;
+ try (Git git = new Git(db)) {
+ handle = db.getListenerList()
+ .addWorkingTreeModifiedListener(recorder);
+ // Add first file
+ File file1 = writeTrashFile("file1.txt", "a");
+ git.add().addFilepattern("file1.txt").call();
+ git.commit().setMessage("commit1").call();
+ assertFalse(db.getFS().canExecute(file1));
- // Add first file
- File file1 = writeTrashFile("file1.txt", "a");
- git.add().addFilepattern("file1.txt").call();
- git.commit().setMessage("commit1").call();
- assertFalse(db.getFS().canExecute(file1));
+ // Add second file
+ File file2 = writeTrashFile("file2.txt", "b");
+ git.add().addFilepattern("file2.txt").call();
+ git.commit().setMessage("commit2").call();
+ assertFalse(db.getFS().canExecute(file2));
+ recorder.assertNoEvent();
- // Add second file
- File file2 = writeTrashFile("file2.txt", "b");
- git.add().addFilepattern("file2.txt").call();
- git.commit().setMessage("commit2").call();
- assertFalse(db.getFS().canExecute(file2));
+ // Create branch from first commit
+ assertNotNull(git.checkout().setCreateBranch(true).setName("b1")
+ .setStartPoint(Constants.HEAD + "~1").call());
+ recorder.assertEvent(ChangeRecorder.EMPTY,
+ new String[] { "file2.txt" });
- // Create branch from first commit
- assertNotNull(git.checkout().setCreateBranch(true).setName("b1")
- .setStartPoint(Constants.HEAD + "~1").call());
+ // Change content and file mode in working directory and index
+ file1 = writeTrashFile("file1.txt", "c");
+ db.getFS().setExecute(file1, true);
+ git.add().addFilepattern("file1.txt").call();
- // Change content and file mode in working directory and index
- file1 = writeTrashFile("file1.txt", "c");
- db.getFS().setExecute(file1, true);
- git.add().addFilepattern("file1.txt").call();
-
- // Switch back to 'master'
- assertNotNull(git.checkout().setName(Constants.MASTER).call());
+ // Switch back to 'master'
+ assertNotNull(git.checkout().setName(Constants.MASTER).call());
+ recorder.assertEvent(new String[] { "file2.txt" },
+ ChangeRecorder.EMPTY);
+ } finally {
+ if (handle != null) {
+ handle.remove();
+ }
+ }
}
@Test(expected = CheckoutConflictException.class)
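
The checkout test hunks above all apply the same refactoring: wrap Git in try-with-resources, register a WorkingTreeModifiedListener on the repository's listener list, and remove the ListenerHandle in a finally block so later tests do not observe stale events. A minimal standalone sketch of that pattern, using a plain inline listener in place of the ChangeRecorder test helper and assuming the WorkingTreeModifiedEvent getters for modified and deleted paths, could look like this:

    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.events.ListenerHandle;
    import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
    import org.eclipse.jgit.events.WorkingTreeModifiedListener;
    import org.eclipse.jgit.lib.Repository;

    class WorkingTreeEventSketch {
        static void checkoutWithListener(Repository db) throws Exception {
            // Inline listener standing in for ChangeRecorder; it only prints
            // the paths reported by the checkout.
            WorkingTreeModifiedListener listener = new WorkingTreeModifiedListener() {
                @Override
                public void onWorkingTreeModified(WorkingTreeModifiedEvent event) {
                    System.out.println("modified: " + event.getModified());
                    System.out.println("deleted:  " + event.getDeleted());
                }
            };
            ListenerHandle handle = null;
            try (Git git = new Git(db)) {
                handle = db.getListenerList()
                        .addWorkingTreeModifiedListener(listener);
                git.checkout().setName("master").call();
            } finally {
                if (handle != null) {
                    // Unregister so the listener does not leak into other tests.
                    handle.remove();
                }
            }
        }
    }

The ChangeRecorder helper used above plays the same role: it records the event contents so that assertEvent and assertNoEvent can verify exactly which paths a checkout touched.
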
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/IndexDiffSubmoduleTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/IndexDiffSubmoduleTest.java
index 0111b94..d89aabe 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/IndexDiffSubmoduleTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/IndexDiffSubmoduleTest.java
@@ -43,11 +43,13 @@
package org.eclipse.jgit.lib;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
+import java.util.Set;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException;
@@ -118,6 +120,31 @@ public void testDirtyRootWorktree(IgnoreSubmoduleMode mode)
assertTrue(indexDiff.diff());
}
+ private void assertDiff(IndexDiff indexDiff, IgnoreSubmoduleMode mode,
+ IgnoreSubmoduleMode... expectedEmptyModes) throws IOException {
+ boolean diffResult = indexDiff.diff();
+ Set<String> submodulePaths = indexDiff
+ .getPathsWithIndexMode(FileMode.GITLINK);
+ boolean emptyExpected = false;
+ for (IgnoreSubmoduleMode empty : expectedEmptyModes) {
+ if (mode.equals(empty)) {
+ emptyExpected = true;
+ break;
+ }
+ }
+ if (emptyExpected) {
+ assertFalse("diff should be false with mode=" + mode,
+ diffResult);
+ assertEquals("should have no paths with FileMode.GITLINK", 0,
+ submodulePaths.size());
+ } else {
+ assertTrue("diff should be true with mode=" + mode,
+ diffResult);
+ assertTrue("submodule path should have FileMode.GITLINK",
+ submodulePaths.contains("modules/submodule"));
+ }
+ }
+
@Theory
public void testDirtySubmoduleWorktree(IgnoreSubmoduleMode mode)
throws IOException {
@@ -125,13 +152,8 @@ public void testDirtySubmoduleWorktree(IgnoreSubmoduleMode mode)
IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
new FileTreeIterator(db));
indexDiff.setIgnoreSubmoduleMode(mode);
- if (mode.equals(IgnoreSubmoduleMode.ALL)
- || mode.equals(IgnoreSubmoduleMode.DIRTY))
- assertFalse("diff should be false with mode=" + mode,
- indexDiff.diff());
- else
- assertTrue("diff should be true with mode=" + mode,
- indexDiff.diff());
+ assertDiff(indexDiff, mode, IgnoreSubmoduleMode.ALL,
+ IgnoreSubmoduleMode.DIRTY);
}
@Theory
@@ -145,12 +167,7 @@ public void testDirtySubmoduleHEAD(IgnoreSubmoduleMode mode)
IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
new FileTreeIterator(db));
indexDiff.setIgnoreSubmoduleMode(mode);
- if (mode.equals(IgnoreSubmoduleMode.ALL))
- assertFalse("diff should be false with mode=" + mode,
- indexDiff.diff());
- else
- assertTrue("diff should be true with mode=" + mode,
- indexDiff.diff());
+ assertDiff(indexDiff, mode, IgnoreSubmoduleMode.ALL);
}
@Theory
@@ -163,13 +180,8 @@ public void testDirtySubmoduleIndex(IgnoreSubmoduleMode mode)
IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
new FileTreeIterator(db));
indexDiff.setIgnoreSubmoduleMode(mode);
- if (mode.equals(IgnoreSubmoduleMode.ALL)
- || mode.equals(IgnoreSubmoduleMode.DIRTY))
- assertFalse("diff should be false with mode=" + mode,
- indexDiff.diff());
- else
- assertTrue("diff should be true with mode=" + mode,
- indexDiff.diff());
+ assertDiff(indexDiff, mode, IgnoreSubmoduleMode.ALL,
+ IgnoreSubmoduleMode.DIRTY);
}
@Theory
@@ -183,13 +195,8 @@ public void testDirtySubmoduleIndexAndWorktree(IgnoreSubmoduleMode mode)
IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
new FileTreeIterator(db));
indexDiff.setIgnoreSubmoduleMode(mode);
- if (mode.equals(IgnoreSubmoduleMode.ALL)
- || mode.equals(IgnoreSubmoduleMode.DIRTY))
- assertFalse("diff should be false with mode=" + mode,
- indexDiff.diff());
- else
- assertTrue("diff should be true with mode=" + mode,
- indexDiff.diff());
+ assertDiff(indexDiff, mode, IgnoreSubmoduleMode.ALL,
+ IgnoreSubmoduleMode.DIRTY);
}
@Theory
@@ -200,13 +207,7 @@ public void testDirtySubmoduleWorktreeUntracked(IgnoreSubmoduleMode mode)
IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
new FileTreeIterator(db));
indexDiff.setIgnoreSubmoduleMode(mode);
- if (mode.equals(IgnoreSubmoduleMode.ALL)
- || mode.equals(IgnoreSubmoduleMode.DIRTY)
- || mode.equals(IgnoreSubmoduleMode.UNTRACKED))
- assertFalse("diff should be false with mode=" + mode,
- indexDiff.diff());
- else
- assertTrue("diff should be true with mode=" + mode,
- indexDiff.diff());
+ assertDiff(indexDiff, mode, IgnoreSubmoduleMode.ALL,
+ IgnoreSubmoduleMode.DIRTY, IgnoreSubmoduleMode.UNTRACKED);
}
}
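
The new assertDiff helper above centralizes what the @Theory tests repeat: run IndexDiff with a given IgnoreSubmoduleMode, then ask which index entries carry FileMode.GITLINK, i.e. which paths are submodules. A compact sketch of that usage, assuming SubmoduleWalk.IgnoreSubmoduleMode.NONE as the "report everything" mode:

    import java.util.Set;

    import org.eclipse.jgit.lib.Constants;
    import org.eclipse.jgit.lib.FileMode;
    import org.eclipse.jgit.lib.IndexDiff;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.submodule.SubmoduleWalk.IgnoreSubmoduleMode;
    import org.eclipse.jgit.treewalk.FileTreeIterator;

    class SubmoduleDiffSketch {
        static Set<String> gitlinkPaths(Repository db) throws Exception {
            IndexDiff indexDiff = new IndexDiff(db, Constants.HEAD,
                    new FileTreeIterator(db));
            indexDiff.setIgnoreSubmoduleMode(IgnoreSubmoduleMode.NONE);
            // diff() populates the path sets queried afterwards.
            boolean changed = indexDiff.diff();
            Set<String> submodules = indexDiff
                    .getPathsWithIndexMode(FileMode.GITLINK);
            System.out.println("dirty: " + changed + ", submodules: " + submodules);
            return submodules;
        }
    }
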
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ObjectCheckerTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ObjectCheckerTest.java
index 43160fb..c8729d9 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ObjectCheckerTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ObjectCheckerTest.java
@@ -45,6 +45,7 @@
package org.eclipse.jgit.lib;
import static java.lang.Integer.valueOf;
+import static org.eclipse.jgit.junit.JGitTestUtil.concat;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
import static org.eclipse.jgit.lib.Constants.OBJ_BAD;
import static org.eclipse.jgit.lib.Constants.OBJ_BLOB;
@@ -1054,20 +1055,7 @@ public void testInvalidTreeNameIsMacHFSGit3()
checker.checkTree(data);
}
- private static byte[] concat(byte[]... b) {
- int n = 0;
- for (byte[] a : b) {
- n += a.length;
- }
- byte[] data = new byte[n];
- n = 0;
- for (byte[] a : b) {
- System.arraycopy(a, 0, data, n, a.length);
- n += a.length;
- }
- return data;
- }
@Test
public void testInvalidTreeNameIsMacHFSGitCorruptUTF8AtEnd()
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ReflogResolveTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ReflogResolveTest.java
index 7db9f60..15f28af 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ReflogResolveTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/lib/ReflogResolveTest.java
@@ -179,4 +179,4 @@ public void resolveDate() throws Exception {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/RevWalkMergeBaseTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/RevWalkMergeBaseTest.java
index 2451c50..077645e 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/RevWalkMergeBaseTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/RevWalkMergeBaseTest.java
@@ -171,4 +171,4 @@ public void testInconsistentCommitTimes() throws Exception {
assertNull(rw.next());
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/SkipRevFilterTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/SkipRevFilterTest.java
index 353a487..cf02aa8 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/SkipRevFilterTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/revwalk/SkipRevFilterTest.java
@@ -81,4 +81,4 @@ public void testSkipRevFilter0() throws Exception {
public void testSkipRevFilterNegative() throws Exception {
SkipRevFilter.create(-1);
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleAddTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleAddTest.java
index 5c46659..f42dd02 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleAddTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleAddTest.java
@@ -269,4 +269,4 @@ public void addSubmoduleWithExistingSubmoduleDefined() throws Exception {
ConfigConstants.CONFIG_KEY_URL));
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleStatusTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleStatusTest.java
index 61df9d9..5832518 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleStatusTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/submodule/SubmoduleStatusTest.java
@@ -59,11 +59,11 @@
import org.eclipse.jgit.dircache.DirCacheEditor.PathEdit;
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.junit.RepositoryTestCase;
+import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.ConfigConstants;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
-import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.storage.file.FileBasedConfig;
@@ -256,11 +256,16 @@ public void apply(DirCacheEntry ent) {
}
@Test
- public void repositoryWithInitializedSubmodule() throws IOException,
- GitAPIException {
- final ObjectId id = ObjectId
- .fromString("abcd1234abcd1234abcd1234abcd1234abcd1234");
- final String path = "sub";
+ public void repositoryWithInitializedSubmodule() throws Exception {
+ String path = "sub";
+ Repository subRepo = Git.init().setBare(false)
+ .setDirectory(new File(db.getWorkTree(), path)).call()
+ .getRepository();
+ assertNotNull(subRepo);
+
+ TestRepository<?> subTr = new TestRepository<>(subRepo);
+ ObjectId id = subTr.branch(Constants.HEAD).commit().create().copy();
+
DirCache cache = db.lockDirCache();
DirCacheEditor editor = cache.editor();
editor.add(new PathEdit(path) {
@@ -287,15 +292,6 @@ public void apply(DirCacheEntry ent) {
ConfigConstants.CONFIG_KEY_URL, url);
modulesConfig.save();
- Repository subRepo = Git.init().setBare(false)
- .setDirectory(new File(db.getWorkTree(), path)).call()
- .getRepository();
- assertNotNull(subRepo);
-
- RefUpdate update = subRepo.updateRef(Constants.HEAD, true);
- update.setNewObjectId(id);
- update.forceUpdate();
-
SubmoduleStatusCommand command = new SubmoduleStatusCommand(db);
Map<String, SubmoduleStatus> statuses = command.call();
assertNotNull(statuses);
@@ -312,11 +308,16 @@ public void apply(DirCacheEntry ent) {
}
@Test
- public void repositoryWithDifferentRevCheckedOutSubmodule()
- throws IOException, GitAPIException {
- final ObjectId id = ObjectId
- .fromString("abcd1234abcd1234abcd1234abcd1234abcd1234");
- final String path = "sub";
+ public void repositoryWithDifferentRevCheckedOutSubmodule() throws Exception {
+ String path = "sub";
+ Repository subRepo = Git.init().setBare(false)
+ .setDirectory(new File(db.getWorkTree(), path)).call()
+ .getRepository();
+ assertNotNull(subRepo);
+
+ TestRepository<?> subTr = new TestRepository<>(subRepo);
+ ObjectId id = subTr.branch(Constants.HEAD).commit().create().copy();
+
DirCache cache = db.lockDirCache();
DirCacheEditor editor = cache.editor();
editor.add(new PathEdit(path) {
@@ -343,15 +344,7 @@ public void apply(DirCacheEntry ent) {
ConfigConstants.CONFIG_KEY_URL, url);
modulesConfig.save();
- Repository subRepo = Git.init().setBare(false)
- .setDirectory(new File(db.getWorkTree(), path)).call()
- .getRepository();
- assertNotNull(subRepo);
-
- RefUpdate update = subRepo.updateRef(Constants.HEAD, true);
- update.setNewObjectId(ObjectId
- .fromString("aaaa0000aaaa0000aaaa0000aaaa0000aaaa0000"));
- update.forceUpdate();
+ ObjectId newId = subTr.branch(Constants.HEAD).commit().create().copy();
SubmoduleStatusCommand command = new SubmoduleStatusCommand(db);
Map<String, SubmoduleStatus> statuses = command.call();
@@ -365,7 +358,7 @@ public void apply(DirCacheEntry ent) {
assertNotNull(status);
assertEquals(path, status.getPath());
assertEquals(id, status.getIndexId());
- assertEquals(update.getNewObjectId(), status.getHeadId());
+ assertEquals(newId, status.getHeadId());
assertEquals(SubmoduleStatusType.REV_CHECKED_OUT, status.getType());
}
}
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/OpenSshConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/OpenSshConfigTest.java
index fc520ab..3eb0497 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/OpenSshConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/OpenSshConfigTest.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2014 Google Inc.
+ * Copyright (C) 2008, 2017 Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -43,10 +43,13 @@
package org.eclipse.jgit.transport;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.io.File;
@@ -58,9 +61,12 @@
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.transport.OpenSshConfig.Host;
import org.eclipse.jgit.util.FileUtils;
+import org.eclipse.jgit.util.SystemReader;
import org.junit.Before;
import org.junit.Test;
+import com.jcraft.jsch.ConfigRepository;
+
public class OpenSshConfigTest extends RepositoryTestCase {
private File home;
@@ -79,15 +85,18 @@ public void setUp() throws Exception {
configFile = new File(new File(home, ".ssh"), Constants.CONFIG);
FileUtils.mkdir(configFile.getParentFile());
- System.setProperty("user.name", "jex_junit");
+ mockSystemReader.setProperty(Constants.OS_USER_NAME_KEY, "jex_junit");
osc = new OpenSshConfig(home, configFile);
}
private void config(final String data) throws IOException {
- final OutputStreamWriter fw = new OutputStreamWriter(
- new FileOutputStream(configFile), "UTF-8");
- fw.write(data);
- fw.close();
+ long lastMtime = configFile.lastModified();
+ do {
+ try (final OutputStreamWriter fw = new OutputStreamWriter(
+ new FileOutputStream(configFile), "UTF-8")) {
+ fw.write(data);
+ }
+ } while (lastMtime == configFile.lastModified());
}
@Test
@@ -155,13 +164,18 @@ public void testQuoteParsing() throws Exception {
@Test
public void testAlias_DoesNotMatch() throws Exception {
- config("Host orcz\n" + "\tHostName repo.or.cz\n");
+ config("Host orcz\n" + "Port 29418\n" + "\tHostName repo.or.cz\n");
final Host h = osc.lookup("repo.or.cz");
assertNotNull(h);
assertEquals("repo.or.cz", h.getHostName());
assertEquals("jex_junit", h.getUser());
assertEquals(22, h.getPort());
assertNull(h.getIdentityFile());
+ final Host h2 = osc.lookup("orcz");
+ assertEquals("repo.or.cz", h.getHostName());
+ assertEquals("jex_junit", h.getUser());
+ assertEquals(29418, h2.getPort());
+ assertNull(h.getIdentityFile());
}
@Test
@@ -282,4 +296,193 @@ public void testAlias_badConnectionAttempts() throws Exception {
assertNotNull(h);
assertEquals(1, h.getConnectionAttempts());
}
+
+ @Test
+ public void testDefaultBlock() throws Exception {
+ config("ConnectionAttempts 5\n\nHost orcz\nConnectionAttempts 3\n");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(5, h.getConnectionAttempts());
+ }
+
+ @Test
+ public void testHostCaseInsensitive() throws Exception {
+ config("hOsT orcz\nConnectionAttempts 3\n");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(3, h.getConnectionAttempts());
+ }
+
+ @Test
+ public void testListValueSingle() throws Exception {
+ config("Host orcz\nUserKnownHostsFile /foo/bar\n");
+ final ConfigRepository.Config c = osc.getConfig("orcz");
+ assertNotNull(c);
+ assertEquals("/foo/bar", c.getValue("UserKnownHostsFile"));
+ }
+
+ @Test
+ public void testListValueMultiple() throws Exception {
+ // Tilde expansion doesn't occur within the parser
+ config("Host orcz\nUserKnownHostsFile \"~/foo/ba z\" /foo/bar \n");
+ final ConfigRepository.Config c = osc.getConfig("orcz");
+ assertNotNull(c);
+ assertArrayEquals(new Object[] { "~/foo/ba z", "/foo/bar" },
+ c.getValues("UserKnownHostsFile"));
+ }
+
+ @Test
+ public void testRepeatedLookups() throws Exception {
+ config("Host orcz\n" + "\tConnectionAttempts 5\n");
+ final Host h1 = osc.lookup("orcz");
+ final Host h2 = osc.lookup("orcz");
+ assertNotNull(h1);
+ assertSame(h1, h2);
+ assertEquals(5, h1.getConnectionAttempts());
+ assertEquals(h1.getConnectionAttempts(), h2.getConnectionAttempts());
+ final ConfigRepository.Config c = osc.getConfig("orcz");
+ assertNotNull(c);
+ assertSame(c, h1.getConfig());
+ assertSame(c, h2.getConfig());
+ }
+
+ @Test
+ public void testRepeatedLookupsWithModification() throws Exception {
+ config("Host orcz\n" + "\tConnectionAttempts -1\n");
+ final Host h1 = osc.lookup("orcz");
+ assertNotNull(h1);
+ assertEquals(1, h1.getConnectionAttempts());
+ config("Host orcz\n" + "\tConnectionAttempts 5\n");
+ final Host h2 = osc.lookup("orcz");
+ assertNotNull(h2);
+ assertNotSame(h1, h2);
+ assertEquals(5, h2.getConnectionAttempts());
+ assertEquals(1, h1.getConnectionAttempts());
+ assertNotSame(h1.getConfig(), h2.getConfig());
+ }
+
+ @Test
+ public void testIdentityFile() throws Exception {
+ config("Host orcz\nIdentityFile \"~/foo/ba z\"\nIdentityFile /foo/bar");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ File f = h.getIdentityFile();
+ assertNotNull(f);
+ // Host does tilde replacement
+ assertEquals(new File(home, "foo/ba z"), f);
+ final ConfigRepository.Config c = h.getConfig();
+ // Config doesn't
+ assertArrayEquals(new Object[] { "~/foo/ba z", "/foo/bar" },
+ c.getValues("IdentityFile"));
+ }
+
+ @Test
+ public void testMultiIdentityFile() throws Exception {
+ config("IdentityFile \"~/foo/ba z\"\nHost orcz\nIdentityFile /foo/bar\nHOST *\nIdentityFile /foo/baz");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ File f = h.getIdentityFile();
+ assertNotNull(f);
+ // Host does tilde replacement
+ assertEquals(new File(home, "foo/ba z"), f);
+ final ConfigRepository.Config c = h.getConfig();
+ // Config doesn't
+ assertArrayEquals(new Object[] { "~/foo/ba z", "/foo/bar", "/foo/baz" },
+ c.getValues("IdentityFile"));
+ }
+
+ @Test
+ public void testNegatedPattern() throws Exception {
+ config("Host repo.or.cz\nIdentityFile ~/foo/bar\nHOST !*.or.cz\nIdentityFile /foo/baz");
+ final Host h = osc.lookup("repo.or.cz");
+ assertNotNull(h);
+ assertEquals(new File(home, "foo/bar"), h.getIdentityFile());
+ assertArrayEquals(new Object[] { "~/foo/bar" },
+ h.getConfig().getValues("IdentityFile"));
+ }
+
+ @Test
+ public void testPattern() throws Exception {
+ config("Host repo.or.cz\nIdentityFile ~/foo/bar\nHOST *.or.cz\nIdentityFile /foo/baz");
+ final Host h = osc.lookup("repo.or.cz");
+ assertNotNull(h);
+ assertEquals(new File(home, "foo/bar"), h.getIdentityFile());
+ assertArrayEquals(new Object[] { "~/foo/bar", "/foo/baz" },
+ h.getConfig().getValues("IdentityFile"));
+ }
+
+ @Test
+ public void testMultiHost() throws Exception {
+ config("Host orcz *.or.cz\nIdentityFile ~/foo/bar\nHOST *.or.cz\nIdentityFile /foo/baz");
+ final Host h1 = osc.lookup("repo.or.cz");
+ assertNotNull(h1);
+ assertEquals(new File(home, "foo/bar"), h1.getIdentityFile());
+ assertArrayEquals(new Object[] { "~/foo/bar", "/foo/baz" },
+ h1.getConfig().getValues("IdentityFile"));
+ final Host h2 = osc.lookup("orcz");
+ assertNotNull(h2);
+ assertEquals(new File(home, "foo/bar"), h2.getIdentityFile());
+ assertArrayEquals(new Object[] { "~/foo/bar" },
+ h2.getConfig().getValues("IdentityFile"));
+ }
+
+ @Test
+ public void testEqualsSign() throws Exception {
+ config("Host=orcz\n\tConnectionAttempts = 5\n\tUser=\t foobar\t\n");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(5, h.getConnectionAttempts());
+ assertEquals("foobar", h.getUser());
+ }
+
+ @Test
+ public void testMissingArgument() throws Exception {
+ config("Host=orcz\n\tSendEnv\nIdentityFile\t\nForwardX11\n\tUser=\t foobar\t\n");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals("foobar", h.getUser());
+ assertArrayEquals(new String[0], h.getConfig().getValues("SendEnv"));
+ assertNull(h.getIdentityFile());
+ assertNull(h.getConfig().getValue("ForwardX11"));
+ }
+
+ @Test
+ public void testHomeDirUserReplacement() throws Exception {
+ config("Host=orcz\n\tIdentityFile %d/.ssh/%u_id_dsa");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(new File(new File(home, ".ssh"), "jex_junit_id_dsa"),
+ h.getIdentityFile());
+ }
+
+ @Test
+ public void testHostnameReplacement() throws Exception {
+ config("Host=orcz\nHost *.*\n\tHostname %h\nHost *\n\tHostname %h.example.org");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals("orcz.example.org", h.getHostName());
+ }
+
+ @Test
+ public void testRemoteUserReplacement() throws Exception {
+ config("Host=orcz\n\tUser foo\n" + "Host *.*\n\tHostname %h\n"
+ + "Host *\n\tHostname %h.ex%%20ample.org\n\tIdentityFile ~/.ssh/%h_%r_id_dsa");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(
+ new File(new File(home, ".ssh"),
+ "orcz.ex%20ample.org_foo_id_dsa"),
+ h.getIdentityFile());
+ }
+
+ @Test
+ public void testLocalhostFQDNReplacement() throws Exception {
+ String localhost = SystemReader.getInstance().getHostname();
+ config("Host=orcz\n\tIdentityFile ~/.ssh/%l_id_dsa");
+ final Host h = osc.lookup("orcz");
+ assertNotNull(h);
+ assertEquals(
+ new File(new File(home, ".ssh"), localhost + "_id_dsa"),
+ h.getIdentityFile());
+ }
}
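
The replacement tests added above, testHomeDirUserReplacement through testLocalhostFQDNReplacement, pin down OpenSSH-style percent tokens: %d is the user's home directory, %u the local user name, %h the host name, %r the remote user, %l the local host name, and %% a literal percent sign. The following self-contained sketch only illustrates that expected substitution; it is not JGit's OpenSshConfig implementation:

    import java.util.Map;

    class SshTokenSketch {
        static String expand(String value, Map<String, String> tokens) {
            StringBuilder out = new StringBuilder();
            for (int i = 0; i < value.length(); i++) {
                char c = value.charAt(i);
                if (c == '%' && i + 1 < value.length()) {
                    char t = value.charAt(++i);
                    if (t == '%') {
                        out.append('%'); // %% escapes a literal percent sign
                    } else {
                        // Unknown tokens are left untouched.
                        out.append(tokens.getOrDefault(String.valueOf(t), "%" + t));
                    }
                } else {
                    out.append(c);
                }
            }
            return out.toString();
        }
    }

For example, expand("%h.ex%%20ample.org", Map.of("h", "orcz")) yields "orcz.ex%20ample.org", the expanded host name that testRemoteUserReplacement expects inside the identity file path.
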
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/RemoteConfigTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/RemoteConfigTest.java
index 0cada5c..a0cf0d2 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/RemoteConfigTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/RemoteConfigTest.java
@@ -51,6 +51,7 @@
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import org.eclipse.jgit.errors.ConfigInvalidException;
@@ -498,24 +499,48 @@ public void noPushInsteadOf() throws Exception {
}
@Test
- public void singlePushInsteadOf() throws Exception {
+ public void pushInsteadOfNotAppliedToPushUri() throws Exception {
config.setString("remote", "origin", "pushurl", "short:project.git");
config.setString("url", "https://server/repos/", "pushInsteadOf",
"short:");
RemoteConfig rc = new RemoteConfig(config, "origin");
assertFalse(rc.getPushURIs().isEmpty());
+ assertEquals("short:project.git",
+ rc.getPushURIs().get(0).toASCIIString());
+ }
+
+ @Test
+ public void pushInsteadOfAppliedToUri() throws Exception {
+ config.setString("remote", "origin", "url", "short:project.git");
+ config.setString("url", "https://server/repos/", "pushInsteadOf",
+ "short:");
+ RemoteConfig rc = new RemoteConfig(config, "origin");
+ assertFalse(rc.getPushURIs().isEmpty());
+ assertEquals("https://server/repos/project.git",
+ rc.getPushURIs().get(0).toASCIIString());
+ }
+
+ @Test
+ public void multiplePushInsteadOf() throws Exception {
+ config.setString("remote", "origin", "url", "prefixproject.git");
+ config.setStringList("url", "https://server/repos/", "pushInsteadOf",
+ Arrays.asList("pre", "prefix", "pref", "perf"));
+ RemoteConfig rc = new RemoteConfig(config, "origin");
+ assertFalse(rc.getPushURIs().isEmpty());
assertEquals("https://server/repos/project.git", rc.getPushURIs()
.get(0).toASCIIString());
}
@Test
- public void multiplePushInsteadOf() throws Exception {
- config.setString("remote", "origin", "pushurl", "prefixproject.git");
- config.setStringList("url", "https://server/repos/", "pushInsteadOf",
- Arrays.asList("pre", "prefix", "pref", "perf"));
+ public void pushInsteadOfNoPushUrl() throws Exception {
+ config.setString("remote", "origin", "url",
+ "http://git.eclipse.org/gitroot/jgit/jgit");
+ config.setStringList("url", "ssh://someone@git.eclipse.org:29418/",
+ "pushInsteadOf",
+ Collections.singletonList("http://git.eclipse.org/gitroot/"));
RemoteConfig rc = new RemoteConfig(config, "origin");
assertFalse(rc.getPushURIs().isEmpty());
- assertEquals("https://server/repos/project.git", rc.getPushURIs()
- .get(0).toASCIIString());
+ assertEquals("ssh://someone@git.eclipse.org:29418/jgit/jgit",
+ rc.getPushURIs().get(0).toASCIIString());
}
}
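
The reworked pushInsteadOf tests encode two rules: the rewrite applies to the remote's url but never to an explicit pushurl, and when several pushInsteadOf prefixes match ("pre", "prefix", "pref", "perf" against "prefixproject.git") the longest match wins. A small illustrative sketch of that longest-prefix rewrite, not RemoteConfig's actual code:

    import java.util.Arrays;
    import java.util.List;

    class PushInsteadOfSketch {
        static String rewrite(String url, String replacement, List<String> prefixes) {
            String best = null;
            for (String prefix : prefixes) {
                if (url.startsWith(prefix)
                        && (best == null || prefix.length() > best.length())) {
                    best = prefix; // keep the longest matching prefix
                }
            }
            return best == null ? url : replacement + url.substring(best.length());
        }

        public static void main(String[] args) {
            // Mirrors multiplePushInsteadOf: prints https://server/repos/project.git
            System.out.println(rewrite("prefixproject.git", "https://server/repos/",
                    Arrays.asList("pre", "prefix", "pref", "perf")));
        }
    }
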
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
index c6eca9d..d6ea8c6 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/IntListTest.java
@@ -44,6 +44,7 @@
package org.eclipse.jgit.util;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -186,6 +187,16 @@ public void testSet() {
}
@Test
+ public void testContains() {
+ IntList i = new IntList();
+ i.add(1);
+ i.add(4);
+ assertTrue(i.contains(1));
+ assertTrue(i.contains(4));
+ assertFalse(i.contains(2));
+ }
+
+ @Test
public void testToString() {
final IntList i = new IntList();
i.add(1);
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/LongMapTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/LongMapTest.java
similarity index 98%
rename from org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/LongMapTest.java
rename to org.eclipse.jgit.test/tst/org/eclipse/jgit/util/LongMapTest.java
index 1a86aaf..054c61e 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/transport/LongMapTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/LongMapTest.java
@@ -41,7 +41,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-package org.eclipse.jgit.transport;
+package org.eclipse.jgit.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/NBTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/NBTest.java
index 7e11a61..d2d44ff 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/NBTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/NBTest.java
@@ -90,6 +90,24 @@ public void testDecodeUInt16() {
}
@Test
+ public void testDecodeUInt24() {
+ assertEquals(0, NB.decodeUInt24(b(0, 0, 0), 0));
+ assertEquals(0, NB.decodeUInt24(padb(3, 0, 0, 0), 3));
+
+ assertEquals(3, NB.decodeUInt24(b(0, 0, 3), 0));
+ assertEquals(3, NB.decodeUInt24(padb(3, 0, 0, 3), 3));
+
+ assertEquals(0xcede03, NB.decodeUInt24(b(0xce, 0xde, 3), 0));
+ assertEquals(0xbade03, NB.decodeUInt24(padb(3, 0xba, 0xde, 3), 3));
+
+ assertEquals(0x03bade, NB.decodeUInt24(b(3, 0xba, 0xde), 0));
+ assertEquals(0x03bade, NB.decodeUInt24(padb(3, 3, 0xba, 0xde), 3));
+
+ assertEquals(0xffffff, NB.decodeUInt24(b(0xff, 0xff, 0xff), 0));
+ assertEquals(0xffffff, NB.decodeUInt24(padb(3, 0xff, 0xff, 0xff), 3));
+ }
+
+ @Test
public void testDecodeInt32() {
assertEquals(0, NB.decodeInt32(b(0, 0, 0, 0), 0));
assertEquals(0, NB.decodeInt32(padb(3, 0, 0, 0, 0), 3));
@@ -198,6 +216,39 @@ public void testEncodeInt16() {
}
@Test
+ public void testEncodeInt24() {
+ byte[] out = new byte[16];
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 0, 0);
+ assertOutput(b(0, 0, 0), out, 0);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 3, 0);
+ assertOutput(b(0, 0, 0), out, 3);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 0, 3);
+ assertOutput(b(0, 0, 3), out, 0);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 3, 3);
+ assertOutput(b(0, 0, 3), out, 3);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 0, 0xc0deac);
+ assertOutput(b(0xc0, 0xde, 0xac), out, 0);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 3, 0xbadeac);
+ assertOutput(b(0xba, 0xde, 0xac), out, 3);
+
+ prepareOutput(out);
+ NB.encodeInt24(out, 3, -1);
+ assertOutput(b(0xff, 0xff, 0xff), out, 3);
+ }
+
+ @Test
public void testEncodeInt32() {
final byte[] out = new byte[16];
@@ -315,10 +366,24 @@ private static void assertOutput(final byte[] expect, final byte[] buf,
return r;
}
+ private static byte[] b(int a, int b, int c) {
+ return new byte[] { (byte) a, (byte) b, (byte) c };
+ }
+
private static byte[] b(final int a, final int b, final int c, final int d) {
return new byte[] { (byte) a, (byte) b, (byte) c, (byte) d };
}
+ private static byte[] padb(int len, int a, int b, int c) {
+ final byte[] r = new byte[len + 4];
+ for (int i = 0; i < len; i++)
+ r[i] = (byte) 0xaf;
+ r[len] = (byte) a;
+ r[len + 1] = (byte) b;
+ r[len + 2] = (byte) c;
+ return r;
+ }
+
private static byte[] padb(final int len, final int a, final int b,
final int c, final int d) {
final byte[] r = new byte[len + 4];
diff --git a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
index 5939714..6efdce6 100644
--- a/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
+++ b/org.eclipse.jgit.test/tst/org/eclipse/jgit/util/RawParseUtils_LineMapTest.java
@@ -43,7 +43,7 @@
package org.eclipse.jgit.util;
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertNotNull;
import java.io.UnsupportedEncodingException;
@@ -55,52 +55,51 @@ public class RawParseUtils_LineMapTest {
public void testEmpty() {
final IntList map = RawParseUtils.lineMap(new byte[] {}, 0, 0);
assertNotNull(map);
- assertEquals(2, map.size());
- assertEquals(Integer.MIN_VALUE, map.get(0));
- assertEquals(0, map.get(1));
+ assertArrayEquals(new int[]{Integer.MIN_VALUE, 0}, asInts(map));
}
@Test
public void testOneBlankLine() {
final IntList map = RawParseUtils.lineMap(new byte[] { '\n' }, 0, 1);
- assertEquals(3, map.size());
- assertEquals(Integer.MIN_VALUE, map.get(0));
- assertEquals(0, map.get(1));
- assertEquals(1, map.get(2));
+ assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 1}, asInts(map));
}
@Test
public void testTwoLineFooBar() throws UnsupportedEncodingException {
final byte[] buf = "foo\nbar\n".getBytes("ISO-8859-1");
final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
- assertEquals(4, map.size());
- assertEquals(Integer.MIN_VALUE, map.get(0));
- assertEquals(0, map.get(1));
- assertEquals(4, map.get(2));
- assertEquals(buf.length, map.get(3));
+ assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 4, buf.length}, asInts(map));
}
@Test
public void testTwoLineNoLF() throws UnsupportedEncodingException {
final byte[] buf = "foo\nbar".getBytes("ISO-8859-1");
final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
- assertEquals(4, map.size());
- assertEquals(Integer.MIN_VALUE, map.get(0));
- assertEquals(0, map.get(1));
- assertEquals(4, map.get(2));
- assertEquals(buf.length, map.get(3));
+ assertArrayEquals(new int[]{Integer.MIN_VALUE, 0, 4, buf.length}, asInts(map));
+ }
+
+ @Test
+ public void testBinary() throws UnsupportedEncodingException {
+ final byte[] buf = "xxxfoo\nb\0ar".getBytes("ISO-8859-1");
+ final IntList map = RawParseUtils.lineMap(buf, 3, buf.length);
+ assertArrayEquals(new int[]{Integer.MIN_VALUE, 3, buf.length}, asInts(map));
}
@Test
public void testFourLineBlanks() throws UnsupportedEncodingException {
final byte[] buf = "foo\n\n\nbar\n".getBytes("ISO-8859-1");
final IntList map = RawParseUtils.lineMap(buf, 0, buf.length);
- assertEquals(6, map.size());
- assertEquals(Integer.MIN_VALUE, map.get(0));
- assertEquals(0, map.get(1));
- assertEquals(4, map.get(2));
- assertEquals(5, map.get(3));
- assertEquals(6, map.get(4));
- assertEquals(buf.length, map.get(5));
+
+ assertArrayEquals(new int[]{
+ Integer.MIN_VALUE, 0, 4, 5, 6, buf.length
+ }, asInts(map));
+ }
+
+ private int[] asInts(IntList l) {
+ int[] result = new int[l.size()];
+ for (int i = 0; i < l.size(); i++) {
+ result[i] = l.get(i);
+ }
+ return result;
}
}
diff --git a/org.eclipse.jgit/.settings/.api_filters b/org.eclipse.jgit/.settings/.api_filters
index da7c122..dbd7547 100644
--- a/org.eclipse.jgit/.settings/.api_filters
+++ b/org.eclipse.jgit/.settings/.api_filters
@@ -16,6 +16,26 @@
</message_arguments>
</filter>
</resource>
+ <resource path="src/org/eclipse/jgit/lib/ReflogEntry.java" type="org.eclipse.jgit.lib.ReflogEntry">
+ <filter comment="adding enum constant does not break binary compatibility" id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.lib.ReflogEntry"/>
+ <message_argument value="PREFIX_CREATED"/>
+ </message_arguments>
+ </filter>
+ <filter comment="adding enum constant does not break binary compatibility" id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.lib.ReflogEntry"/>
+ <message_argument value="PREFIX_FAST_FORWARD"/>
+ </message_arguments>
+ </filter>
+ <filter comment="adding enum constant does not break binary compatibility" id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.lib.ReflogEntry"/>
+ <message_argument value="PREFIX_FORCED_UPDATE"/>
+ </message_arguments>
+ </filter>
+ </resource>
<resource path="src/org/eclipse/jgit/merge/MergeStrategy.java" type="org.eclipse.jgit.merge.MergeStrategy">
<filter comment="OSGi semantic versioning allows breaking implementors of an API in a minor version" id="336695337">
<message_arguments>
@@ -32,4 +52,24 @@
</message_arguments>
</filter>
</resource>
+ <resource path="src/org/eclipse/jgit/transport/http/HttpConnection.java" type="org.eclipse.jgit.transport.http.HttpConnection">
+ <filter id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.transport.http.HttpConnection"/>
+ <message_argument value="HTTP_11_MOVED_TEMP"/>
+ </message_arguments>
+ </filter>
+ <filter id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.transport.http.HttpConnection"/>
+ <message_argument value="HTTP_MOVED_TEMP"/>
+ </message_arguments>
+ </filter>
+ <filter id="403767336">
+ <message_arguments>
+ <message_argument value="org.eclipse.jgit.transport.http.HttpConnection"/>
+ <message_argument value="HTTP_SEE_OTHER"/>
+ </message_arguments>
+ </filter>
+ </resource>
</component>
diff --git a/org.eclipse.jgit/META-INF/MANIFEST.MF b/org.eclipse.jgit/META-INF/MANIFEST.MF
index 478071f..e326081 100644
--- a/org.eclipse.jgit/META-INF/MANIFEST.MF
+++ b/org.eclipse.jgit/META-INF/MANIFEST.MF
@@ -59,6 +59,7 @@
org.eclipse.jgit.ignore;version="4.9.0",
org.eclipse.jgit.ignore.internal;version="4.9.0";x-friends:="org.eclipse.jgit.test",
org.eclipse.jgit.internal;version="4.9.0";x-friends:="org.eclipse.jgit.test,org.eclipse.jgit.http.test",
+ org.eclipse.jgit.internal.fsck;version="4.9.0";x-friends:="org.eclipse.jgit.test",
org.eclipse.jgit.internal.ketch;version="4.9.0";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
org.eclipse.jgit.internal.storage.dfs;version="4.9.0";
x-friends:="org.eclipse.jgit.test,
@@ -73,7 +74,9 @@
org.eclipse.jgit.lfs,
org.eclipse.jgit.pgm,
org.eclipse.jgit.pgm.test",
+ org.eclipse.jgit.internal.storage.io;version="4.9.0";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
org.eclipse.jgit.internal.storage.pack;version="4.9.0";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
+ org.eclipse.jgit.internal.storage.reftable;version="4.9.0";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
org.eclipse.jgit.internal.storage.reftree;version="4.9.0";x-friends:="org.eclipse.jgit.junit,org.eclipse.jgit.test,org.eclipse.jgit.pgm",
org.eclipse.jgit.lib;version="4.9.0";
uses:="org.eclipse.jgit.revwalk,
diff --git a/org.eclipse.jgit/pom.xml b/org.eclipse.jgit/pom.xml
index 1f341e6..8306eb5 100644
--- a/org.eclipse.jgit/pom.xml
+++ b/org.eclipse.jgit/pom.xml
@@ -206,8 +206,8 @@
<pluginManagement>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
+ <groupId>com.github.hazendaz.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
<configuration>
<excludeFilterFile>findBugs/FindBugsExcludeFilter.xml</excludeFilterFile>
</configuration>
diff --git a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
index 6e793da..8d39314 100644
--- a/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
+++ b/org.eclipse.jgit/resources/org/eclipse/jgit/internal/JGitText.properties
@@ -365,12 +365,17 @@
invalidPathPeriodAtEndWindows=Invalid path (period at end is ignored by Windows): {0}
invalidPathSpaceAtEndWindows=Invalid path (space at end is ignored by Windows): {0}
invalidPathReservedOnWindows=Invalid path (''{0}'' is reserved on Windows): {1}
+invalidRedirectLocation=Invalid redirect location {0} -> {1}
invalidReflogRevision=Invalid reflog revision: {0}
invalidRefName=Invalid ref name: {0}
+invalidReftableBlock=Invalid reftable block
+invalidReftableCRC=Invalid reftable CRC-32
+invalidReftableFile=Invalid reftable file
invalidRemote=Invalid remote: {0}
invalidRepositoryStateNoHead=Invalid repository --- cannot read HEAD
invalidShallowObject=invalid shallow object {0}, expected commit
invalidStageForPath=Invalid stage {0} for path {1}
+invalidSystemProperty=Invalid system property ''{0}'': ''{1}''; using default value {2}
invalidTagOption=Invalid tag option: {0}
invalidTimeout=Invalid timeout: {0}
invalidTimeUnitValue2=Invalid time unit value: {0}.{1}={2}
@@ -409,8 +414,11 @@
mergeRecursiveTooManyMergeBasesFor = "More than {0} merge bases for:\n a {1}\n b {2} found:\n count {3}"
messageAndTaggerNotAllowedInUnannotatedTags = Unannotated tags cannot have a message or tagger
minutesAgo={0} minutes ago
+mismatchOffset=mismatch offset for object {0}
+mismatchCRC=mismatch CRC for object {0}
missingAccesskey=Missing accesskey.
missingConfigurationForKey=No value for key {0} found in configuration
+missingCRC=missing CRC for object {0}
missingDeltaBase=delta base
missingForwardImageInGITBinaryPatch=Missing forward-image in GIT binary patch
missingObject=Missing {0} {1}
@@ -470,6 +478,7 @@
openingConnection=Opening connection
operationCanceled=Operation {0} was canceled
outputHasAlreadyBeenStarted=Output has already been started.
+overflowedReftableBlock=Overflowed reftable block
packChecksumMismatch=Pack checksum mismatch detected for pack file {0}
packCorruptedWhileWritingToFilesystem=Pack corrupted while writing to filesystem
packDoesNotMatchIndex=Pack {0} does not match index
@@ -497,6 +506,7 @@
pathIsNotInWorkingDir=Path is not in working dir
pathNotConfigured=Submodule path is not configured
peeledLineBeforeRef=Peeled line before ref.
+peeledRefIsRequired=Peeled ref is required.
peerDidNotSupplyACompleteObjectGraph=peer did not supply a complete object graph
personIdentEmailNonNull=E-mail address of PersonIdent must not be null.
personIdentNameNonNull=Name of PersonIdent must not be null.
@@ -525,6 +535,11 @@
receivePackInvalidLimit=Illegal limit parameter value {0}
receivePackTooLarge=Pack exceeds the limit of {0} bytes, rejecting the pack
receivingObjects=Receiving objects
+redirectBlocked=Redirection blocked: redirect {0} -> {1} not allowed
+redirectHttp=URI ''{0}'': following HTTP redirect #{1} {2} -> {3}
+redirectLimitExceeded=Redirected more than {0} times; aborted at {1} -> {2}
+redirectLocationMissing=Invalid redirect: no redirect location for {0}
+redirectsOff=Cannot redirect because http.followRedirects is false (HTTP status {0})
refAlreadyExists=already exists
refAlreadyExists1=Ref {0} already exists
reflogEntryNotFound=Entry {0} not found in reflog for ''{1}''
@@ -572,7 +587,7 @@
selectingCommits=Selecting commits
sequenceTooLargeForDiffAlgorithm=Sequence too large for difference algorithm.
serviceNotEnabledNoName=Service not enabled
-serviceNotPermitted={0} not permitted
+serviceNotPermitted={1} not permitted on ''{0}''
sha1CollisionDetected1=SHA-1 collision detected on {0}
shallowCommitsAlreadyInitialized=Shallow commits have already been initialized
shallowPacksRequireDepthWalk=Shallow packs require a DepthWalk
@@ -653,6 +668,7 @@
unableToStore=Unable to store {0}.
unableToWrite=Unable to write {0}
unauthorized=Unauthorized
+underflowedReftableBlock=Underflowed reftable block
unencodeableFile=Unencodable file: {0}
unexpectedCompareResult=Unexpected metadata comparison result: {0}
unexpectedEndOfConfigFile=Unexpected end of config file
@@ -667,6 +683,7 @@
unknownHost=unknown host
unknownIndexVersionOrCorruptIndex=Unknown index version (or corrupt index): {0}
unknownObject=unknown object
+unknownObjectInIndex=unknown object {0} found in index but not in pack file
unknownObjectType=Unknown object type {0}.
unknownObjectType2=unknown
unknownRepositoryFormat=Unknown repository format
@@ -688,6 +705,7 @@
unsupportedOperationNotAddAtEnd=Not add-at-end: {0}
unsupportedPackIndexVersion=Unsupported pack index version {0}
unsupportedPackVersion=Unsupported pack version {0}.
+unsupportedReftableVersion=Unsupported reftable version {0}.
unsupportedRepositoryDescription=Repository description not supported
updateRequiresOldIdAndNewId=Update requires both old ID and new ID to be nonzero
updatingHeadFailed=Updating HEAD failed
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
index 21d6283..6b20da3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CheckoutCommand.java
@@ -47,8 +47,10 @@
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.EnumSet;
+import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
+import java.util.Set;
import org.eclipse.jgit.api.CheckoutResult.Status;
import org.eclipse.jgit.api.errors.CheckoutConflictException;
@@ -66,6 +68,7 @@
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.AmbiguousObjectException;
import org.eclipse.jgit.errors.UnmergedPathException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
@@ -175,6 +178,8 @@ private Stage(int number) {
private boolean checkoutAllPaths;
+ private Set<String> actuallyModifiedPaths;
+
/**
* @param repo
*/
@@ -410,7 +415,8 @@ public CheckoutCommand setAllPaths(boolean all) {
}
/**
- * Checkout paths into index and working directory
+ * Checkout paths into index and working directory, firing a
+ * {@link WorkingTreeModifiedEvent} if the working tree was modified.
*
* @return this instance
* @throws IOException
@@ -418,6 +424,7 @@ public CheckoutCommand setAllPaths(boolean all) {
*/
protected CheckoutCommand checkoutPaths() throws IOException,
RefNotFoundException {
+ actuallyModifiedPaths = new HashSet<>();
DirCache dc = repo.lockDirCache();
try (RevWalk revWalk = new RevWalk(repo);
TreeWalk treeWalk = new TreeWalk(repo,
@@ -432,7 +439,16 @@ protected CheckoutCommand checkoutPaths() throws IOException,
checkoutPathsFromCommit(treeWalk, dc, commit);
}
} finally {
- dc.unlock();
+ try {
+ dc.unlock();
+ } finally {
+ WorkingTreeModifiedEvent event = new WorkingTreeModifiedEvent(
+ actuallyModifiedPaths, null);
+ actuallyModifiedPaths = null;
+ if (!event.isEmpty()) {
+ repo.fireEvent(event);
+ }
+ }
}
return this;
}
@@ -461,9 +477,11 @@ public void apply(DirCacheEntry ent) {
int stage = ent.getStage();
if (stage > DirCacheEntry.STAGE_0) {
if (checkoutStage != null) {
- if (stage == checkoutStage.number)
+ if (stage == checkoutStage.number) {
checkoutPath(ent, r, new CheckoutMetadata(
eolStreamType, filterCommand));
+ actuallyModifiedPaths.add(path);
+ }
} else {
UnmergedPathException e = new UnmergedPathException(
ent);
@@ -472,6 +490,7 @@ public void apply(DirCacheEntry ent) {
} else {
checkoutPath(ent, r, new CheckoutMetadata(eolStreamType,
filterCommand));
+ actuallyModifiedPaths.add(path);
}
}
});
@@ -492,13 +511,15 @@ private void checkoutPathsFromCommit(TreeWalk treeWalk, DirCache dc,
final EolStreamType eolStreamType = treeWalk.getEolStreamType();
final String filterCommand = treeWalk
.getFilterCommand(Constants.ATTR_FILTER_TYPE_SMUDGE);
- editor.add(new PathEdit(treeWalk.getPathString()) {
+ final String path = treeWalk.getPathString();
+ editor.add(new PathEdit(path) {
@Override
public void apply(DirCacheEntry ent) {
ent.setObjectId(blobId);
ent.setFileMode(mode);
checkoutPath(ent, r,
new CheckoutMetadata(eolStreamType, filterCommand));
+ actuallyModifiedPaths.add(path);
}
});
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
index c58efb1..e41a03b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CleanCommand.java
@@ -54,6 +54,7 @@
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.JGitInternalException;
import org.eclipse.jgit.errors.NoWorkTreeException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FileUtils;
@@ -135,6 +136,10 @@ else if (fs.isDirectory(f))
}
} catch (IOException e) {
throw new JGitInternalException(e.getMessage(), e);
+ } finally {
+ if (!files.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(null, files));
+ }
}
return files;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
index d450c64..bde8e63 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CloneCommand.java
@@ -50,6 +50,7 @@
import java.util.Collection;
import java.util.List;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRemoteException;
import org.eclipse.jgit.api.errors.JGitInternalException;
@@ -157,6 +158,16 @@ public CloneCommand() {
}
/**
+ * Get the git directory. This is primarily used for tests.
+ *
+ * @return the git directory
+ */
+ @Nullable
+ File getDirectory() {
+ return directory;
+ }
+
+ /**
* Executes the {@code Clone} command.
*
* The Git instance returned by this command needs to be closed by the
@@ -232,9 +243,9 @@ private static boolean isNonEmptyDirectory(File dir) {
return false;
}
- private void verifyDirectories(URIish u) {
+ void verifyDirectories(URIish u) {
if (directory == null && gitDir == null) {
- directory = new File(u.getHumanishName(), Constants.DOT_GIT);
+ directory = new File(u.getHumanishName() + (bare ? Constants.DOT_GIT_EXT : "")); //$NON-NLS-1$
}
directoryExistsInitially = directory != null && directory.exists();
gitDirExistsInitially = gitDir != null && gitDir.exists();
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
index 274ece6..e29fc05 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/CommitCommand.java
@@ -482,7 +482,7 @@ private DirCache createTemporaryIndex(ObjectId headId, DirCache index,
JGitText.get().entryNotFoundByPath, only.get(i)));
// there must be at least one change
- if (emptyCommit)
+ if (emptyCommit && !allowEmpty.booleanValue())
// Would like to throw a EmptyCommitException. But this would break the API
// TODO(ch): Change this in the next release
throw new JGitInternalException(JGitText.get().emptyCommit);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
index bae54ce..75460fb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/MergeCommand.java
@@ -64,6 +64,7 @@
import org.eclipse.jgit.api.errors.NoMessageException;
import org.eclipse.jgit.api.errors.WrongRepositoryStateException;
import org.eclipse.jgit.dircache.DirCacheCheckout;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config.ConfigEnum;
@@ -355,6 +356,10 @@ public MergeResult call() throws GitAPIException, NoHeadException,
.getMergeResults();
failingPaths = resolveMerger.getFailingPaths();
unmergedPaths = resolveMerger.getUnmergedPaths();
+ if (!resolveMerger.getModifiedFiles().isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(
+ resolveMerger.getModifiedFiles(), null));
+ }
} else
noProblems = merger.merge(headCommit, srcCommit);
refLogMessage.append(": Merge made by "); //$NON-NLS-1$
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
index 04caa0f..394bea5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/ReflogCommand.java
@@ -109,4 +109,4 @@ public Collection<ReflogEntry> call() throws GitAPIException,
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
index 9e2cf31..48c23f5 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/RmCommand.java
@@ -44,8 +44,10 @@
import java.io.File;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
+import java.util.List;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.JGitInternalException;
@@ -53,6 +55,7 @@
import org.eclipse.jgit.dircache.DirCache;
import org.eclipse.jgit.dircache.DirCacheBuildIterator;
import org.eclipse.jgit.dircache.DirCacheBuilder;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.FileMode;
@@ -145,6 +148,7 @@ public DirCache call() throws GitAPIException,
checkCallable();
DirCache dc = null;
+ List<String> actuallyDeletedFiles = new ArrayList<>();
try (final TreeWalk tw = new TreeWalk(repo)) {
dc = repo.lockDirCache();
DirCacheBuilder builder = dc.builder();
@@ -157,11 +161,14 @@ public DirCache call() throws GitAPIException,
if (!cached) {
final FileMode mode = tw.getFileMode(0);
if (mode.getObjectType() == Constants.OBJ_BLOB) {
+ String relativePath = tw.getPathString();
final File path = new File(repo.getWorkTree(),
- tw.getPathString());
+ relativePath);
// Deleting a blob is simply a matter of removing
// the file or symlink named by the tree entry.
- delete(path);
+ if (delete(path)) {
+ actuallyDeletedFiles.add(relativePath);
+ }
}
}
}
@@ -171,16 +178,28 @@ public DirCache call() throws GitAPIException,
throw new JGitInternalException(
JGitText.get().exceptionCaughtDuringExecutionOfRmCommand, e);
} finally {
- if (dc != null)
- dc.unlock();
+ try {
+ if (dc != null) {
+ dc.unlock();
+ }
+ } finally {
+ if (!actuallyDeletedFiles.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(null,
+ actuallyDeletedFiles));
+ }
+ }
}
return dc;
}
- private void delete(File p) {
- while (p != null && !p.equals(repo.getWorkTree()) && p.delete())
+ private boolean delete(File p) {
+ boolean deleted = false;
+ while (p != null && !p.equals(repo.getWorkTree()) && p.delete()) {
+ deleted = true;
p = p.getParentFile();
+ }
+ return deleted;
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
index 10ec2a6..b56fb25 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashApplyCommand.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012, GitHub Inc.
+ * Copyright (C) 2012, 2017 GitHub Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -44,6 +44,9 @@
import java.io.IOException;
import java.text.MessageFormat;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.api.errors.InvalidRefNameException;
@@ -58,6 +61,7 @@
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.CheckoutConflictException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.EolStreamType;
@@ -198,7 +202,13 @@ public ObjectId call() throws GitAPIException,
"stash" }); //$NON-NLS-1$
merger.setBase(stashHeadCommit);
merger.setWorkingTreeIterator(new FileTreeIterator(repo));
- if (merger.merge(headCommit, stashCommit)) {
+ boolean mergeSucceeded = merger.merge(headCommit, stashCommit);
+ List<String> modifiedByMerge = merger.getModifiedFiles();
+ if (!modifiedByMerge.isEmpty()) {
+ repo.fireEvent(
+ new WorkingTreeModifiedEvent(modifiedByMerge, null));
+ }
+ if (mergeSucceeded) {
DirCache dc = repo.lockDirCache();
DirCacheCheckout dco = new DirCacheCheckout(repo, headTree,
dc, merger.getResultTreeId());
@@ -329,6 +339,7 @@ private void resetIndex(RevTree tree) throws IOException {
private void resetUntracked(RevTree tree) throws CheckoutConflictException,
IOException {
+ Set<String> actuallyModifiedPaths = new HashSet<>();
// TODO maybe NameConflictTreeWalk ?
try (TreeWalk walk = new TreeWalk(repo)) {
walk.addTree(tree);
@@ -361,6 +372,12 @@ private void resetUntracked(RevTree tree) throws CheckoutConflictException,
checkoutPath(entry, reader,
new CheckoutMetadata(eolStreamType, null));
+ actuallyModifiedPaths.add(entry.getPathString());
+ }
+ } finally {
+ if (!actuallyModifiedPaths.isEmpty()) {
+ repo.fireEvent(new WorkingTreeModifiedEvent(
+ actuallyModifiedPaths, null));
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
index 681f8e6..21b06e6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/StashCreateCommand.java
@@ -62,6 +62,7 @@
import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.dircache.DirCacheIterator;
import org.eclipse.jgit.errors.UnmergedPathException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Constants;
@@ -240,6 +241,7 @@ private Ref getHead() throws GitAPIException {
public RevCommit call() throws GitAPIException {
checkCallable();
+ List<String> deletedFiles = new ArrayList<>();
Ref head = getHead();
try (ObjectReader reader = repo.newObjectReader()) {
RevCommit headCommit = parseCommit(reader, head.getObjectId());
@@ -377,9 +379,11 @@ public void apply(DirCacheEntry ent) {
// Remove untracked files
if (includeUntracked) {
for (DirCacheEntry entry : untracked) {
+ String repoRelativePath = entry.getPathString();
File file = new File(repo.getWorkTree(),
- entry.getPathString());
+ repoRelativePath);
FileUtils.delete(file);
+ deletedFiles.add(repoRelativePath);
}
}
@@ -394,6 +398,11 @@ public void apply(DirCacheEntry ent) {
return parseCommit(reader, commitId);
} catch (IOException e) {
throw new JGitInternalException(JGitText.get().stashFailed, e);
+ } finally {
+ if (!deletedFiles.isEmpty()) {
+ repo.fireEvent(
+ new WorkingTreeModifiedEvent(null, deletedFiles));
+ }
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
index f97dce9..b5c0b15 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/api/SubmoduleSyncCommand.java
@@ -162,4 +162,4 @@ public Map<String, String> call() throws GitAPIException {
throw new JGitInternalException(e.getMessage(), e);
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
index 905ad76..c256b73 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/Attribute.java
@@ -193,4 +193,4 @@ public String toString() {
return key + "=" + value; //$NON-NLS-1$
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
index 3bf4179..8d928e3 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesHandler.java
@@ -144,7 +144,8 @@ public Attributes getAttributes() throws IOException {
mergeInfoAttributes(entryPath, isDirectory, attributes);
// Gets the attributes located on the current entry path
- mergePerDirectoryEntryAttributes(entryPath, isDirectory,
+ mergePerDirectoryEntryAttributes(entryPath, entryPath.lastIndexOf('/'),
+ isDirectory,
treeWalk.getTree(WorkingTreeIterator.class),
treeWalk.getTree(DirCacheIterator.class),
treeWalk.getTree(CanonicalTreeParser.class),
@@ -206,6 +207,8 @@ private void mergeInfoAttributes(String entryPath, boolean isDirectory,
* the path to test. The path must be relative to this attribute
* node's own repository path, and in repository path format
* (uses '/' and not '\').
+ * @param nameRoot
+ *            index of the '/' preceding the current level, or -1 if none
* @param isDirectory
* true if the target item is a directory.
* @param workingTreeIterator
@@ -217,7 +220,7 @@ private void mergeInfoAttributes(String entryPath, boolean isDirectory,
* @throws IOException
*/
private void mergePerDirectoryEntryAttributes(String entryPath,
- boolean isDirectory,
+ int nameRoot, boolean isDirectory,
@Nullable WorkingTreeIterator workingTreeIterator,
@Nullable DirCacheIterator dirCacheIterator,
@Nullable CanonicalTreeParser otherTree, Attributes result)
@@ -228,9 +231,12 @@ private void mergePerDirectoryEntryAttributes(String entryPath,
AttributesNode attributesNode = attributesNode(
treeWalk, workingTreeIterator, dirCacheIterator, otherTree);
if (attributesNode != null) {
- mergeAttributes(attributesNode, entryPath, isDirectory, result);
+ mergeAttributes(attributesNode,
+ entryPath.substring(nameRoot + 1), isDirectory,
+ result);
}
- mergePerDirectoryEntryAttributes(entryPath, isDirectory,
+ mergePerDirectoryEntryAttributes(entryPath,
+ entryPath.lastIndexOf('/', nameRoot - 1), isDirectory,
parentOf(workingTreeIterator), parentOf(dirCacheIterator),
parentOf(otherTree), result);
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
index c9c69db..b88a16e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/attributes/AttributesRule.java
@@ -225,4 +225,4 @@ public String toString() {
return sb.toString();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
index 324b99e..ee70949 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffConfig.java
@@ -54,12 +54,7 @@
/** Keeps track of diff related configuration options. */
public class DiffConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<DiffConfig> KEY = new SectionParser<DiffConfig>() {
- @Override
- public DiffConfig parse(final Config cfg) {
- return new DiffConfig(cfg);
- }
- };
+ public static final Config.SectionParser<DiffConfig> KEY = DiffConfig::new;
/** Permissible values for {@code diff.renames}. */
public static enum RenameDetectionType {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
index e1dfcff..5eb1942 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/diff/DiffEntry.java
@@ -525,4 +525,4 @@ public String toString() {
buf.append("]");
return buf.toString();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
index aed76ac..f8c23ca 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/dircache/DirCacheCheckout.java
@@ -50,6 +50,7 @@
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -61,6 +62,7 @@
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.IndexWriteException;
import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.CoreConfig.AutoCRLF;
@@ -85,6 +87,7 @@
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FS.ExecutionResult;
import org.eclipse.jgit.util.FileUtils;
+import org.eclipse.jgit.util.IntList;
import org.eclipse.jgit.util.RawParseUtils;
import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.io.EolStreamTypeUtil;
@@ -151,6 +154,8 @@ public CheckoutMetadata(EolStreamType eolStreamType,
private boolean emptyDirCache;
+ private boolean performingCheckout;
+
/**
* @return a list of updated paths and smudgeFilterCommands
*/
@@ -432,7 +437,8 @@ void processEntry(CanonicalTreeParser m, DirCacheBuildIterator i,
}
/**
- * Execute this checkout
+ * Execute this checkout. A {@link WorkingTreeModifiedEvent} is fired if the
+ * working tree was modified, even if the checkout fails.
*
* @return <code>false</code> if this method could not delete all the files
* which should be deleted (e.g. because one of the files was
@@ -448,7 +454,17 @@ public boolean checkout() throws IOException {
try {
return doCheckout();
} finally {
- dc.unlock();
+ try {
+ dc.unlock();
+ } finally {
+ if (performingCheckout) {
+ WorkingTreeModifiedEvent event = new WorkingTreeModifiedEvent(
+ getUpdated().keySet(), getRemoved());
+ if (!event.isEmpty()) {
+ repo.fireEvent(event);
+ }
+ }
+ }
}
}
@@ -472,11 +488,13 @@ private boolean doCheckout() throws CorruptObjectException, IOException,
// update our index
builder.finish();
+ performingCheckout = true;
File file = null;
String last = null;
// when deleting files process them in the opposite order as they have
// been reported. This ensures the files are deleted before we delete
// their parent folders
+ IntList nonDeleted = new IntList();
for (int i = removed.size() - 1; i >= 0; i--) {
String r = removed.get(i);
file = new File(repo.getWorkTree(), r);
@@ -486,25 +504,47 @@ private boolean doCheckout() throws CorruptObjectException, IOException,
// a submodule, in which case we shall not attempt
// to delete it. A submodule is not empty, so it
// is safe to check this after a failed delete.
- if (!repo.getFS().isDirectory(file))
+ if (!repo.getFS().isDirectory(file)) {
+ nonDeleted.add(i);
toBeDeleted.add(r);
+ }
} else {
if (last != null && !isSamePrefix(r, last))
removeEmptyParents(new File(repo.getWorkTree(), last));
last = r;
}
}
- if (file != null)
+ if (file != null) {
removeEmptyParents(file);
-
- for (Map.Entry<String, CheckoutMetadata> e : updated.entrySet()) {
- String path = e.getKey();
- CheckoutMetadata meta = e.getValue();
- DirCacheEntry entry = dc.getEntry(path);
- if (!FileMode.GITLINK.equals(entry.getRawMode()))
- checkoutEntry(repo, entry, objectReader, false, meta);
}
-
+ removed = filterOut(removed, nonDeleted);
+ nonDeleted = null;
+ Iterator<Map.Entry<String, CheckoutMetadata>> toUpdate = updated
+ .entrySet().iterator();
+ Map.Entry<String, CheckoutMetadata> e = null;
+ try {
+ while (toUpdate.hasNext()) {
+ e = toUpdate.next();
+ String path = e.getKey();
+ CheckoutMetadata meta = e.getValue();
+ DirCacheEntry entry = dc.getEntry(path);
+ if (!FileMode.GITLINK.equals(entry.getRawMode())) {
+ checkoutEntry(repo, entry, objectReader, false, meta);
+ }
+ e = null;
+ }
+ } catch (Exception ex) {
+ // We didn't actually modify the current entry nor any that
+ // might follow.
+ if (e != null) {
+ toUpdate.remove();
+ }
+ while (toUpdate.hasNext()) {
+ e = toUpdate.next();
+ toUpdate.remove();
+ }
+ throw ex;
+ }
// commit the index builder - a new index is persisted
if (!builder.commit())
throw new IndexWriteException();
@@ -512,6 +552,36 @@ private boolean doCheckout() throws CorruptObjectException, IOException,
return toBeDeleted.size() == 0;
}
+ private static ArrayList<String> filterOut(ArrayList<String> strings,
+ IntList indicesToRemove) {
+ int n = indicesToRemove.size();
+ if (n == strings.size()) {
+ return new ArrayList<>(0);
+ }
+ switch (n) {
+ case 0:
+ return strings;
+ case 1:
+ strings.remove(indicesToRemove.get(0));
+ return strings;
+ default:
+ int length = strings.size();
+ ArrayList<String> result = new ArrayList<>(length - n);
+ // Process indicesToRemove from the back; we know that it
+ // contains indices in descending order.
+ int j = n - 1;
+ int idx = indicesToRemove.get(j);
+ for (int i = 0; i < length; i++) {
+ if (i == idx) {
+ idx = (--j >= 0) ? indicesToRemove.get(j) : -1;
+ } else {
+ result.add(strings.get(i));
+ }
+ }
+ return result;
+ }
+ }
+
private static boolean isSamePrefix(String a, String b) {
int as = a.lastIndexOf('/');
int bs = b.lastIndexOf('/');
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java
new file mode 100644
index 0000000..65d83b3
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/CorruptPackIndexException.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.errors;
+
+import org.eclipse.jgit.annotations.Nullable;
+
+/**
+ * Exception thrown when a corrupt pack index file is encountered.
+ *
+ * @since 4.9
+ */
+public class CorruptPackIndexException extends Exception {
+ private static final long serialVersionUID = 1L;
+
+ /** The error type of a corrupt index file. */
+ public enum ErrorType {
+ /** Offset does not match index in pack file. */
+ MISMATCH_OFFSET,
+ /** CRC does not match CRC of the object data in pack file. */
+ MISMATCH_CRC,
+ /** CRC is not present in index file. */
+ MISSING_CRC,
+ /** Object in pack is not present in index file. */
+ MISSING_OBJ,
+ /** Object in index file is not present in pack file. */
+ UNKNOWN_OBJ,
+ }
+
+ private ErrorType errorType;
+
+ /**
+ * Report a specific error condition discovered in an index file.
+ *
+ * @param message
+ * the error message.
+ * @param errorType
+ * the error type of corruption.
+ */
+ public CorruptPackIndexException(String message, ErrorType errorType) {
+ super(message);
+ this.errorType = errorType;
+ }
+
+ /**
+ * Specifies the reason why the index file is considered corrupt.
+ *
+ * @return error condition or null.
+ */
+ @Nullable
+ public ErrorType getErrorType() {
+ return errorType;
+ }
+}
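
The new CorruptPackIndexException pairs its message with an ErrorType so that fsck-style callers can distinguish the corruption cases named by the new JGitText messages (mismatchOffset, mismatchCRC, missingCRC, unknownObjectInIndex). The sketch below is illustrative only and not part of the patch; the FsckReporter class and its report method are hypothetical. It simply shows a consumer branching on the nullable value returned by getErrorType():

```java
// Hypothetical consumer of the exception introduced above; not part of the patch.
import org.eclipse.jgit.errors.CorruptPackIndexException;
import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;

class FsckReporter {
	void report(CorruptPackIndexException e) {
		ErrorType type = e.getErrorType(); // may be null
		if (type == null) {
			System.err.println("corrupt pack index: " + e.getMessage());
			return;
		}
		switch (type) {
		case MISMATCH_OFFSET:
		case MISMATCH_CRC:
		case MISSING_CRC:
			System.err.println("index disagrees with pack data: " + e.getMessage());
			break;
		case MISSING_OBJ:
		case UNKNOWN_OBJ:
			System.err.println("index and pack list different objects: " + e.getMessage());
			break;
		}
	}
}
```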
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
index b5b1af5..ece76ed 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TooLargeObjectInPackException.java
@@ -92,4 +92,4 @@ public TooLargeObjectInPackException(long objectSize,
public TooLargeObjectInPackException(URIish uri, String s) {
super(uri.setPass(null) + ": " + s); //$NON-NLS-1$
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
index 4f297b9..6cb332d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/errors/TranslationBundleLoadingException.java
@@ -69,4 +69,4 @@ public TranslationBundleLoadingException(Class bundleClass, Locale locale, Excep
+ bundleClass.getName() + ", " + locale.toString() + "]", //$NON-NLS-1$ //$NON-NLS-2$
bundleClass, locale, cause);
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
index 12ef533..cea03db 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/ListenerList.java
@@ -53,6 +53,19 @@ public class ListenerList {
private final ConcurrentMap<Class<? extends RepositoryListener>, CopyOnWriteArrayList<ListenerHandle>> lists = new ConcurrentHashMap<>();
/**
+ * Register a {@link WorkingTreeModifiedListener}.
+ *
+ * @param listener
+ * the listener implementation.
+ * @return handle to later remove the listener.
+ * @since 4.9
+ */
+ public ListenerHandle addWorkingTreeModifiedListener(
+ WorkingTreeModifiedListener listener) {
+ return addListener(WorkingTreeModifiedListener.class, listener);
+ }
+
+ /**
* Register an IndexChangedListener.
*
* @param listener
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java
new file mode 100644
index 0000000..7a53233
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedEvent.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.events;
+
+import java.util.Collections;
+import java.util.Collection;
+
+import org.eclipse.jgit.annotations.NonNull;
+
+/**
+ * A {@link RepositoryEvent} describing changes to the working tree. It is fired
+ * whenever a {@link org.eclipse.jgit.dircache.DirCacheCheckout} modifies
+ * (adds/deletes/updates) files in the working tree.
+ *
+ * @since 4.9
+ */
+public class WorkingTreeModifiedEvent
+ extends RepositoryEvent<WorkingTreeModifiedListener> {
+
+ private Collection<String> modified;
+
+ private Collection<String> deleted;
+
+ /**
+ * Creates a new {@link WorkingTreeModifiedEvent} with the given
+ * collections.
+ *
+ * @param modified
+ * repository-relative paths that were added or updated
+ * @param deleted
+ * repository-relative paths that were deleted
+ */
+ public WorkingTreeModifiedEvent(Collection<String> modified,
+ Collection<String> deleted) {
+ this.modified = modified;
+ this.deleted = deleted;
+ }
+
+ /**
+ * Determines whether there are any changes recorded in this event.
+ *
+ * @return {@code true} if no files were modified or deleted, {@code false}
+ * otherwise
+ */
+ public boolean isEmpty() {
+ return (modified == null || modified.isEmpty())
+ && (deleted == null || deleted.isEmpty());
+ }
+
+ /**
+ * Retrieves the {@link Collection} of repository-relative paths of files
+ * that were modified (added or updated).
+ *
+ * @return the collection of modified paths; never {@code null}
+ */
+ public @NonNull Collection<String> getModified() {
+ Collection<String> result = modified;
+ if (result == null) {
+ result = Collections.emptyList();
+ modified = result;
+ }
+ return result;
+ }
+
+ /**
+ * Retrieves the {@link Collection} of repository-relative paths of files
+ * that were deleted.
+ *
+ * @return the collection of deleted paths; never {@code null}
+ */
+ public @NonNull Collection<String> getDeleted() {
+ Collection<String> result = deleted;
+ if (result == null) {
+ result = Collections.emptyList();
+ deleted = result;
+ }
+ return result;
+ }
+
+ @Override
+ public Class<WorkingTreeModifiedListener> getListenerType() {
+ return WorkingTreeModifiedListener.class;
+ }
+
+ @Override
+ public void dispatch(WorkingTreeModifiedListener listener) {
+ listener.onWorkingTreeModified(this);
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java
new file mode 100644
index 0000000..402a900
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/events/WorkingTreeModifiedListener.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.events;
+
+/**
+ * Receives {@link WorkingTreeModifiedEvent}s, which are fired whenever a
+ * {@link org.eclipse.jgit.dircache.DirCacheCheckout} modifies
+ * (adds/deletes/updates) files in the working tree.
+ *
+ * @since 4.9
+ */
+public interface WorkingTreeModifiedListener extends RepositoryListener {
+
+ /**
+ * Respond to working tree modifications.
+ *
+ * @param event the {@link WorkingTreeModifiedEvent} describing the changes
+ */
+ void onWorkingTreeModified(WorkingTreeModifiedEvent event);
+}
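
WorkingTreeModifiedEvent and WorkingTreeModifiedListener form the notification path that the command changes earlier in this patch (CheckoutCommand, CleanCommand, RmCommand, MergeCommand, the stash commands and DirCacheCheckout) fire into. A minimal subscription sketch follows, assuming an already opened Repository; the WorkingTreeWatcher class itself is hypothetical and not part of the patch:

```java
// Illustrative sketch, not part of the patch: registering the new listener
// on an existing Repository instance.
import org.eclipse.jgit.events.ListenerHandle;
import org.eclipse.jgit.events.WorkingTreeModifiedEvent;
import org.eclipse.jgit.events.WorkingTreeModifiedListener;
import org.eclipse.jgit.lib.Repository;

class WorkingTreeWatcher implements WorkingTreeModifiedListener {
	static ListenerHandle install(Repository repo) {
		// Registers on the repository's listener list; the returned handle
		// can later be used to remove the listener again.
		return repo.getListenerList()
				.addWorkingTreeModifiedListener(new WorkingTreeWatcher());
	}

	@Override
	public void onWorkingTreeModified(WorkingTreeModifiedEvent event) {
		// Paths are repository-relative; the collections are never null.
		event.getModified().forEach(p -> System.out.println("changed: " + p));
		event.getDeleted().forEach(p -> System.out.println("deleted: " + p));
	}
}
```

The handle returned by addWorkingTreeModifiedListener can be kept and used to unregister the watcher later via ListenerHandle.remove().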
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java b/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
index 62a6749..b684dd6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/hooks/GitHook.java
@@ -167,4 +167,4 @@ protected void doRun() throws AbortedByHookException {
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
index da482fa..79df151 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/ignore/internal/Strings.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2014, Andrey Loskutov <loskutov@gmx.de>
+ * Copyright (C) 2014, 2017 Andrey Loskutov <loskutov@gmx.de>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -123,12 +123,15 @@ public static boolean isDirectoryPattern(String pattern) {
static int count(String s, char c, boolean ignoreFirstLast) {
int start = 0;
int count = 0;
- while (true) {
+ int length = s.length();
+ while (start < length) {
start = s.indexOf(c, start);
- if (start == -1)
+ if (start == -1) {
break;
- if (!ignoreFirstLast || (start != 0 && start != s.length()))
+ }
+ if (!ignoreFirstLast || (start != 0 && start != length - 1)) {
count++;
+ }
start++;
}
return count;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
index ea752b9..07666eb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/JGitText.java
@@ -424,11 +424,16 @@ public static JGitText get() {
/***/ public String invalidPathPeriodAtEndWindows;
/***/ public String invalidPathSpaceAtEndWindows;
/***/ public String invalidPathReservedOnWindows;
+ /***/ public String invalidRedirectLocation;
/***/ public String invalidReflogRevision;
/***/ public String invalidRefName;
+ /***/ public String invalidReftableBlock;
+ /***/ public String invalidReftableCRC;
+ /***/ public String invalidReftableFile;
/***/ public String invalidRemote;
/***/ public String invalidShallowObject;
/***/ public String invalidStageForPath;
+ /***/ public String invalidSystemProperty;
/***/ public String invalidTagOption;
/***/ public String invalidTimeout;
/***/ public String invalidTimeUnitValue2;
@@ -468,8 +473,11 @@ public static JGitText get() {
/***/ public String mergeRecursiveTooManyMergeBasesFor;
/***/ public String messageAndTaggerNotAllowedInUnannotatedTags;
/***/ public String minutesAgo;
+ /***/ public String mismatchOffset;
+ /***/ public String mismatchCRC;
/***/ public String missingAccesskey;
/***/ public String missingConfigurationForKey;
+ /***/ public String missingCRC;
/***/ public String missingDeltaBase;
/***/ public String missingForwardImageInGITBinaryPatch;
/***/ public String missingObject;
@@ -529,6 +537,7 @@ public static JGitText get() {
/***/ public String openingConnection;
/***/ public String operationCanceled;
/***/ public String outputHasAlreadyBeenStarted;
+ /***/ public String overflowedReftableBlock;
/***/ public String packChecksumMismatch;
/***/ public String packCorruptedWhileWritingToFilesystem;
/***/ public String packDoesNotMatchIndex;
@@ -556,6 +565,7 @@ public static JGitText get() {
/***/ public String pathIsNotInWorkingDir;
/***/ public String pathNotConfigured;
/***/ public String peeledLineBeforeRef;
+ /***/ public String peeledRefIsRequired;
/***/ public String peerDidNotSupplyACompleteObjectGraph;
/***/ public String personIdentEmailNonNull;
/***/ public String personIdentNameNonNull;
@@ -584,6 +594,11 @@ public static JGitText get() {
/***/ public String receivePackInvalidLimit;
/***/ public String receivePackTooLarge;
/***/ public String receivingObjects;
+ /***/ public String redirectBlocked;
+ /***/ public String redirectHttp;
+ /***/ public String redirectLimitExceeded;
+ /***/ public String redirectLocationMissing;
+ /***/ public String redirectsOff;
/***/ public String refAlreadyExists;
/***/ public String refAlreadyExists1;
/***/ public String reflogEntryNotFound;
@@ -712,6 +727,7 @@ public static JGitText get() {
/***/ public String unableToStore;
/***/ public String unableToWrite;
/***/ public String unauthorized;
+ /***/ public String underflowedReftableBlock;
/***/ public String unencodeableFile;
/***/ public String unexpectedCompareResult;
/***/ public String unexpectedEndOfConfigFile;
@@ -726,6 +742,7 @@ public static JGitText get() {
/***/ public String unknownHost;
/***/ public String unknownIndexVersionOrCorruptIndex;
/***/ public String unknownObject;
+ /***/ public String unknownObjectInIndex;
/***/ public String unknownObjectType;
/***/ public String unknownObjectType2;
/***/ public String unknownRepositoryFormat;
@@ -747,6 +764,7 @@ public static JGitText get() {
/***/ public String unsupportedOperationNotAddAtEnd;
/***/ public String unsupportedPackIndexVersion;
/***/ public String unsupportedPackVersion;
+ /***/ public String unsupportedReftableVersion;
/***/ public String unsupportedRepositoryDescription;
/***/ public String updateRequiresOldIdAndNewId;
/***/ public String updatingHeadFailed;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java
new file mode 100644
index 0000000..588ed9b
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckError.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.eclipse.jgit.internal.fsck;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ObjectId;
+
+/** Holds all fsck errors of a git repository. */
+public class FsckError {
+ /** Represents a corrupt object. */
+ public static class CorruptObject {
+ final ObjectId id;
+
+ final int type;
+
+ ObjectChecker.ErrorType errorType;
+
+ /**
+ * @param id
+ * the object identifier.
+ * @param type
+ * type of the object.
+ */
+ public CorruptObject(ObjectId id, int type) {
+ this.id = id;
+ this.type = type;
+ }
+
+ void setErrorType(ObjectChecker.ErrorType errorType) {
+ this.errorType = errorType;
+ }
+
+ /** @return identifier of the object. */
+ public ObjectId getId() {
+ return id;
+ }
+
+ /** @return type of the object. */
+ public int getType() {
+ return type;
+ }
+
+ /** @return error type of the corruption. */
+ @Nullable
+ public ObjectChecker.ErrorType getErrorType() {
+ return errorType;
+ }
+ }
+
+ /** Represents a corrupt pack index file. */
+ public static class CorruptIndex {
+ String fileName;
+
+ CorruptPackIndexException.ErrorType errorType;
+
+ /**
+ * @param fileName
+ * the file name of the pack index.
+ * @param errorType
+ * the type of error as reported in
+ * {@link CorruptPackIndexException}.
+ */
+ public CorruptIndex(String fileName, ErrorType errorType) {
+ this.fileName = fileName;
+ this.errorType = errorType;
+ }
+
+ /** @return the file name of the index file. */
+ public String getFileName() {
+ return fileName;
+ }
+
+ /** @return the error type of the corruption. */
+ public ErrorType getErrorType() {
+ return errorType;
+ }
+ }
+
+ private final Set<CorruptObject> corruptObjects = new HashSet<>();
+
+ private final Set<ObjectId> missingObjects = new HashSet<>();
+
+ private final Set<CorruptIndex> corruptIndices = new HashSet<>();
+
+ private final Set<String> nonCommitHeads = new HashSet<>();
+
+ /** @return corrupt objects from all pack files. */
+ public Set<CorruptObject> getCorruptObjects() {
+ return corruptObjects;
+ }
+
+ /** @return missing objects that should be present in pack files. */
+ public Set<ObjectId> getMissingObjects() {
+ return missingObjects;
+ }
+
+ /** @return corrupt index files associated with the packs. */
+ public Set<CorruptIndex> getCorruptIndices() {
+ return corruptIndices;
+ }
+
+ /** @return names of refs under refs/heads/* that point to non-commit objects. */
+ public Set<String> getNonCommitHeads() {
+ return nonCommitHeads;
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java
new file mode 100644
index 0000000..e6ec681
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/FsckPackParser.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.fsck;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.errors.CorruptObjectException;
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.CorruptPackIndexException.ErrorType;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.fsck.FsckError.CorruptObject;
+import org.eclipse.jgit.internal.storage.dfs.ReadableChannel;
+import org.eclipse.jgit.internal.storage.file.PackIndex;
+import org.eclipse.jgit.internal.storage.file.PackIndex.MutableEntry;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ObjectDatabase;
+import org.eclipse.jgit.transport.PackParser;
+import org.eclipse.jgit.transport.PackedObjectInfo;
+
+/** A read-only pack parser for object validity checking. */
+public class FsckPackParser extends PackParser {
+ private final CRC32 crc;
+
+ private final ReadableChannel channel;
+
+ private final Set<CorruptObject> corruptObjects = new HashSet<>();
+
+ private long expectedObjectCount = -1L;
+
+ private long offset;
+
+ private int blockSize;
+
+ /**
+ * @param db
+ * the object database which stores repository's data.
+ * @param channel
+ * readable channel of the pack file.
+ */
+ public FsckPackParser(ObjectDatabase db, ReadableChannel channel) {
+ super(db, Channels.newInputStream(channel));
+ this.channel = channel;
+ setCheckObjectCollisions(false);
+ this.crc = new CRC32();
+ this.blockSize = channel.blockSize() > 0 ? channel.blockSize() : 65536;
+ }
+
+ @Override
+ protected void onPackHeader(long objCnt) throws IOException {
+ if (expectedObjectCount >= 0) {
+ // Some DFS pack files, e.g. INSERT/RECEIVE packs, don't always
+ // record the correct object count in their headers. Overwrite the
+ // expected object count after parsing the pack header.
+ setExpectedObjectCount(expectedObjectCount);
+ }
+ }
+
+ @Override
+ protected void onBeginWholeObject(long streamPosition, int type,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onObjectHeader(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onObjectData(Source src, byte[] raw, int pos, int len)
+ throws IOException {
+ crc.update(raw, pos, len);
+ }
+
+ @Override
+ protected void onEndWholeObject(PackedObjectInfo info) throws IOException {
+ info.setCRC((int) crc.getValue());
+ }
+
+ @Override
+ protected void onBeginOfsDelta(long deltaStreamPosition,
+ long baseStreamPosition, long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected void onBeginRefDelta(long deltaStreamPosition, AnyObjectId baseId,
+ long inflatedSize) throws IOException {
+ crc.reset();
+ }
+
+ @Override
+ protected UnresolvedDelta onEndDelta() throws IOException {
+ UnresolvedDelta delta = new UnresolvedDelta();
+ delta.setCRC((int) crc.getValue());
+ return delta;
+ }
+
+ @Override
+ protected void onInflatedObjectData(PackedObjectInfo obj, int typeCode,
+ byte[] data) throws IOException {
+ // FsckPackParser ignores this event.
+ }
+
+ @Override
+ protected void verifySafeObject(final AnyObjectId id, final int type,
+ final byte[] data) {
+ try {
+ super.verifySafeObject(id, type, data);
+ } catch (CorruptObjectException e) {
+ // catch the exception and continue parsing the pack file
+ CorruptObject o = new CorruptObject(id.toObjectId(), type);
+ if (e.getErrorType() != null) {
+ o.setErrorType(e.getErrorType());
+ }
+ corruptObjects.add(o);
+ }
+ }
+
+ @Override
+ protected void onPackFooter(byte[] hash) throws IOException {
+ }
+
+ @Override
+ protected boolean onAppendBase(int typeCode, byte[] data,
+ PackedObjectInfo info) throws IOException {
+ // Do nothing.
+ return false;
+ }
+
+ @Override
+ protected void onEndThinPack() throws IOException {
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(PackedObjectInfo obj,
+ ObjectTypeAndSize info) throws IOException {
+ crc.reset();
+ offset = obj.getOffset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected ObjectTypeAndSize seekDatabase(UnresolvedDelta delta,
+ ObjectTypeAndSize info) throws IOException {
+ crc.reset();
+ offset = delta.getOffset();
+ return readObjectHeader(info);
+ }
+
+ @Override
+ protected int readDatabase(byte[] dst, int pos, int cnt)
+ throws IOException {
+ // read from input instead of database.
+ int n = read(offset, dst, pos, cnt);
+ if (n > 0) {
+ offset += n;
+ }
+ return n;
+ }
+
+ int read(long channelPosition, byte[] dst, int pos, int cnt)
+ throws IOException {
+ long block = channelPosition / blockSize;
+ byte[] bytes = readFromChannel(block);
+ if (bytes == null) {
+ return -1;
+ }
+ int offset = (int) (channelPosition - block * blockSize);
+ int bytesToCopy = Math.min(cnt, bytes.length - offset);
+ if (bytesToCopy < 1) {
+ return -1;
+ }
+ System.arraycopy(bytes, offset, dst, pos, bytesToCopy);
+ return bytesToCopy;
+ }
+
+ private byte[] readFromChannel(long block) throws IOException {
+ channel.position(block * blockSize);
+ ByteBuffer buf = ByteBuffer.allocate(blockSize);
+ int totalBytesRead = 0;
+ while (totalBytesRead < blockSize) {
+ int bytesRead = channel.read(buf);
+ if (bytesRead == -1) {
+ if (totalBytesRead == 0) {
+ return null;
+ }
+ return Arrays.copyOf(buf.array(), totalBytesRead);
+ }
+ totalBytesRead += bytesRead;
+ }
+ return buf.array();
+ }
+
+ @Override
+ protected boolean checkCRC(int oldCRC) {
+ return oldCRC == (int) crc.getValue();
+ }
+
+ @Override
+ protected void onStoreStream(byte[] raw, int pos, int len)
+ throws IOException {
+ }
+
+ /**
+ * @return corrupt objects reported by {@link ObjectChecker}.
+ */
+ public Set<CorruptObject> getCorruptObjects() {
+ return corruptObjects;
+ }
+
+ /**
+ * Verify the existing index file with all objects from the pack.
+ *
+ * @param entries
+ * all the entries that are expected in the index file
+ * @param idx
+ * index file associated with the pack
+ * @throws CorruptPackIndexException
+ * when the index file is corrupt.
+ */
+ public void verifyIndex(List<PackedObjectInfo> entries, PackIndex idx)
+ throws CorruptPackIndexException {
+ Set<String> all = new HashSet<>();
+ for (PackedObjectInfo entry : entries) {
+ all.add(entry.getName());
+ long offset = idx.findOffset(entry);
+ if (offset == -1) {
+ throw new CorruptPackIndexException(
+ MessageFormat.format(JGitText.get().missingObject,
+ entry.getType(), entry.getName()),
+ ErrorType.MISSING_OBJ);
+ } else if (offset != entry.getOffset()) {
+ throw new CorruptPackIndexException(MessageFormat
+ .format(JGitText.get().mismatchOffset, entry.getName()),
+ ErrorType.MISMATCH_OFFSET);
+ }
+
+ try {
+ if (idx.hasCRC32Support()
+ && (int) idx.findCRC32(entry) != entry.getCRC()) {
+ throw new CorruptPackIndexException(
+ MessageFormat.format(JGitText.get().mismatchCRC,
+ entry.getName()),
+ ErrorType.MISMATCH_CRC);
+ }
+ } catch (MissingObjectException e) {
+ throw new CorruptPackIndexException(MessageFormat
+ .format(JGitText.get().missingCRC, entry.getName()),
+ ErrorType.MISSING_CRC);
+ }
+ }
+
+ for (MutableEntry entry : idx) {
+ if (!all.contains(entry.name())) {
+ throw new CorruptPackIndexException(MessageFormat.format(
+ JGitText.get().unknownObjectInIndex, entry.name()),
+ ErrorType.UNKNOWN_OBJ);
+ }
+ }
+ }
+
+ /**
+ * Set the object count used to overwrite the expected object count from
+ * the pack header.
+ *
+ * @param expectedObjectCount
+ * the object count to use instead of the count in the pack header.
+ */
+ public void overwriteObjectCount(long expectedObjectCount) {
+ this.expectedObjectCount = expectedObjectCount;
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java
new file mode 100644
index 0000000..361b61f
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/fsck/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Git fsck support.
+ */
+package org.eclipse.jgit.internal.fsck;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java
new file mode 100644
index 0000000..f90ba7d
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/dfs/DfsFsck.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.dfs;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.eclipse.jgit.errors.CorruptPackIndexException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.fsck.FsckError;
+import org.eclipse.jgit.internal.fsck.FsckError.CorruptIndex;
+import org.eclipse.jgit.internal.fsck.FsckPackParser;
+import org.eclipse.jgit.internal.storage.pack.PackExt;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.ObjectChecker;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.revwalk.ObjectWalk;
+import org.eclipse.jgit.revwalk.RevObject;
+import org.eclipse.jgit.transport.PackedObjectInfo;
+
+/** Verify the validity and connectivity of a DFS repository. */
+public class DfsFsck {
+ private final DfsRepository repo;
+
+ private final DfsObjDatabase objdb;
+
+ private final DfsReader ctx;
+
+ private ObjectChecker objChecker = new ObjectChecker();
+
+ /**
+ * Initialize DFS fsck.
+ *
+ * @param repository
+ * the DFS repository to check.
+ */
+ public DfsFsck(DfsRepository repository) {
+ repo = repository;
+ objdb = repo.getObjectDatabase();
+ ctx = objdb.newReader();
+ }
+
+
+ /**
+ * Verify the integrity and connectivity of all objects in the object
+ * database.
+ *
+ * @param pm
+ * callback to provide progress feedback during the check.
+ * @return all errors found in the repository.
+ * @throws IOException
+ * if an IO error is encountered during the check.
+ */
+ public FsckError check(ProgressMonitor pm) throws IOException {
+ FsckError errors = new FsckError();
+ try {
+ for (DfsPackFile pack : objdb.getPacks()) {
+ DfsPackDescription packDesc = pack.getPackDescription();
+ try (ReadableChannel channel = repo.getObjectDatabase()
+ .openFile(packDesc, PackExt.PACK)) {
+ List<PackedObjectInfo> objectsInPack;
+ FsckPackParser parser = new FsckPackParser(
+ repo.getObjectDatabase(), channel);
+ parser.setObjectChecker(objChecker);
+ parser.overwriteObjectCount(packDesc.getObjectCount());
+ parser.parse(pm);
+ errors.getCorruptObjects()
+ .addAll(parser.getCorruptObjects());
+ objectsInPack = parser.getSortedObjectList(null);
+ parser.verifyIndex(objectsInPack, pack.getPackIndex(ctx));
+ } catch (MissingObjectException e) {
+ errors.getMissingObjects().add(e.getObjectId());
+ } catch (CorruptPackIndexException e) {
+ errors.getCorruptIndices().add(new CorruptIndex(
+ pack.getPackDescription()
+ .getFileName(PackExt.INDEX),
+ e.getErrorType()));
+ }
+ }
+
+ try (ObjectWalk ow = new ObjectWalk(ctx)) {
+ for (Ref r : repo.getAllRefs().values()) {
+ try {
+ RevObject tip = ow.parseAny(r.getObjectId());
+ if (r.getLeaf().getName().startsWith(Constants.R_HEADS)) {
+ // check if heads point to a commit object
+ if (tip.getType() != Constants.OBJ_COMMIT) {
+ errors.getNonCommitHeads()
+ .add(r.getLeaf().getName());
+ }
+ }
+ ow.markStart(tip);
+ ow.checkConnectivity();
+ ow.markUninteresting(tip);
+ } catch (MissingObjectException e) {
+ errors.getMissingObjects().add(e.getObjectId());
+ }
+ }
+ }
+ } finally {
+ ctx.close();
+ }
+ return errors;
+ }
+
+ /**
+ * Use a customized object checker instead of the default one. Callers can
+ * specify a skip list to ignore some errors.
+ *
+ * @param objChecker
+ * a customized object checker.
+ */
+ public void setObjectChecker(ObjectChecker objChecker) {
+ this.objChecker = objChecker;
+ }
+}
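
A minimal sketch of how a caller might drive the new fsck and consume its report; `FsckRunner` is illustrative, while `DfsFsck`, `FsckError`, and `NullProgressMonitor` are the classes introduced or used by this change:

```java
import java.io.IOException;

import org.eclipse.jgit.internal.fsck.FsckError;
import org.eclipse.jgit.internal.storage.dfs.DfsFsck;
import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.lib.NullProgressMonitor;

class FsckRunner {
	// Checks object integrity and ref connectivity, then prints what was found.
	static void report(DfsRepository repo) throws IOException {
		DfsFsck fsck = new DfsFsck(repo);
		FsckError errors = fsck.check(NullProgressMonitor.INSTANCE);
		for (FsckError.CorruptObject o : errors.getCorruptObjects()) {
			System.out.println("corrupt object " + o.getId().name()
					+ ": " + o.getErrorType());
		}
		System.out.println("missing objects: " + errors.getMissingObjects());
		System.out.println("corrupt indices: " + errors.getCorruptIndices().size());
		System.out.println("non-commit heads: " + errors.getNonCommitHeads());
	}
}
```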
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
index 4b4337d..2eacb7a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/CheckoutEntryImpl.java
@@ -74,4 +74,4 @@ public String getFromBranch() {
public String getToBranch() {
return to;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
index 6a674aa..646feac 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/FileRepository.java
@@ -216,7 +216,7 @@ public void onConfigChanged(ConfigChangedEvent event) {
ConfigConstants.CONFIG_KEY_REPO_FORMAT_VERSION, 0);
String reftype = repoConfig.getString(
- "extensions", null, "refsStorage"); //$NON-NLS-1$ //$NON-NLS-2$
+ "extensions", null, "refStorage"); //$NON-NLS-1$ //$NON-NLS-2$
if (repositoryFormatVersion >= 1 && reftype != null) {
if (StringUtils.equalsIgnoreCase(reftype, "reftree")) { //$NON-NLS-1$
refs = new RefTreeDatabase(this, new RefDirectory(this));
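
For reference, the corrected key matches what a repository opting into the alternate ref backend would carry in its config; the snippet below only illustrates the key spelling the code now reads:

```
[core]
	repositoryformatversion = 1
[extensions]
	refStorage = reftree
```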
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
index bda5cbe..3f82e2a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/InfoAttributesNode.java
@@ -78,4 +78,4 @@ public AttributesNode load() throws IOException {
return r.getRules().isEmpty() ? null : r;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
index d2fcacf..6221cfa 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/LockFile.java
@@ -374,7 +374,7 @@ public void close() throws IOException {
};
}
- private void requireLock() {
+ void requireLock() {
if (os == null) {
unlock();
throw new IllegalStateException(MessageFormat.format(JGitText.get().lockOnNotHeld, ref));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java
index fcc47fb..0611d3e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackFile.java
@@ -1108,8 +1108,17 @@ synchronized PackBitmapIndex getBitmapIndex() throws IOException {
if (invalid || invalidBitmap)
return null;
if (bitmapIdx == null && hasExt(BITMAP_INDEX)) {
- final PackBitmapIndex idx = PackBitmapIndex.open(
- extFile(BITMAP_INDEX), idx(), getReverseIdx());
+ final PackBitmapIndex idx;
+ try {
+ idx = PackBitmapIndex.open(extFile(BITMAP_INDEX), idx(),
+ getReverseIdx());
+ } catch (FileNotFoundException e) {
+ // Once upon a time this bitmap file existed. Now it
+ // has been removed. Most likely an external gc has
+ // removed this packfile and the bitmap
+ invalidBitmap = true;
+ return null;
+ }
// At this point, idx() will have set packChecksum.
if (Arrays.equals(packChecksum, idx.packChecksum))
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
index 154809b..962f765 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackInputStream.java
@@ -82,4 +82,4 @@ public int read() throws IOException {
public void close() {
wc.close();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
new file mode 100644
index 0000000..b661ae7
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/PackedBatchRefUpdate.java
@@ -0,0 +1,522 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.file;
+
+import static java.util.stream.Collectors.toList;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.LOCK_FAILURE;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.NOT_ATTEMPTED;
+import static org.eclipse.jgit.transport.ReceiveCommand.Result.REJECTED_NONFASTFORWARD;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.LockFailedException;
+import org.eclipse.jgit.errors.MissingObjectException;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.file.RefDirectory.PackedRefList;
+import org.eclipse.jgit.lib.BatchRefUpdate;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.ProgressMonitor;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefDatabase;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.revwalk.RevObject;
+import org.eclipse.jgit.revwalk.RevTag;
+import org.eclipse.jgit.revwalk.RevWalk;
+import org.eclipse.jgit.transport.ReceiveCommand;
+import org.eclipse.jgit.util.RefList;
+
+/**
+ * Implementation of {@link BatchRefUpdate} that uses the {@code packed-refs}
+ * file to support atomically updating multiple refs.
+ * <p>
+ * The algorithm is designed to be compatible with traditional single ref
+ * updates operating on single refs only. Regardless of success or failure, the
+ * results are atomic: from the perspective of any reader, either all updates in
+ * the batch will be visible, or none will. In the case of process failure
+ * during any of the following steps, removal of stale lock files is always
+ * safe, and will never result in an inconsistent state, although the update may
+ * or may not have been applied.
+ * <p>
+ * The algorithm is:
+ * <ol>
+ * <li>Pack loose refs involved in the transaction using the normal pack-refs
+ * operation. This ensures that creating lock files in the following step
+ * succeeds even if a batch contains both a delete of {@code refs/x} (loose) and
+ * a create of {@code refs/x/y}.</li>
+ * <li>Create locks for all loose refs involved in the transaction, even if they
+ * are not currently loose.</li>
+ * <li>Pack loose refs again, this time while holding all lock files (see {@link
+ * RefDirectory#pack(Map)}), without deleting them afterwards. This covers a
+ * potential race where new loose refs were created after the initial packing
+ * step. If no new loose refs were created during this race, this step does not
+ * modify any files on disk. Keep the merged state in memory.</li>
+ * <li>Update the in-memory packed refs with the commands in the batch, possibly
+ * failing the whole batch if any old ref values do not match.</li>
+ * <li>If the update succeeds, lock {@code packed-refs} and commit by atomically
+ * renaming the lock file.</li>
+ * <li>Delete loose ref lock files.</li>
+ * </ol>
+ *
+ * Because the packed-refs file format is a sorted list, this algorithm is
+ * linear in the total number of refs, regardless of the batch size. This can be
+ * a significant slowdown on repositories with large numbers of refs; callers
+ * that prefer speed over atomicity should use {@code setAtomic(false)}. As an
+ * optimization, an update containing a single ref update does not use the
+ * packed-refs protocol.
+ */
+class PackedBatchRefUpdate extends BatchRefUpdate {
+ private RefDirectory refdb;
+
+ PackedBatchRefUpdate(RefDirectory refdb) {
+ super(refdb);
+ this.refdb = refdb;
+ }
+
+ @Override
+ public void execute(RevWalk walk, ProgressMonitor monitor,
+ List<String> options) throws IOException {
+ if (!isAtomic()) {
+ // Use default one-by-one implementation.
+ super.execute(walk, monitor, options);
+ return;
+ }
+ List<ReceiveCommand> pending =
+ ReceiveCommand.filter(getCommands(), NOT_ATTEMPTED);
+ if (pending.isEmpty()) {
+ return;
+ }
+ if (pending.size() == 1) {
+ // Single-ref updates are always atomic, no need for packed-refs.
+ super.execute(walk, monitor, options);
+ return;
+ }
+
+ // Required implementation details copied from super.execute.
+ if (!blockUntilTimestamps(MAX_WAIT)) {
+ return;
+ }
+ if (options != null) {
+ setPushOptions(options);
+ }
+ // End required implementation details.
+
+ // Check for conflicting names before attempting to acquire locks, since
+ // lockfile creation may fail on file/directory conflicts.
+ if (!checkConflictingNames(pending)) {
+ return;
+ }
+
+ if (!checkObjectExistence(walk, pending)) {
+ return;
+ }
+
+ if (!checkNonFastForwards(walk, pending)) {
+ return;
+ }
+
+ // Pack refs normally, so we can create lock files even in the case where
+ // refs/x is deleted and refs/x/y is created in this batch.
+ try {
+ refdb.pack(
+ pending.stream().map(ReceiveCommand::getRefName).collect(toList()));
+ } catch (LockFailedException e) {
+ lockFailure(pending.get(0), pending);
+ return;
+ }
+
+ Map<String, LockFile> locks = null;
+ refdb.inProcessPackedRefsLock.lock();
+ try {
+ locks = lockLooseRefs(pending);
+ if (locks == null) {
+ return;
+ }
+ PackedRefList oldPackedList = refdb.pack(locks);
+ RefList<Ref> newRefs = applyUpdates(walk, oldPackedList, pending);
+ if (newRefs == null) {
+ return;
+ }
+ LockFile packedRefsLock = refdb.lockPackedRefs();
+ if (packedRefsLock == null) {
+ lockFailure(pending.get(0), pending);
+ return;
+ }
+ // commitPackedRefs removes lock file (by renaming over real file).
+ refdb.commitPackedRefs(packedRefsLock, newRefs, oldPackedList);
+ } finally {
+ try {
+ unlockAll(locks);
+ } finally {
+ refdb.inProcessPackedRefsLock.unlock();
+ }
+ }
+
+ refdb.fireRefsChanged();
+ pending.forEach(c -> c.setResult(ReceiveCommand.Result.OK));
+ writeReflog(pending);
+ }
+
+ private boolean checkConflictingNames(List<ReceiveCommand> commands)
+ throws IOException {
+ Set<String> takenNames = new HashSet<>();
+ Set<String> takenPrefixes = new HashSet<>();
+ Set<String> deletes = new HashSet<>();
+ for (ReceiveCommand cmd : commands) {
+ if (cmd.getType() != ReceiveCommand.Type.DELETE) {
+ takenNames.add(cmd.getRefName());
+ addPrefixesTo(cmd.getRefName(), takenPrefixes);
+ } else {
+ deletes.add(cmd.getRefName());
+ }
+ }
+ Set<String> initialRefs = refdb.getRefs(RefDatabase.ALL).keySet();
+ for (String name : initialRefs) {
+ if (!deletes.contains(name)) {
+ takenNames.add(name);
+ addPrefixesTo(name, takenPrefixes);
+ }
+ }
+
+ for (ReceiveCommand cmd : commands) {
+ if (cmd.getType() != ReceiveCommand.Type.DELETE &&
+ takenPrefixes.contains(cmd.getRefName())) {
+ // This ref is a prefix of some other ref. This check doesn't apply when
+ // this command is a delete, because if the ref is deleted nobody will
+ // ever be creating a loose ref with that name.
+ lockFailure(cmd, commands);
+ return false;
+ }
+ for (String prefix : getPrefixes(cmd.getRefName())) {
+ if (takenNames.contains(prefix)) {
+ // A prefix of this ref is already a refname. This check does apply
+ // when this command is a delete, because we would need to create the
+ // refname as a directory in order to create a lockfile for the
+ // to-be-deleted ref.
+ lockFailure(cmd, commands);
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ private boolean checkObjectExistence(RevWalk walk,
+ List<ReceiveCommand> commands) throws IOException {
+ for (ReceiveCommand cmd : commands) {
+ try {
+ if (!cmd.getNewId().equals(ObjectId.zeroId())) {
+ walk.parseAny(cmd.getNewId());
+ }
+ } catch (MissingObjectException e) {
+ // ReceiveCommand#setResult(Result) converts REJECTED to
+ // REJECTED_NONFASTFORWARD, even though that result is also used for a
+ // missing object. Eagerly handle this case so we can set the right
+ // result.
+ reject(cmd, ReceiveCommand.Result.REJECTED_MISSING_OBJECT, commands);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean checkNonFastForwards(RevWalk walk,
+ List<ReceiveCommand> commands) throws IOException {
+ if (isAllowNonFastForwards()) {
+ return true;
+ }
+ for (ReceiveCommand cmd : commands) {
+ cmd.updateType(walk);
+ if (cmd.getType() == ReceiveCommand.Type.UPDATE_NONFASTFORWARD) {
+ reject(cmd, REJECTED_NONFASTFORWARD, commands);
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Lock loose refs corresponding to a list of commands.
+ *
+ * @param commands
+ * commands that we intend to execute.
+ * @return map from ref name in the input commands to its lock file. Always
+ * contains one entry for each ref in the input list. All locks are
+ * acquired before returning. If any lock could not be acquired: the
+ * return value is null; no locks are held; and all commands that were
+ * pending are set to fail with {@code LOCK_FAILURE}.
+ * @throws IOException
+ * if an error other than a failure to acquire a lock occurred; no
+ * locks are held if this exception is thrown.
+ */
+ @Nullable
+ private Map<String, LockFile> lockLooseRefs(List<ReceiveCommand> commands)
+ throws IOException {
+ ReceiveCommand failed = null;
+ Map<String, LockFile> locks = new HashMap<>();
+ try {
+ RETRY: for (int ms : refdb.getRetrySleepMs()) {
+ failed = null;
+ // Release all locks before trying again, to prevent deadlock.
+ unlockAll(locks);
+ locks.clear();
+ RefDirectory.sleep(ms);
+
+ for (ReceiveCommand c : commands) {
+ String name = c.getRefName();
+ LockFile lock = new LockFile(refdb.fileFor(name));
+ if (locks.put(name, lock) != null) {
+ throw new IOException(
+ MessageFormat.format(JGitText.get().duplicateRef, name));
+ }
+ if (!lock.lock()) {
+ failed = c;
+ continue RETRY;
+ }
+ }
+ Map<String, LockFile> result = locks;
+ locks = null;
+ return result;
+ }
+ } finally {
+ unlockAll(locks);
+ }
+ lockFailure(failed != null ? failed : commands.get(0), commands);
+ return null;
+ }
+
+ private static RefList<Ref> applyUpdates(RevWalk walk, RefList<Ref> refs,
+ List<ReceiveCommand> commands) throws IOException {
+ int nDeletes = 0;
+ List<ReceiveCommand> adds = new ArrayList<>(commands.size());
+ for (ReceiveCommand c : commands) {
+ if (c.getType() == ReceiveCommand.Type.CREATE) {
+ adds.add(c);
+ } else if (c.getType() == ReceiveCommand.Type.DELETE) {
+ nDeletes++;
+ }
+ }
+ int addIdx = 0;
+
+ // Construct a new RefList by linearly scanning the old list, and merging in
+ // any updates.
+ Map<String, ReceiveCommand> byName = byName(commands);
+ RefList.Builder<Ref> b =
+ new RefList.Builder<>(refs.size() - nDeletes + adds.size());
+ for (Ref ref : refs) {
+ String name = ref.getName();
+ ReceiveCommand cmd = byName.remove(name);
+ if (cmd == null) {
+ b.add(ref);
+ continue;
+ }
+ if (!cmd.getOldId().equals(ref.getObjectId())) {
+ lockFailure(cmd, commands);
+ return null;
+ }
+
+ // Consume any adds between the last and current ref.
+ while (addIdx < adds.size()) {
+ ReceiveCommand currAdd = adds.get(addIdx);
+ if (currAdd.getRefName().compareTo(name) < 0) {
+ b.add(peeledRef(walk, currAdd));
+ byName.remove(currAdd.getRefName());
+ } else {
+ break;
+ }
+ addIdx++;
+ }
+
+ if (cmd.getType() != ReceiveCommand.Type.DELETE) {
+ b.add(peeledRef(walk, cmd));
+ }
+ }
+
+ // All remaining adds are valid, since the refs didn't exist.
+ while (addIdx < adds.size()) {
+ ReceiveCommand cmd = adds.get(addIdx++);
+ byName.remove(cmd.getRefName());
+ b.add(peeledRef(walk, cmd));
+ }
+
+ // Any remaining updates/deletes do not correspond to any existing refs, so
+ // they are lock failures.
+ if (!byName.isEmpty()) {
+ lockFailure(byName.values().iterator().next(), commands);
+ return null;
+ }
+
+ return b.toRefList();
+ }
+
+ private void writeReflog(List<ReceiveCommand> commands) {
+ PersonIdent ident = getRefLogIdent();
+ if (ident == null) {
+ ident = new PersonIdent(refdb.getRepository());
+ }
+ ReflogWriter w = refdb.getLogWriter();
+ for (ReceiveCommand cmd : commands) {
+ // Assume any pending commands have already been executed atomically.
+ if (cmd.getResult() != ReceiveCommand.Result.OK) {
+ continue;
+ }
+ String name = cmd.getRefName();
+
+ if (cmd.getType() == ReceiveCommand.Type.DELETE) {
+ try {
+ RefDirectory.delete(w.logFor(name), RefDirectory.levelsIn(name));
+ } catch (IOException e) {
+ // Ignore failures, see below.
+ }
+ continue;
+ }
+
+ if (isRefLogDisabled(cmd)) {
+ continue;
+ }
+
+ String msg = getRefLogMessage(cmd);
+ if (isRefLogIncludingResult(cmd)) {
+ String strResult = toResultString(cmd);
+ if (strResult != null) {
+ msg = msg.isEmpty()
+ ? strResult : msg + ": " + strResult; //$NON-NLS-1$
+ }
+ }
+ try {
+ w.log(name, cmd.getOldId(), cmd.getNewId(), ident, msg);
+ } catch (IOException e) {
+ // Ignore failures, but continue attempting to write more reflogs.
+ //
+ // In this storage format, it is impossible to atomically write the
+ // reflog with the ref updates, so we have to choose between:
+ // a. Propagating this exception and claiming failure, even though the
+ // actual ref updates succeeded.
+ // b. Ignoring failures writing the reflog, so we claim success if and
+ // only if the ref updates succeeded.
+ // We choose (b) in order to surprise callers the least.
+ //
+ // Possible future improvements:
+ // * Log a warning to a logger.
+ // * Retry a fixed number of times in case the error was transient.
+ }
+ }
+ }
+
+ private String toResultString(ReceiveCommand cmd) {
+ switch (cmd.getType()) {
+ case CREATE:
+ return ReflogEntry.PREFIX_CREATED;
+ case UPDATE:
+ // Match the behavior of a single RefUpdate. In that case, setting the
+ // force bit completely bypasses the potentially expensive isMergedInto
+ // check, by design, so the reflog message may be inaccurate.
+ //
+ // Similarly, this class bypasses the isMergedInto checks when the force
+ // bit is set, meaning we can't actually distinguish between UPDATE and
+ // UPDATE_NONFASTFORWARD when isAllowNonFastForwards() returns true.
+ return isAllowNonFastForwards()
+ ? ReflogEntry.PREFIX_FORCED_UPDATE : ReflogEntry.PREFIX_FAST_FORWARD;
+ case UPDATE_NONFASTFORWARD:
+ return ReflogEntry.PREFIX_FORCED_UPDATE;
+ default:
+ return null;
+ }
+ }
+
+ private static Map<String, ReceiveCommand> byName(
+ List<ReceiveCommand> commands) {
+ Map<String, ReceiveCommand> ret = new LinkedHashMap<>();
+ for (ReceiveCommand cmd : commands) {
+ ret.put(cmd.getRefName(), cmd);
+ }
+ return ret;
+ }
+
+ private static Ref peeledRef(RevWalk walk, ReceiveCommand cmd)
+ throws IOException {
+ ObjectId newId = cmd.getNewId().copy();
+ RevObject obj = walk.parseAny(newId);
+ if (obj instanceof RevTag) {
+ return new ObjectIdRef.PeeledTag(
+ Ref.Storage.PACKED, cmd.getRefName(), newId, walk.peel(obj).copy());
+ }
+ return new ObjectIdRef.PeeledNonTag(
+ Ref.Storage.PACKED, cmd.getRefName(), newId);
+ }
+
+ private static void unlockAll(@Nullable Map<?, LockFile> locks) {
+ if (locks != null) {
+ locks.values().forEach(LockFile::unlock);
+ }
+ }
+
+ private static void lockFailure(ReceiveCommand cmd,
+ List<ReceiveCommand> commands) {
+ reject(cmd, LOCK_FAILURE, commands);
+ }
+
+ private static void reject(ReceiveCommand cmd, ReceiveCommand.Result result,
+ List<ReceiveCommand> commands) {
+ cmd.setResult(result);
+ for (ReceiveCommand c2 : commands) {
+ if (c2.getResult() == ReceiveCommand.Result.OK) {
+ // Undo OK status so ReceiveCommand#abort aborts it. Assumes this method
+ // is always called before committing any updates to disk.
+ c2.setResult(ReceiveCommand.Result.NOT_ATTEMPTED);
+ }
+ }
+ ReceiveCommand.abort(commands);
+ }
+}
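
A brief sketch of exercising this code path through the public API: on a file-based repository, `setAtomic(true)` with more than one pending command is what routes the batch into `PackedBatchRefUpdate` after this change. The branch names and object ids below are placeholders, and `AtomicBranchCreate` is illustrative only.

```java
import java.io.IOException;
import java.util.Arrays;

import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.ReceiveCommand;

class AtomicBranchCreate {
	// Creates two branches pointing at the same commit in one
	// all-or-nothing transaction.
	static void createBranches(Repository repo, ObjectId tip) throws IOException {
		BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();
		batch.setAtomic(true);
		batch.addCommand(Arrays.asList(
				new ReceiveCommand(ObjectId.zeroId(), tip, "refs/heads/topic-a"),
				new ReceiveCommand(ObjectId.zeroId(), tip, "refs/heads/topic-b")));
		try (RevWalk rw = new RevWalk(repo)) {
			batch.execute(rw, NullProgressMonitor.INSTANCE);
		}
		for (ReceiveCommand cmd : batch.getCommands()) {
			if (cmd.getResult() != ReceiveCommand.Result.OK) {
				throw new IOException("ref update failed: " + cmd.getRefName()
						+ " " + cmd.getResult());
			}
		}
	}
}
```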
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
index c8c2dd5..ecf7ef9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectory.java
@@ -63,17 +63,22 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.io.InterruptedIOException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.text.MessageFormat;
import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
import org.eclipse.jgit.annotations.NonNull;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.errors.InvalidObjectIdException;
import org.eclipse.jgit.errors.LockFailedException;
import org.eclipse.jgit.errors.MissingObjectException;
@@ -135,6 +140,10 @@ public class RefDirectory extends RefDatabase {
Constants.MERGE_HEAD, Constants.FETCH_HEAD, Constants.ORIG_HEAD,
Constants.CHERRY_PICK_HEAD };
+ @SuppressWarnings("boxing")
+ private static final List<Integer> RETRY_SLEEP_MS =
+ Collections.unmodifiableList(Arrays.asList(0, 100, 200, 400, 800, 1600));
+
private final FileRepository parent;
private final File gitDir;
@@ -143,7 +152,7 @@ public class RefDirectory extends RefDatabase {
private final ReflogWriter logWriter;
- private final File packedRefsFile;
+ final File packedRefsFile;
/**
* Immutable sorted list of loose references.
@@ -159,6 +168,22 @@ public class RefDirectory extends RefDatabase {
final AtomicReference<PackedRefList> packedRefs = new AtomicReference<>();
/**
+ * Lock for coordinating operations within a single process that may contend
+ * on the {@code packed-refs} file.
+ * <p>
+ * All operations that write {@code packed-refs} must still acquire a
+ * {@link LockFile} on {@link #packedRefsFile}, even after they have acquired
+ * this lock, since there may be multiple {@link RefDirectory} instances or
+ * other processes operating on the same repo on disk.
+ * <p>
+ * This lock exists so multiple threads in the same process can wait in a fair
+ * queue without trying, failing, and retrying to acquire the on-disk lock. If
+ * {@code RepositoryCache} is used, this lock instance will be used by all
+ * threads.
+ */
+ final ReentrantLock inProcessPackedRefsLock = new ReentrantLock(true);
+
+ /**
* Number of modifications made to this database.
* <p>
* This counter is incremented when a change is made, or detected from the
@@ -174,6 +199,8 @@ public class RefDirectory extends RefDatabase {
*/
private final AtomicInteger lastNotifiedModCnt = new AtomicInteger();
+ private List<Integer> retrySleepMs = RETRY_SLEEP_MS;
+
RefDirectory(final FileRepository db) {
final FS fs = db.getFS();
parent = db;
@@ -183,7 +210,7 @@ public class RefDirectory extends RefDatabase {
packedRefsFile = fs.resolve(gitDir, PACKED_REFS);
looseRefs.set(RefList.<LooseRef> emptyList());
- packedRefs.set(PackedRefList.NO_PACKED_REFS);
+ packedRefs.set(NO_PACKED_REFS);
}
Repository getRepository() {
@@ -209,7 +236,7 @@ public void close() {
private void clearReferences() {
looseRefs.set(RefList.<LooseRef> emptyList());
- packedRefs.set(PackedRefList.NO_PACKED_REFS);
+ packedRefs.set(NO_PACKED_REFS);
}
@Override
@@ -562,6 +589,16 @@ public RefDirectoryRename newRename(String fromName, String toName)
return new RefDirectoryRename(from, to);
}
+ @Override
+ public PackedBatchRefUpdate newBatchUpdate() {
+ return new PackedBatchRefUpdate(this);
+ }
+
+ @Override
+ public boolean performsAtomicTransactions() {
+ return true;
+ }
+
void stored(RefDirectoryUpdate update, FileSnapshot snapshot) {
final ObjectId target = update.getNewObjectId().copy();
final Ref leaf = update.getRef().getLeaf();
@@ -590,16 +627,19 @@ void delete(RefDirectoryUpdate update) throws IOException {
// we don't miss an edit made externally.
final PackedRefList packed = getPackedRefs();
if (packed.contains(name)) {
- LockFile lck = new LockFile(packedRefsFile);
- if (!lck.lock())
- throw new LockFailedException(packedRefsFile);
+ inProcessPackedRefsLock.lock();
try {
- PackedRefList cur = readPackedRefs();
- int idx = cur.find(name);
- if (0 <= idx)
- commitPackedRefs(lck, cur.remove(idx), packed);
+ LockFile lck = lockPackedRefsOrThrow();
+ try {
+ PackedRefList cur = readPackedRefs();
+ int idx = cur.find(name);
+ if (0 <= idx)
+ commitPackedRefs(lck, cur.remove(idx), packed);
+ } finally {
+ lck.unlock();
+ }
} finally {
- lck.unlock();
+ inProcessPackedRefsLock.unlock();
}
}
@@ -635,91 +675,144 @@ void delete(RefDirectoryUpdate update) throws IOException {
* @throws IOException
*/
public void pack(List<String> refs) throws IOException {
- if (refs.size() == 0)
- return;
+ pack(refs, Collections.emptyMap());
+ }
+
+ PackedRefList pack(Map<String, LockFile> heldLocks) throws IOException {
+ return pack(heldLocks.keySet(), heldLocks);
+ }
+
+ private PackedRefList pack(Collection<String> refs,
+ Map<String, LockFile> heldLocks) throws IOException {
+ for (LockFile ol : heldLocks.values()) {
+ ol.requireLock();
+ }
+ if (refs.size() == 0) {
+ return null;
+ }
FS fs = parent.getFS();
// Lock the packed refs file and read the content
- LockFile lck = new LockFile(packedRefsFile);
- if (!lck.lock())
- throw new IOException(MessageFormat.format(
- JGitText.get().cannotLock, packedRefsFile));
-
+ inProcessPackedRefsLock.lock();
try {
- final PackedRefList packed = getPackedRefs();
- RefList<Ref> cur = readPackedRefs();
+ LockFile lck = lockPackedRefsOrThrow();
+ try {
+ final PackedRefList packed = getPackedRefs();
+ RefList<Ref> cur = readPackedRefs();
- // Iterate over all refs to be packed
- boolean dirty = false;
- for (String refName : refs) {
- Ref oldRef = readRef(refName, cur);
- if (oldRef.isSymbolic()) {
- continue; // can't pack symbolic refs
- }
- // Add/Update it to packed-refs
- Ref newRef = peeledPackedRef(oldRef);
- if (newRef == oldRef) {
- // No-op; peeledPackedRef returns the input ref only if it's already
- // packed, and readRef returns a packed ref only if there is no loose
- // ref.
- continue;
- }
-
- dirty = true;
- int idx = cur.find(refName);
- if (idx >= 0) {
- cur = cur.set(idx, newRef);
- } else {
- cur = cur.add(idx, newRef);
- }
- }
- if (!dirty) {
- // All requested refs were already packed accurately
- return;
- }
-
- // The new content for packed-refs is collected. Persist it.
- commitPackedRefs(lck, cur, packed);
-
- // Now delete the loose refs which are now packed
- for (String refName : refs) {
- // Lock the loose ref
- File refFile = fileFor(refName);
- if (!fs.exists(refFile))
- continue;
- LockFile rLck = new LockFile(refFile);
- if (!rLck.lock())
- continue;
- try {
- LooseRef currentLooseRef = scanRef(null, refName);
- if (currentLooseRef == null || currentLooseRef.isSymbolic())
- continue;
- Ref packedRef = cur.get(refName);
- ObjectId clr_oid = currentLooseRef.getObjectId();
- if (clr_oid != null
- && clr_oid.equals(packedRef.getObjectId())) {
- RefList<LooseRef> curLoose, newLoose;
- do {
- curLoose = looseRefs.get();
- int idx = curLoose.find(refName);
- if (idx < 0)
- break;
- newLoose = curLoose.remove(idx);
- } while (!looseRefs.compareAndSet(curLoose, newLoose));
- int levels = levelsIn(refName) - 2;
- delete(refFile, levels, rLck);
+ // Iterate over all refs to be packed
+ boolean dirty = false;
+ for (String refName : refs) {
+ Ref oldRef = readRef(refName, cur);
+ if (oldRef == null) {
+ continue; // A non-existent ref is already correctly packed.
}
- } finally {
- rLck.unlock();
+ if (oldRef.isSymbolic()) {
+ continue; // can't pack symbolic refs
+ }
+ // Add/Update it to packed-refs
+ Ref newRef = peeledPackedRef(oldRef);
+ if (newRef == oldRef) {
+ // No-op; peeledPackedRef returns the input ref only if it's already
+ // packed, and readRef returns a packed ref only if there is no
+ // loose ref.
+ continue;
+ }
+
+ dirty = true;
+ int idx = cur.find(refName);
+ if (idx >= 0) {
+ cur = cur.set(idx, newRef);
+ } else {
+ cur = cur.add(idx, newRef);
+ }
}
+ if (!dirty) {
+ // All requested refs were already packed accurately
+ return packed;
+ }
+
+ // The new content for packed-refs is collected. Persist it.
+ PackedRefList result = commitPackedRefs(lck, cur, packed);
+
+ // Now delete the loose refs which are now packed
+ for (String refName : refs) {
+ // Lock the loose ref
+ File refFile = fileFor(refName);
+ if (!fs.exists(refFile)) {
+ continue;
+ }
+
+ LockFile rLck = heldLocks.get(refName);
+ boolean shouldUnlock;
+ if (rLck == null) {
+ rLck = new LockFile(refFile);
+ if (!rLck.lock()) {
+ continue;
+ }
+ shouldUnlock = true;
+ } else {
+ shouldUnlock = false;
+ }
+
+ try {
+ LooseRef currentLooseRef = scanRef(null, refName);
+ if (currentLooseRef == null || currentLooseRef.isSymbolic()) {
+ continue;
+ }
+ Ref packedRef = cur.get(refName);
+ ObjectId clr_oid = currentLooseRef.getObjectId();
+ if (clr_oid != null
+ && clr_oid.equals(packedRef.getObjectId())) {
+ RefList<LooseRef> curLoose, newLoose;
+ do {
+ curLoose = looseRefs.get();
+ int idx = curLoose.find(refName);
+ if (idx < 0) {
+ break;
+ }
+ newLoose = curLoose.remove(idx);
+ } while (!looseRefs.compareAndSet(curLoose, newLoose));
+ int levels = levelsIn(refName) - 2;
+ delete(refFile, levels, rLck);
+ }
+ } finally {
+ if (shouldUnlock) {
+ rLck.unlock();
+ }
+ }
+ }
+ // Don't fire refsChanged. The refs have not changed, only their
+ // storage.
+ return result;
+ } finally {
+ lck.unlock();
}
- // Don't fire refsChanged. The refs have not change, only their
- // storage.
} finally {
- lck.unlock();
+ inProcessPackedRefsLock.unlock();
}
}
+ @Nullable
+ LockFile lockPackedRefs() throws IOException {
+ LockFile lck = new LockFile(packedRefsFile);
+ for (int ms : getRetrySleepMs()) {
+ sleep(ms);
+ if (lck.lock()) {
+ return lck;
+ }
+ }
+ return null;
+ }
+
+ private LockFile lockPackedRefsOrThrow() throws IOException {
+ LockFile lck = lockPackedRefs();
+ if (lck == null) {
+ throw new LockFailedException(packedRefsFile);
+ }
+ return lck;
+ }
+
/**
* Make sure a ref is peeled and has the Storage PACKED. If the given ref
* has this attributes simply return it. Otherwise create a new peeled
@@ -813,7 +906,7 @@ private PackedRefList readPackedRefs() throws IOException {
throw noPackedRefs;
}
// Ignore it and leave the new list empty.
- return PackedRefList.NO_PACKED_REFS;
+ return NO_PACKED_REFS;
}
try {
return new PackedRefList(parsePackedRefs(br), snapshot,
@@ -894,8 +987,11 @@ private static String copy(final String src, final int off, final int end) {
return new StringBuilder(end - off).append(src, off, end).toString();
}
- private void commitPackedRefs(final LockFile lck, final RefList<Ref> refs,
+ PackedRefList commitPackedRefs(final LockFile lck, final RefList<Ref> refs,
final PackedRefList oldPackedList) throws IOException {
+ // Can't just return packedRefs.get() from this method; it might have been
+ // updated again after writePackedRefs() returns.
+ AtomicReference<PackedRefList> result = new AtomicReference<>();
new RefWriter(refs) {
@Override
protected void writeFile(String name, byte[] content)
@@ -935,8 +1031,10 @@ protected void writeFile(String name, byte[] content)
throw new ObjectWritingException(
MessageFormat.format(JGitText.get().unableToWrite, name));
}
+ result.set(newPackedList);
}
}.writePackedRefs();
+ return result.get();
}
private Ref readRef(String name, RefList<Ref> packed) throws IOException {
@@ -1058,7 +1156,7 @@ private static boolean isSymRef(final byte[] buf, int n) {
}
/** If the parent should fire listeners, fires them. */
- private void fireRefsChanged() {
+ void fireRefsChanged() {
final int last = lastNotifiedModCnt.get();
final int curr = modCnt.get();
if (last != curr && lastNotifiedModCnt.compareAndSet(last, curr) && last != 0)
@@ -1125,22 +1223,80 @@ private static void delete(final File file, final int depth, LockFile rLck)
}
}
- private static class PackedRefList extends RefList<Ref> {
- static final PackedRefList NO_PACKED_REFS = new PackedRefList(
- RefList.emptyList(), FileSnapshot.MISSING_FILE,
- ObjectId.zeroId());
+ /**
+ * Get times to sleep while retrying a possibly contentious operation.
+ * <p>
+ * For retrying an operation that might have high contention, such as locking
+ * the {@code packed-refs} file, the caller may implement a retry loop using
+ * the returned values:
+ *
+ * <pre>
+ * for (int toSleepMs : getRetrySleepMs()) {
+ * sleep(toSleepMs);
+ * if (isSuccessful(doSomething())) {
+ * return success;
+ * }
+ * }
+ * return failure;
+ * </pre>
+ *
+ * The first value in the returned iterable is 0, and the caller should treat
+ * a fully-consumed iterator as a timeout.
+ *
+ * @return iterable of times, in milliseconds, that the caller should sleep
+ * before attempting an operation.
+ */
+ Iterable<Integer> getRetrySleepMs() {
+ return retrySleepMs;
+ }
- final FileSnapshot snapshot;
+ void setRetrySleepMs(List<Integer> retrySleepMs) {
+ if (retrySleepMs == null || retrySleepMs.isEmpty()
+ || retrySleepMs.get(0).intValue() != 0) {
+ throw new IllegalArgumentException();
+ }
+ this.retrySleepMs = retrySleepMs;
+ }
- final ObjectId id;
+ /**
+ * Sleep with {@link Thread#sleep(long)}, converting {@link
+ * InterruptedException} to {@link InterruptedIOException}.
+ *
+ * @param ms
+ * time to sleep, in milliseconds; zero or negative is a no-op.
+ * @throws InterruptedIOException
+ * if sleeping was interrupted.
+ */
+ static void sleep(long ms) throws InterruptedIOException {
+ if (ms <= 0) {
+ return;
+ }
+ try {
+ Thread.sleep(ms);
+ } catch (InterruptedException e) {
+ InterruptedIOException ie = new InterruptedIOException();
+ ie.initCause(e);
+ throw ie;
+ }
+ }
- PackedRefList(RefList<Ref> src, FileSnapshot s, ObjectId i) {
+ static class PackedRefList extends RefList<Ref> {
+
+ private final FileSnapshot snapshot;
+
+ private final ObjectId id;
+
+ private PackedRefList(RefList<Ref> src, FileSnapshot s, ObjectId i) {
super(src);
snapshot = s;
id = i;
}
}
+ private static final PackedRefList NO_PACKED_REFS = new PackedRefList(
+ RefList.emptyList(), FileSnapshot.MISSING_FILE,
+ ObjectId.zeroId());
+
private static LooseSymbolicRef newSymbolicRef(FileSnapshot snapshot,
String name, String target) {
Ref dst = new ObjectIdRef.Unpeeled(NEW, target, null);
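The new packing path above takes the packed-refs lock through lockPackedRefs(), which retries on the schedule returned by getRetrySleepMs() and treats an exhausted schedule as a timeout. A minimal, self-contained sketch of that retry pattern follows; the backoff values and the TryLock interface are illustrative stand-ins, not JGit API:

```java
import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.List;

public class PackedRefsRetrySketch {
	// Illustrative schedule; the first entry must be 0 so the first attempt
	// runs immediately, later entries add back-off between attempts.
	static final List<Integer> RETRY_SLEEP_MS = Arrays.asList(0, 100, 100, 200, 400);

	/** Stand-in for LockFile.lock(); returns true once the lock is held. */
	interface TryLock {
		boolean tryLock();
	}

	// Mirrors the loop in lockPackedRefs(): sleep, try, and report a timeout
	// (here: false) once the schedule is fully consumed.
	static boolean lockWithRetries(TryLock lock) throws InterruptedIOException {
		for (int ms : RETRY_SLEEP_MS) {
			sleep(ms);
			if (lock.tryLock()) {
				return true;
			}
		}
		return false;
	}

	// Same conversion as RefDirectory.sleep(): interruption surfaces as an
	// InterruptedIOException; zero or negative delays are a no-op.
	static void sleep(long ms) throws InterruptedIOException {
		if (ms <= 0) {
			return;
		}
		try {
			Thread.sleep(ms);
		} catch (InterruptedException e) {
			InterruptedIOException ie = new InterruptedIOException();
			ie.initCause(e);
			throw ie;
		}
	}
}
```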
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
index 3c1916b..1105352 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/RefDirectoryUpdate.java
@@ -50,6 +50,7 @@
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate;
+import org.eclipse.jgit.lib.ReflogEntry;
import org.eclipse.jgit.lib.Repository;
/** Updates any reference stored by {@link RefDirectory}. */
@@ -127,14 +128,14 @@ protected Result doUpdate(final Result status) throws IOException {
return status;
}
- private String toResultString(final Result status) {
+ private String toResultString(Result status) {
switch (status) {
case FORCED:
- return "forced-update"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_FORCED_UPDATE;
case FAST_FORWARD:
- return "fast forward"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_FAST_FORWARD;
case NEW:
- return "created"; //$NON-NLS-1$
+ return ReflogEntry.PREFIX_CREATED;
default:
return null;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
index 16b2a46..8723a8b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogEntryImpl.java
@@ -139,4 +139,4 @@ public CheckoutEntry parseCheckout() {
else
return null;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
index 24d2c79..0213c10 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/ReflogWriter.java
@@ -72,20 +72,18 @@
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.FileUtils;
-/**
- * Utility for writing reflog entries
- */
+/** Utility for writing reflog entries. */
public class ReflogWriter {
/**
- * Get the ref name to be used for when locking a ref's log for rewriting
+ * Get the ref name to be used when locking a ref's log for rewriting.
*
* @param name
* name of the ref, relative to the Git repository top level
* directory (so typically starts with refs/).
- * @return the name of the ref's lock ref
+ * @return the name of the ref's lock ref.
*/
- public static String refLockFor(final String name) {
+ public static String refLockFor(String name) {
return name + LockFile.SUFFIX;
}
@@ -98,24 +96,24 @@ public static String refLockFor(final String name) {
private final boolean forceWrite;
/**
- * Create write for repository
+ * Create writer for repository.
*
* @param repository
*/
- public ReflogWriter(final Repository repository) {
+ public ReflogWriter(Repository repository) {
this(repository, false);
}
/**
- * Create write for repository
+ * Create writer for repository.
*
* @param repository
* @param forceWrite
* true to write to disk all entries logged, false to respect the
- * repository's config and current log file status
+ * repository's config and current log file status.
*/
- public ReflogWriter(final Repository repository, final boolean forceWrite) {
- final FS fs = repository.getFS();
+ public ReflogWriter(Repository repository, boolean forceWrite) {
+ FS fs = repository.getFS();
parent = repository;
File gitDir = repository.getDirectory();
logsDir = fs.resolve(gitDir, LOGS);
@@ -124,19 +122,19 @@ public ReflogWriter(final Repository repository, final boolean forceWrite) {
}
/**
- * Get repository that reflog is being written for
+ * Get repository that reflog is being written for.
*
- * @return file repository
+ * @return file repository.
*/
public Repository getRepository() {
return parent;
}
/**
- * Create the log directories
+ * Create the log directories.
*
* @throws IOException
- * @return this writer
+ * @return this writer.
*/
public ReflogWriter create() throws IOException {
FileUtils.mkdir(logsDir);
@@ -163,15 +161,14 @@ public File logFor(String name) {
}
/**
- * Write the given {@link ReflogEntry} entry to the ref's log
+ * Write the given entry to the ref's log.
*
* @param refName
- *
* @param entry
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final String refName, final ReflogEntry entry)
+ public ReflogWriter log(String refName, ReflogEntry entry)
throws IOException {
return log(refName, entry.getOldId(), entry.getNewId(), entry.getWho(),
entry.getComment());
@@ -188,15 +185,14 @@ public ReflogWriter log(final String refName, final ReflogEntry entry)
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final String refName, final ObjectId oldId,
- final ObjectId newId, final PersonIdent ident, final String message)
- throws IOException {
+ public ReflogWriter log(String refName, ObjectId oldId,
+ ObjectId newId, PersonIdent ident, String message) throws IOException {
byte[] encoded = encode(oldId, newId, ident, message);
return log(refName, encoded);
}
/**
- * Write the given ref update to the ref's log
+ * Write the given ref update to the ref's log.
*
* @param update
* @param msg
@@ -204,11 +200,11 @@ public ReflogWriter log(final String refName, final ObjectId oldId,
* @return this writer
* @throws IOException
*/
- public ReflogWriter log(final RefUpdate update, final String msg,
- final boolean deref) throws IOException {
- final ObjectId oldId = update.getOldObjectId();
- final ObjectId newId = update.getNewObjectId();
- final Ref ref = update.getRef();
+ public ReflogWriter log(RefUpdate update, String msg,
+ boolean deref) throws IOException {
+ ObjectId oldId = update.getOldObjectId();
+ ObjectId newId = update.getNewObjectId();
+ Ref ref = update.getRef();
PersonIdent ident = update.getRefLogIdent();
if (ident == null)
@@ -216,7 +212,7 @@ public ReflogWriter log(final RefUpdate update, final String msg,
else
ident = new PersonIdent(ident);
- final byte[] rec = encode(oldId, newId, ident, msg);
+ byte[] rec = encode(oldId, newId, ident, msg);
if (deref && ref.isSymbolic()) {
log(ref.getName(), rec);
log(ref.getLeaf().getName(), rec);
@@ -228,22 +224,23 @@ public ReflogWriter log(final RefUpdate update, final String msg,
private byte[] encode(ObjectId oldId, ObjectId newId, PersonIdent ident,
String message) {
- final StringBuilder r = new StringBuilder();
+ StringBuilder r = new StringBuilder();
r.append(ObjectId.toString(oldId));
r.append(' ');
r.append(ObjectId.toString(newId));
r.append(' ');
r.append(ident.toExternalString());
r.append('\t');
- r.append(message.replace("\r\n", " ").replace("\n", " ")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
+ r.append(
+ message.replace("\r\n", " ") //$NON-NLS-1$ //$NON-NLS-2$
+ .replace("\n", " ")); //$NON-NLS-1$ //$NON-NLS-2$
r.append('\n');
return Constants.encode(r.toString());
}
- private ReflogWriter log(final String refName, final byte[] rec)
- throws IOException {
- final File log = logFor(refName);
- final boolean write = forceWrite
+ private ReflogWriter log(String refName, byte[] rec) throws IOException {
+ File log = logFor(refName);
+ boolean write = forceWrite
|| (isLogAllRefUpdates() && shouldAutoCreateLog(refName))
|| log.isFile();
if (!write)
@@ -254,7 +251,7 @@ private ReflogWriter log(final String refName, final byte[] rec)
try {
out = new FileOutputStream(log, true);
} catch (FileNotFoundException err) {
- final File dir = log.getParentFile();
+ File dir = log.getParentFile();
if (dir.exists())
throw err;
if (!dir.mkdirs() && !dir.isDirectory())
@@ -281,10 +278,10 @@ private boolean isLogAllRefUpdates() {
return parent.getConfig().get(CoreConfig.KEY).isLogAllRefUpdates();
}
- private boolean shouldAutoCreateLog(final String refName) {
- return refName.equals(HEAD) //
- || refName.startsWith(R_HEADS) //
- || refName.startsWith(R_REMOTES) //
+ private boolean shouldAutoCreateLog(String refName) {
+ return refName.equals(HEAD)
+ || refName.startsWith(R_HEADS)
+ || refName.startsWith(R_REMOTES)
|| refName.equals(R_STASH);
}
-}
\ No newline at end of file
+}
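ReflogWriter.encode() above produces one reflog line per update: old id, new id, and the committer ident separated by spaces, then a tab, then the message with embedded newlines flattened to spaces. A standalone sketch of that line layout, using made-up object ids and an example ident string:

```java
public class ReflogLineSketch {
	// Mirrors the layout written by ReflogWriter.encode():
	// <old-id> SP <new-id> SP <ident> TAB <message, newlines flattened> LF
	static String encode(String oldId, String newId, String ident, String message) {
		StringBuilder r = new StringBuilder();
		r.append(oldId).append(' ');
		r.append(newId).append(' ');
		r.append(ident).append('\t');
		r.append(message.replace("\r\n", " ").replace("\n", " "));
		r.append('\n');
		return r.toString();
	}

	public static void main(String[] args) {
		System.out.print(encode(
				"0000000000000000000000000000000000000000",
				"3f786850e387550fdab836ed7e6dc881de23001b",
				"A U Thor <author@example.com> 1500000000 +0000",
				"commit (initial): first line\nsecond line"));
	}
}
```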
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
index 373a494..5fe0429 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/SimpleDataOutput.java
@@ -136,4 +136,4 @@ public void writeChars(String s) throws IOException {
public void writeUTF(String s) throws IOException {
throw new UnsupportedOperationException();
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
index 1e2b239..d9cbbd8 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/file/WriteConfig.java
@@ -49,12 +49,7 @@
class WriteConfig {
/** Key for {@link Config#get(SectionParser)}. */
- static final Config.SectionParser<WriteConfig> KEY = new SectionParser<WriteConfig>() {
- @Override
- public WriteConfig parse(final Config cfg) {
- return new WriteConfig(cfg);
- }
- };
+ static final Config.SectionParser<WriteConfig> KEY = WriteConfig::new;
private final int compression;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java
new file mode 100644
index 0000000..0a5f9c1
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/io/BlockSource.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.io;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+
+/**
+ * Provides content blocks of a file.
+ * <p>
+ * {@code BlockSource} implementations must decide if they will be thread-safe,
+ * or not.
+ */
+public abstract class BlockSource implements AutoCloseable {
+ /**
+ * Wrap a byte array as a {@code BlockSource}.
+ *
+ * @param content
+ * input file.
+ * @return block source to read from {@code content}.
+ */
+ public static BlockSource from(byte[] content) {
+ return new BlockSource() {
+ @Override
+ public ByteBuffer read(long pos, int cnt) {
+ ByteBuffer buf = ByteBuffer.allocate(cnt);
+ if (pos < content.length) {
+ int p = (int) pos;
+ int n = Math.min(cnt, content.length - p);
+ buf.put(content, p, n);
+ }
+ return buf;
+ }
+
+ @Override
+ public long size() {
+ return content.length;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ };
+ }
+
+ /**
+ * Read from a {@code FileInputStream}.
+ * <p>
+ * The returned {@code BlockSource} is not thread-safe, as it must seek the
+ * file channel to read a block.
+ *
+ * @param in
+ * the file. The {@code BlockSource} will close {@code in}.
+ * @return wrapper for {@code in}.
+ */
+ public static BlockSource from(FileInputStream in) {
+ return from(in.getChannel());
+ }
+
+ /**
+ * Read from a {@code FileChannel}.
+ * <p>
+ * The returned {@code BlockSource} is not thread-safe, as it must seek the
+ * file channel to read a block.
+ *
+ * @param ch
+ * the file. The {@code BlockSource} will close {@code ch}.
+ * @return wrapper for {@code ch}.
+ */
+ public static BlockSource from(FileChannel ch) {
+ return new BlockSource() {
+ @Override
+ public ByteBuffer read(long pos, int blockSize) throws IOException {
+ ByteBuffer b = ByteBuffer.allocate(blockSize);
+ ch.position(pos);
+ int n;
+ do {
+ n = ch.read(b);
+ } while (n > 0 && b.position() < blockSize);
+ return b;
+ }
+
+ @Override
+ public long size() throws IOException {
+ return ch.size();
+ }
+
+ @Override
+ public void close() {
+ try {
+ ch.close();
+ } catch (IOException e) {
+ // Ignore close failures of read-only files.
+ }
+ }
+ };
+ }
+
+ /**
+ * Read a block from the file.
+ * <p>
+ * To reduce copying, the returned ByteBuffer should have an accessible
+ * array and {@code arrayOffset() == 0}. The caller will discard the
+ * ByteBuffer and directly use the backing array.
+ *
+ * @param position
+ * position of the block in the file, specified in bytes from the
+ * beginning of the file.
+ * @param blockSize
+ * size to read.
+ * @return buffer containing the block content.
+ * @throws IOException
+ * if block cannot be read.
+ */
+ public abstract ByteBuffer read(long position, int blockSize)
+ throws IOException;
+
+ /**
+ * Determine the size of the file.
+ *
+ * @return total number of bytes in the file.
+ * @throws IOException
+ * if size cannot be obtained.
+ */
+ public abstract long size() throws IOException;
+
+ /**
+ * Advise the {@code BlockSource} a sequential scan is starting.
+ *
+ * @param startPos
+ * starting position.
+ * @param endPos
+ * ending position.
+ */
+ public void adviseSequentialRead(long startPos, long endPos) {
+ // Do nothing by default.
+ }
+
+ @Override
+ public abstract void close();
+}
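The byte-array flavour of BlockSource defined above never throws and simply returns a ByteBuffer whose position() marks how many bytes were actually available; readers such as BlockReader.readBlockIntoBuf() rely on exactly that. A short usage sketch, with arbitrary sizes chosen for illustration:

```java
import java.nio.ByteBuffer;

import org.eclipse.jgit.internal.storage.io.BlockSource;

public class BlockSourceExample {
	public static void main(String[] args) throws Exception {
		byte[] content = new byte[10_000]; // stand-in for reftable bytes
		try (BlockSource src = BlockSource.from(content)) {
			// Ask for a 4 KiB block near the end of the "file"; only the
			// remaining 1808 bytes are copied, and position() reports that.
			ByteBuffer b = src.read(8192, 4096);
			System.out.println("valid bytes in block: " + b.position());
			System.out.println("file size: " + src.size());
		}
	}
}
```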
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
index 7e10878..969d02b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/DeltaIndexScanner.java
@@ -127,4 +127,4 @@ private static int tableSize(final int worstCaseBlockCnt) {
sz <<= 1;
return sz;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
index a089657..bc7a603 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/ObjectToPack.java
@@ -182,6 +182,7 @@ public final boolean isWritten() {
}
/** @return the type of this object. */
+ @Override
public final int getType() {
return (flags >> TYPE_SHIFT) & 0x7;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
index 248692f..e8bbf78 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/pack/PackExt.java
@@ -59,6 +59,9 @@ public class PackExt {
/** A pack bitmap index file extension. */
public static final PackExt BITMAP_INDEX = newPackExt("bitmap"); //$NON-NLS-1$
+ /** A reftable file. */
+ public static final PackExt REFTABLE = newPackExt("ref"); //$NON-NLS-1$
+
/** @return all of the PackExt values. */
public static PackExt[] values() {
return VALUES;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java
new file mode 100644
index 0000000..a92bedc
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockReader.java
@@ -0,0 +1,584 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.BlockWriter.compare;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_DATA;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_1ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_2ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_SYMREF;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_TYPE_MASK;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.reverseUpdateIndex;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+import static org.eclipse.jgit.lib.Ref.Storage.PACKED;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.lib.CheckoutEntry;
+import org.eclipse.jgit.lib.InflaterCache;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdRef;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.lib.SymbolicRef;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.RawParseUtils;
+
+/** Reads a single block for {@link ReftableReader}. */
+class BlockReader {
+ private byte blockType;
+ private long endPosition;
+ private boolean truncated;
+
+ private byte[] buf;
+ private int bufLen;
+ private int ptr;
+
+ private int keysStart;
+ private int keysEnd;
+
+ private int restartCnt;
+ private int restartTbl;
+
+ private byte[] nameBuf = new byte[256];
+ private int nameLen;
+ private int valueType;
+
+ byte type() {
+ return blockType;
+ }
+
+ boolean truncated() {
+ return truncated;
+ }
+
+ long endPosition() {
+ return endPosition;
+ }
+
+ boolean next() {
+ return ptr < keysEnd;
+ }
+
+ void parseKey() {
+ int pfx = readVarint32();
+ valueType = readVarint32();
+ int sfx = valueType >>> 3;
+ if (pfx + sfx > nameBuf.length) {
+ int n = Math.max(pfx + sfx, nameBuf.length * 2);
+ nameBuf = Arrays.copyOf(nameBuf, n);
+ }
+ System.arraycopy(buf, ptr, nameBuf, pfx, sfx);
+ ptr += sfx;
+ nameLen = pfx + sfx;
+ }
+
+ String name() {
+ int len = nameLen;
+ if (blockType == LOG_BLOCK_TYPE) {
+ len -= 9;
+ }
+ return RawParseUtils.decode(UTF_8, nameBuf, 0, len);
+ }
+
+ boolean match(byte[] match, boolean matchIsPrefix) {
+ int len = nameLen;
+ if (blockType == LOG_BLOCK_TYPE) {
+ len -= 9;
+ }
+ if (matchIsPrefix) {
+ return len >= match.length
+ && compare(
+ match, 0, match.length,
+ nameBuf, 0, match.length) == 0;
+ }
+ return compare(match, 0, match.length, nameBuf, 0, len) == 0;
+ }
+
+ long readPositionFromIndex() throws IOException {
+ if (blockType != INDEX_BLOCK_TYPE) {
+ throw invalidBlock();
+ }
+
+ readVarint32(); // skip prefix length
+ int n = readVarint32() >>> 3;
+ ptr += n; // skip name
+ return readVarint64();
+ }
+
+ Ref readRef() throws IOException {
+ String name = RawParseUtils.decode(UTF_8, nameBuf, 0, nameLen);
+ switch (valueType & VALUE_TYPE_MASK) {
+ case VALUE_NONE: // delete
+ return newRef(name);
+
+ case VALUE_1ID:
+ return new ObjectIdRef.PeeledNonTag(PACKED, name, readValueId());
+
+ case VALUE_2ID: { // annotated tag
+ ObjectId id1 = readValueId();
+ ObjectId id2 = readValueId();
+ return new ObjectIdRef.PeeledTag(PACKED, name, id1, id2);
+ }
+
+ case VALUE_SYMREF: {
+ String val = readValueString();
+ return new SymbolicRef(name, newRef(val));
+ }
+
+ default:
+ throw invalidBlock();
+ }
+ }
+
+ @Nullable
+ LongList readBlockPositionList() {
+ int n = valueType & VALUE_TYPE_MASK;
+ if (n == 0) {
+ n = readVarint32();
+ if (n == 0) {
+ return null;
+ }
+ }
+
+ LongList b = new LongList(n);
+ b.add(readVarint64());
+ for (int j = 1; j < n; j++) {
+ long prior = b.get(j - 1);
+ b.add(prior + readVarint64());
+ }
+ return b;
+ }
+
+ long readLogUpdateIndex() {
+ return reverseUpdateIndex(NB.decodeUInt64(nameBuf, nameLen - 8));
+ }
+
+ @Nullable
+ ReflogEntry readLogEntry() {
+ if ((valueType & VALUE_TYPE_MASK) == LOG_NONE) {
+ return null;
+ }
+
+ ObjectId oldId = readValueId();
+ ObjectId newId = readValueId();
+ PersonIdent who = readPersonIdent();
+ String msg = readValueString();
+
+ return new ReflogEntry() {
+ @Override
+ public ObjectId getOldId() {
+ return oldId;
+ }
+
+ @Override
+ public ObjectId getNewId() {
+ return newId;
+ }
+
+ @Override
+ public PersonIdent getWho() {
+ return who;
+ }
+
+ @Override
+ public String getComment() {
+ return msg;
+ }
+
+ @Override
+ public CheckoutEntry parseCheckout() {
+ return null;
+ }
+ };
+ }
+
+ private ObjectId readValueId() {
+ ObjectId id = ObjectId.fromRaw(buf, ptr);
+ ptr += OBJECT_ID_LENGTH;
+ return id;
+ }
+
+ private String readValueString() {
+ int len = readVarint32();
+ int end = ptr + len;
+ String s = RawParseUtils.decode(UTF_8, buf, ptr, end);
+ ptr = end;
+ return s;
+ }
+
+ private PersonIdent readPersonIdent() {
+ String name = readValueString();
+ String email = readValueString();
+ long ms = readVarint64() * 1000;
+ int tz = readInt16();
+ return new PersonIdent(name, email, ms, tz);
+ }
+
+ void readBlock(BlockSource src, long pos, int fileBlockSize)
+ throws IOException {
+ readBlockIntoBuf(src, pos, fileBlockSize);
+ parseBlockStart(src, pos, fileBlockSize);
+ }
+
+ private void readBlockIntoBuf(BlockSource src, long pos, int size)
+ throws IOException {
+ ByteBuffer b = src.read(pos, size);
+ bufLen = b.position();
+ if (bufLen <= 0) {
+ throw invalidBlock();
+ }
+ if (b.hasArray() && b.arrayOffset() == 0) {
+ buf = b.array();
+ } else {
+ buf = new byte[bufLen];
+ b.flip();
+ b.get(buf);
+ }
+ endPosition = pos + bufLen;
+ }
+
+ private void parseBlockStart(BlockSource src, long pos, int fileBlockSize)
+ throws IOException {
+ ptr = 0;
+ if (pos == 0) {
+ if (bufLen == FILE_HEADER_LEN) {
+ setupEmptyFileBlock();
+ return;
+ }
+ ptr += FILE_HEADER_LEN; // first block begins with file header
+ }
+
+ int typeAndSize = NB.decodeInt32(buf, ptr);
+ ptr += 4;
+
+ blockType = (byte) (typeAndSize >>> 24);
+ int blockLen = decodeBlockLen(typeAndSize);
+ if (blockType == LOG_BLOCK_TYPE) {
+ // Log blocks must be inflated after the header.
+ long deflatedSize = inflateBuf(src, pos, blockLen, fileBlockSize);
+ endPosition = pos + 4 + deflatedSize;
+ }
+ if (bufLen < blockLen) {
+ if (blockType != INDEX_BLOCK_TYPE) {
+ throw invalidBlock();
+ }
+ // It's OK during a sequential scan for an index block to have been
+ // partially read and be truncated in-memory. This happens when
+ // the index block is larger than the file's blockSize. Caller
+ // will break out of its scan loop once it sees the blockType.
+ truncated = true;
+ } else if (bufLen > blockLen) {
+ bufLen = blockLen;
+ }
+
+ if (blockType != FILE_BLOCK_TYPE) {
+ restartCnt = NB.decodeUInt16(buf, bufLen - 2);
+ restartTbl = bufLen - (restartCnt * 3 + 2);
+ keysStart = ptr;
+ keysEnd = restartTbl;
+ } else {
+ keysStart = ptr;
+ keysEnd = ptr;
+ }
+ }
+
+ static int decodeBlockLen(int typeAndSize) {
+ return typeAndSize & 0xffffff;
+ }
+
+ private long inflateBuf(BlockSource src, long pos, int blockLen,
+ int fileBlockSize) throws IOException {
+ byte[] dst = new byte[blockLen];
+ System.arraycopy(buf, 0, dst, 0, 4);
+
+ long deflatedSize = 0;
+ Inflater inf = InflaterCache.get();
+ try {
+ inf.setInput(buf, ptr, bufLen - ptr);
+ for (int o = 4;;) {
+ int n = inf.inflate(dst, o, dst.length - o);
+ o += n;
+ if (inf.finished()) {
+ deflatedSize = inf.getBytesRead();
+ break;
+ } else if (n <= 0 && inf.needsInput()) {
+ long p = pos + 4 + inf.getBytesRead();
+ readBlockIntoBuf(src, p, fileBlockSize);
+ inf.setInput(buf, 0, bufLen);
+ } else if (n <= 0) {
+ throw invalidBlock();
+ }
+ }
+ } catch (DataFormatException e) {
+ throw invalidBlock(e);
+ } finally {
+ InflaterCache.release(inf);
+ }
+
+ buf = dst;
+ bufLen = dst.length;
+ return deflatedSize;
+ }
+
+ private void setupEmptyFileBlock() {
+ // An empty reftable has only the file header in the first block.
+ blockType = FILE_BLOCK_TYPE;
+ ptr = FILE_HEADER_LEN;
+ restartCnt = 0;
+ restartTbl = bufLen;
+ keysStart = bufLen;
+ keysEnd = bufLen;
+ }
+
+ void verifyIndex() throws IOException {
+ if (blockType != INDEX_BLOCK_TYPE || truncated) {
+ throw invalidBlock();
+ }
+ }
+
+ /**
+ * Finds a key in the block and positions the current pointer on its record.
+ * <p>
+ * As a side-effect this method arranges for the current pointer to be near
+ * or exactly on {@code key}, allowing other methods to access data from
+ * that current record:
+ * <ul>
+ * <li>{@link #name()}
+ * <li>{@link #match(byte[], boolean)}
+ * <li>{@link #readRef()}
+ * <li>{@link #readLogUpdateIndex()}
+ * <li>{@link #readLogEntry()}
+ * <li>{@link #readBlockPositionList()}
+ * </ul>
+ *
+ * @param key
+ * key to find.
+ * @return {@code <0} if the key occurs before the start of this block;
+ * {@code 0} if the block is positioned on the key; {@code >0} if
+ * the key occurs after the last key of this block.
+ */
+ int seekKey(byte[] key) {
+ int low = 0;
+ int end = restartCnt;
+ for (;;) {
+ int mid = (low + end) >>> 1;
+ int p = NB.decodeUInt24(buf, restartTbl + mid * 3);
+ ptr = p + 1; // skip 0 prefix length
+ int n = readVarint32() >>> 3;
+ int cmp = compare(key, 0, key.length, buf, ptr, n);
+ if (cmp < 0) {
+ end = mid;
+ } else if (cmp == 0) {
+ ptr = p;
+ return 0;
+ } else /* if (cmp > 0) */ {
+ low = mid + 1;
+ }
+ if (low >= end) {
+ return scanToKey(key, p, low, cmp);
+ }
+ }
+ }
+
+ /**
+ * Performs the linear search step within a restart interval.
+ * <p>
+ * Starts at a restart position whose key sorts before (or equal to)
+ * {@code key} and walks sequentially through the following prefix
+ * compressed records to find {@code key}.
+ *
+ * @param key
+ * key the caller wants to find.
+ * @param rPtr
+ * current record pointer from restart table binary search.
+ * @param rIdx
+ * current restart table index.
+ * @param rCmp
+ * result of compare from restart table binary search.
+ * @return {@code <0} if the key occurs before the start of this block;
+ * {@code 0} if the block is positioned on the key; {@code >0} if
+ * the key occurs after the last key of this block.
+ */
+ private int scanToKey(byte[] key, int rPtr, int rIdx, int rCmp) {
+ if (rCmp < 0) {
+ if (rIdx == 0) {
+ ptr = keysStart;
+ return -1;
+ }
+ ptr = NB.decodeUInt24(buf, restartTbl + (rIdx - 1) * 3);
+ } else {
+ ptr = rPtr;
+ }
+
+ int cmp;
+ do {
+ int savePtr = ptr;
+ parseKey();
+ cmp = compare(key, 0, key.length, nameBuf, 0, nameLen);
+ if (cmp <= 0) {
+ // cmp < 0, name should be in this block, but is not.
+ // cmp = 0, block is positioned at name.
+ ptr = savePtr;
+ return cmp < 0 && savePtr == keysStart ? -1 : 0;
+ }
+ skipValue();
+ } while (ptr < keysEnd);
+ return cmp;
+ }
+
+ void skipValue() {
+ switch (blockType) {
+ case REF_BLOCK_TYPE:
+ switch (valueType & VALUE_TYPE_MASK) {
+ case VALUE_NONE:
+ return;
+ case VALUE_1ID:
+ ptr += OBJECT_ID_LENGTH;
+ return;
+ case VALUE_2ID:
+ ptr += 2 * OBJECT_ID_LENGTH;
+ return;
+ case VALUE_SYMREF:
+ skipString();
+ return;
+ }
+ break;
+
+ case OBJ_BLOCK_TYPE: {
+ int n = valueType & VALUE_TYPE_MASK;
+ if (n == 0) {
+ n = readVarint32();
+ }
+ while (n-- > 0) {
+ readVarint32();
+ }
+ return;
+ }
+
+ case INDEX_BLOCK_TYPE:
+ readVarint32();
+ return;
+
+ case LOG_BLOCK_TYPE:
+ if ((valueType & VALUE_TYPE_MASK) == LOG_NONE) {
+ return;
+ } else if ((valueType & VALUE_TYPE_MASK) == LOG_DATA) {
+ ptr += 2 * OBJECT_ID_LENGTH; // oldId, newId
+ skipString(); // name
+ skipString(); // email
+ readVarint64(); // time
+ ptr += 2; // tz
+ skipString(); // msg
+ return;
+ }
+ }
+
+ throw new IllegalStateException();
+ }
+
+ private void skipString() {
+ int n = readVarint32(); // string length
+ ptr += n;
+ }
+
+ private short readInt16() {
+ return (short) NB.decodeUInt16(buf, ptr += 2);
+ }
+
+ private int readVarint32() {
+ byte c = buf[ptr++];
+ int val = c & 0x7f;
+ while ((c & 0x80) != 0) {
+ c = buf[ptr++];
+ val++;
+ val <<= 7;
+ val |= (c & 0x7f);
+ }
+ return val;
+ }
+
+ private long readVarint64() {
+ byte c = buf[ptr++];
+ long val = c & 0x7f;
+ while ((c & 0x80) != 0) {
+ c = buf[ptr++];
+ val++;
+ val <<= 7;
+ val |= (c & 0x7f);
+ }
+ return val;
+ }
+
+ private static Ref newRef(String name) {
+ return new ObjectIdRef.Unpeeled(NEW, name, null);
+ }
+
+ private static IOException invalidBlock() {
+ return invalidBlock(null);
+ }
+
+ private static IOException invalidBlock(Throwable cause) {
+ return new IOException(JGitText.get().invalidReftableBlock, cause);
+ }
+}
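readVarint32() and readVarint64() above decode the offset-style base-128 varint also used for OFS_DELTA distances in pack files: every continuation byte adds one before shifting, which avoids the redundant leading-zero encodings of a plain varint. The authoritative encoder lives in ReftableOutputStream (not part of this hunk); the sketch below pairs an assumed encoder with a decoder equivalent to readVarint64() to show the round trip:

```java
public class ReftableVarintSketch {
	// Assumed encoder matching the decoder in BlockReader.readVarint64():
	// emit base-128 digits high-to-low, subtracting one from every byte
	// that still carries a continuation bit.
	static byte[] encode(long value) {
		byte[] tmp = new byte[10];
		int i = tmp.length - 1;
		tmp[i] = (byte) (value & 0x7f);
		while ((value >>>= 7) != 0) {
			tmp[--i] = (byte) (0x80 | (--value & 0x7f));
		}
		byte[] out = new byte[tmp.length - i];
		System.arraycopy(tmp, i, out, 0, out.length);
		return out;
	}

	// Same loop as BlockReader.readVarint64(), reading from offset 0.
	static long decode(byte[] buf) {
		int ptr = 0;
		byte c = buf[ptr++];
		long val = c & 0x7f;
		while ((c & 0x80) != 0) {
			c = buf[ptr++];
			val++;
			val <<= 7;
			val |= (c & 0x7f);
		}
		return val;
	}

	public static void main(String[] args) {
		for (long v : new long[] { 0, 127, 128, 300, 1 << 21 }) {
			byte[] enc = encode(v);
			System.out.println(v + " -> " + enc.length + " bytes -> " + decode(enc));
		}
	}
}
```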
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java
new file mode 100644
index 0000000..cb0f988
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockSizeTooSmallException.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+/** Thrown if {@link ReftableWriter} cannot fit a reference. */
+public class BlockSizeTooSmallException extends IOException {
+ private static final long serialVersionUID = 1L;
+
+ private final int minBlockSize;
+
+ BlockSizeTooSmallException(int b) {
+ minBlockSize = b;
+ }
+
+ /** @return minimum block size in bytes reftable requires to write a ref. */
+ public int getMinimumBlockSize() {
+ return minBlockSize;
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java
new file mode 100644
index 0000000..8f3e889
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/BlockWriter.java
@@ -0,0 +1,601 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_DATA;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_RESTARTS;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_1ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_2ID;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_NONE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_SYMREF;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VALUE_TYPE_MASK;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.reverseUpdateIndex;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableOutputStream.computeVarintSize;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+import static org.eclipse.jgit.lib.Ref.Storage.NEW;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.util.IntList;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+
+/** Formats and writes blocks for {@link ReftableWriter}. */
+class BlockWriter {
+ private final byte blockType;
+ private final byte keyType;
+ private final List<Entry> entries;
+ private final int blockLimitBytes;
+ private final int restartInterval;
+
+ private int entriesSumBytes;
+ private int restartCnt;
+
+ BlockWriter(byte type, byte kt, int bs, int ri) {
+ blockType = type;
+ keyType = kt;
+ blockLimitBytes = bs;
+ restartInterval = ri;
+ entries = new ArrayList<>(estimateEntryCount(type, kt, bs));
+ }
+
+ private static int estimateEntryCount(byte blockType, byte keyType,
+ int blockLimitBytes) {
+ double avgBytesPerEntry;
+ switch (blockType) {
+ case REF_BLOCK_TYPE:
+ default:
+ avgBytesPerEntry = 35.31;
+ break;
+
+ case OBJ_BLOCK_TYPE:
+ avgBytesPerEntry = 4.19;
+ break;
+
+ case LOG_BLOCK_TYPE:
+ avgBytesPerEntry = 101.14;
+ break;
+
+ case INDEX_BLOCK_TYPE:
+ switch (keyType) {
+ case REF_BLOCK_TYPE:
+ case LOG_BLOCK_TYPE:
+ default:
+ avgBytesPerEntry = 27.44;
+ break;
+
+ case OBJ_BLOCK_TYPE:
+ avgBytesPerEntry = 11.57;
+ break;
+ }
+ }
+
+ int cnt = (int) (Math.ceil(blockLimitBytes / avgBytesPerEntry));
+ return Math.min(cnt, 4096);
+ }
+
+ byte blockType() {
+ return blockType;
+ }
+
+ boolean padBetweenBlocks() {
+ return padBetweenBlocks(blockType)
+ || (blockType == INDEX_BLOCK_TYPE && padBetweenBlocks(keyType));
+ }
+
+ static boolean padBetweenBlocks(byte type) {
+ return type == REF_BLOCK_TYPE || type == OBJ_BLOCK_TYPE;
+ }
+
+ byte[] lastKey() {
+ return entries.get(entries.size() - 1).key;
+ }
+
+ int currentSize() {
+ return computeBlockBytes(0, false);
+ }
+
+ void mustAdd(Entry entry) throws BlockSizeTooSmallException {
+ if (!tryAdd(entry, true)) {
+ // Insanely long names need a larger block size.
+ throw blockSizeTooSmall(entry);
+ }
+ }
+
+ boolean tryAdd(Entry entry) {
+ if (entry instanceof ObjEntry
+ && computeBlockBytes(entry.sizeBytes(), 1) > blockLimitBytes) {
+ // If the ObjEntry has so many ref block pointers that its
+ // encoding overflows any block, reconfigure it to tell readers to
+ // instead scan all refs for this ObjectId. That significantly
+ // shrinks the entry to a very small size, which may now fit into
+ // this block.
+ ((ObjEntry) entry).markScanRequired();
+ }
+
+ if (tryAdd(entry, true)) {
+ return true;
+ } else if (nextShouldBeRestart()) {
+ // It was time for another restart, but the entry doesn't fit
+ // with its complete key, as the block is nearly full. Try to
+ // force it to fit with prefix compression rather than waste
+ // the tail of the block with padding.
+ return tryAdd(entry, false);
+ }
+ return false;
+ }
+
+ private boolean tryAdd(Entry entry, boolean tryRestart) {
+ byte[] key = entry.key;
+ int prefixLen = 0;
+ boolean restart = tryRestart && nextShouldBeRestart();
+ if (!restart) {
+ Entry priorEntry = entries.get(entries.size() - 1);
+ byte[] prior = priorEntry.key;
+ prefixLen = commonPrefix(prior, prior.length, key);
+ if (prefixLen <= 5 /* "refs/" */ && keyType == REF_BLOCK_TYPE) {
+ // Force restart points at transitions between namespaces
+ // such as "refs/heads/" to "refs/tags/".
+ restart = true;
+ prefixLen = 0;
+ } else if (prefixLen == 0) {
+ restart = true;
+ }
+ }
+
+ entry.restart = restart;
+ entry.prefixLen = prefixLen;
+ int entryBytes = entry.sizeBytes();
+ if (computeBlockBytes(entryBytes, restart) > blockLimitBytes) {
+ return false;
+ }
+
+ entriesSumBytes += entryBytes;
+ entries.add(entry);
+ if (restart) {
+ restartCnt++;
+ }
+ return true;
+ }
+
+ private boolean nextShouldBeRestart() {
+ int cnt = entries.size();
+ return (cnt == 0 || ((cnt + 1) % restartInterval) == 0)
+ && restartCnt < MAX_RESTARTS;
+ }
+
+ private int computeBlockBytes(int entryBytes, boolean restart) {
+ return computeBlockBytes(
+ entriesSumBytes + entryBytes,
+ restartCnt + (restart ? 1 : 0));
+ }
+
+ private static int computeBlockBytes(int entryBytes, int restartCnt) {
+ return 4 // 4-byte block header
+ + entryBytes
+ + restartCnt * 3 // restart_offset
+ + 2; // 2-byte restart_count
+ }
+
+ void writeTo(ReftableOutputStream os) throws IOException {
+ os.beginBlock(blockType);
+ IntList restarts = new IntList(restartCnt);
+ for (Entry entry : entries) {
+ if (entry.restart) {
+ restarts.add(os.bytesWrittenInBlock());
+ }
+ entry.writeKey(os);
+ entry.writeValue(os);
+ }
+ if (restarts.size() == 0 || restarts.size() > MAX_RESTARTS) {
+ throw new IllegalStateException();
+ }
+ for (int i = 0; i < restarts.size(); i++) {
+ os.writeInt24(restarts.get(i));
+ }
+ os.writeInt16(restarts.size());
+ os.flushBlock();
+ }
+
+ private BlockSizeTooSmallException blockSizeTooSmall(Entry entry) {
+ // Compute size required to fit this entry by itself.
+ int min = FILE_HEADER_LEN + computeBlockBytes(entry.sizeBytes(), 1);
+ return new BlockSizeTooSmallException(min);
+ }
+
+ static int commonPrefix(byte[] a, int n, byte[] b) {
+ int len = Math.min(n, Math.min(a.length, b.length));
+ for (int i = 0; i < len; i++) {
+ if (a[i] != b[i]) {
+ return i;
+ }
+ }
+ return len;
+ }
+
+ static int encodeSuffixAndType(int sfx, int valueType) {
+ return (sfx << 3) | valueType;
+ }
+
+ static int compare(
+ byte[] a, int ai, int aLen,
+ byte[] b, int bi, int bLen) {
+ int aEnd = ai + aLen;
+ int bEnd = bi + bLen;
+ while (ai < aEnd && bi < bEnd) {
+ int c = (a[ai++] & 0xff) - (b[bi++] & 0xff);
+ if (c != 0) {
+ return c;
+ }
+ }
+ return aLen - bLen;
+ }
+
+ static abstract class Entry {
+ static int compare(Entry ea, Entry eb) {
+ byte[] a = ea.key;
+ byte[] b = eb.key;
+ return BlockWriter.compare(a, 0, a.length, b, 0, b.length);
+ }
+
+ final byte[] key;
+ int prefixLen;
+ boolean restart;
+
+ Entry(byte[] key) {
+ this.key = key;
+ }
+
+ void writeKey(ReftableOutputStream os) {
+ int sfxLen = key.length - prefixLen;
+ os.writeVarint(prefixLen);
+ os.writeVarint(encodeSuffixAndType(sfxLen, valueType()));
+ os.write(key, prefixLen, sfxLen);
+ }
+
+ int sizeBytes() {
+ int sfxLen = key.length - prefixLen;
+ int sfx = encodeSuffixAndType(sfxLen, valueType());
+ return computeVarintSize(prefixLen)
+ + computeVarintSize(sfx)
+ + sfxLen
+ + valueSize();
+ }
+
+ abstract byte blockType();
+ abstract int valueType();
+ abstract int valueSize();
+ abstract void writeValue(ReftableOutputStream os) throws IOException;
+ }
+
+ static class IndexEntry extends Entry {
+ private final long blockPosition;
+
+ IndexEntry(byte[] key, long blockPosition) {
+ super(key);
+ this.blockPosition = blockPosition;
+ }
+
+ @Override
+ byte blockType() {
+ return INDEX_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return 0;
+ }
+
+ @Override
+ int valueSize() {
+ return computeVarintSize(blockPosition);
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ os.writeVarint(blockPosition);
+ }
+ }
+
+ static class RefEntry extends Entry {
+ final Ref ref;
+
+ RefEntry(Ref ref) {
+ super(nameUtf8(ref));
+ this.ref = ref;
+ }
+
+ @Override
+ byte blockType() {
+ return REF_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ if (ref.isSymbolic()) {
+ return VALUE_SYMREF;
+ } else if (ref.getStorage() == NEW && ref.getObjectId() == null) {
+ return VALUE_NONE;
+ } else if (ref.getPeeledObjectId() != null) {
+ return VALUE_2ID;
+ } else {
+ return VALUE_1ID;
+ }
+ }
+
+ @Override
+ int valueSize() {
+ switch (valueType()) {
+ case VALUE_NONE:
+ return 0;
+ case VALUE_1ID:
+ return OBJECT_ID_LENGTH;
+ case VALUE_2ID:
+ return 2 * OBJECT_ID_LENGTH;
+ case VALUE_SYMREF:
+ if (ref.isSymbolic()) {
+ int nameLen = nameUtf8(ref.getTarget()).length;
+ return computeVarintSize(nameLen) + nameLen;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) throws IOException {
+ switch (valueType()) {
+ case VALUE_NONE:
+ return;
+
+ case VALUE_1ID: {
+ ObjectId id1 = ref.getObjectId();
+ if (!ref.isPeeled()) {
+ throw new IOException(JGitText.get().peeledRefIsRequired);
+ } else if (id1 == null) {
+ throw new IOException(JGitText.get().invalidId0);
+ }
+ os.writeId(id1);
+ return;
+ }
+
+ case VALUE_2ID: {
+ ObjectId id1 = ref.getObjectId();
+ ObjectId id2 = ref.getPeeledObjectId();
+ if (!ref.isPeeled()) {
+ throw new IOException(JGitText.get().peeledRefIsRequired);
+ } else if (id1 == null || id2 == null) {
+ throw new IOException(JGitText.get().invalidId0);
+ }
+ os.writeId(id1);
+ os.writeId(id2);
+ return;
+ }
+
+ case VALUE_SYMREF:
+ if (ref.isSymbolic()) {
+ os.writeVarintString(ref.getTarget().getName());
+ return;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ private static byte[] nameUtf8(Ref ref) {
+ return ref.getName().getBytes(UTF_8);
+ }
+ }
+
+ static class ObjEntry extends Entry {
+ final LongList blockPos;
+
+ ObjEntry(int idLen, ObjectId id, LongList blockPos) {
+ super(key(idLen, id));
+ this.blockPos = blockPos;
+ }
+
+ private static byte[] key(int idLen, ObjectId id) {
+ byte[] key = new byte[OBJECT_ID_LENGTH];
+ id.copyRawTo(key, 0);
+ if (idLen < OBJECT_ID_LENGTH) {
+ return Arrays.copyOf(key, idLen);
+ }
+ return key;
+ }
+
+ void markScanRequired() {
+ blockPos.clear();
+ }
+
+ @Override
+ byte blockType() {
+ return OBJ_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ int cnt = blockPos.size();
+ return cnt != 0 && cnt <= VALUE_TYPE_MASK ? cnt : 0;
+ }
+
+ @Override
+ int valueSize() {
+ int cnt = blockPos.size();
+ if (cnt == 0) {
+ return computeVarintSize(0);
+ }
+
+ int n = 0;
+ if (cnt > VALUE_TYPE_MASK) {
+ n += computeVarintSize(cnt);
+ }
+ n += computeVarintSize(blockPos.get(0));
+ for (int j = 1; j < cnt; j++) {
+ long prior = blockPos.get(j - 1);
+ long b = blockPos.get(j);
+ n += computeVarintSize(b - prior);
+ }
+ return n;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) throws IOException {
+ int cnt = blockPos.size();
+ if (cnt == 0) {
+ os.writeVarint(0);
+ return;
+ }
+
+ if (cnt > VALUE_TYPE_MASK) {
+ os.writeVarint(cnt);
+ }
+ os.writeVarint(blockPos.get(0));
+ for (int j = 1; j < cnt; j++) {
+ long prior = blockPos.get(j - 1);
+ long b = blockPos.get(j);
+ os.writeVarint(b - prior);
+ }
+ }
+ }
+
+ static class DeleteLogEntry extends Entry {
+ DeleteLogEntry(String refName, long updateIndex) {
+ super(LogEntry.key(refName, updateIndex));
+ }
+
+ @Override
+ byte blockType() {
+ return LOG_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return LOG_NONE;
+ }
+
+ @Override
+ int valueSize() {
+ return 0;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ // Nothing in a delete log record.
+ }
+ }
+
+ static class LogEntry extends Entry {
+ final ObjectId oldId;
+ final ObjectId newId;
+ final long timeSecs;
+ final short tz;
+ final byte[] name;
+ final byte[] email;
+ final byte[] msg;
+
+ LogEntry(String refName, long updateIndex, PersonIdent who,
+ ObjectId oldId, ObjectId newId, String message) {
+ super(key(refName, updateIndex));
+
+ this.oldId = oldId;
+ this.newId = newId;
+ this.timeSecs = who.getWhen().getTime() / 1000L;
+ this.tz = (short) who.getTimeZoneOffset();
+ this.name = who.getName().getBytes(UTF_8);
+ this.email = who.getEmailAddress().getBytes(UTF_8);
+ this.msg = message.getBytes(UTF_8);
+ }
+
+ static byte[] key(String ref, long index) {
+ byte[] name = ref.getBytes(UTF_8);
+ byte[] key = Arrays.copyOf(name, name.length + 1 + 8);
+ NB.encodeInt64(key, key.length - 8, reverseUpdateIndex(index));
+ return key;
+ }
+
+ @Override
+ byte blockType() {
+ return LOG_BLOCK_TYPE;
+ }
+
+ @Override
+ int valueType() {
+ return LOG_DATA;
+ }
+
+ @Override
+ int valueSize() {
+ return 2 * OBJECT_ID_LENGTH
+ + computeVarintSize(name.length) + name.length
+ + computeVarintSize(email.length) + email.length
+ + computeVarintSize(timeSecs)
+ + 2 // tz
+ + computeVarintSize(msg.length) + msg.length;
+ }
+
+ @Override
+ void writeValue(ReftableOutputStream os) {
+ os.writeId(oldId);
+ os.writeId(newId);
+ os.writeVarintString(name);
+ os.writeVarintString(email);
+ os.writeVarint(timeSecs);
+ os.writeInt16(tz);
+ os.writeVarintString(msg);
+ }
+ }
+}
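Within a block, BlockWriter.Entry.writeKey() stores each key as varint(prefix length), varint(suffix length << 3 | value type), followed by only the suffix bytes that differ from the previous key; restart points reset the prefix to zero. A standalone illustration of that prefix split, with a helper mirroring commonPrefix() above and two example ref names:

```java
import static java.nio.charset.StandardCharsets.UTF_8;

public class PrefixCompressionSketch {
	// Mirrors BlockWriter.commonPrefix(): length of the shared leading bytes.
	static int commonPrefix(byte[] a, byte[] b) {
		int len = Math.min(a.length, b.length);
		for (int i = 0; i < len; i++) {
			if (a[i] != b[i]) {
				return i;
			}
		}
		return len;
	}

	public static void main(String[] args) {
		byte[] prior = "refs/heads/maint".getBytes(UTF_8);
		byte[] key = "refs/heads/master".getBytes(UTF_8);
		int pfx = commonPrefix(prior, key);
		int sfx = key.length - pfx;
		// writeKey() would emit varint(13), varint(4 << 3 | valueType),
		// then the 4 suffix bytes "ster".
		System.out.println("shared prefix bytes: " + pfx);
		System.out.println("stored suffix: " + new String(key, pfx, sfx, UTF_8));
	}
}
```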
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java
new file mode 100644
index 0000000..d774589
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/EmptyLogCursor.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/** Empty {@link LogCursor} with no results. */
+class EmptyLogCursor extends LogCursor {
+ @Override
+ public boolean next() throws IOException {
+ return false;
+ }
+
+ @Override
+ public String getRefName() {
+ return null;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return 0;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return null;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java
new file mode 100644
index 0000000..c19968c
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/LogCursor.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/** Iterator over logs inside a {@link Reftable}. */
+public abstract class LogCursor implements AutoCloseable {
+ /**
+ * Check if another log record is available.
+ *
+ * @return {@code true} if there is another result.
+ * @throws IOException
+ * logs cannot be read.
+ */
+ public abstract boolean next() throws IOException;
+
+ /** @return name of the current reference. */
+ public abstract String getRefName();
+
+ /** @return identifier of the transaction that created the log record. */
+ public abstract long getUpdateIndex();
+
+ /** @return current log entry. */
+ public abstract ReflogEntry getReflogEntry();
+
+ @Override
+ public abstract void close();
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java
new file mode 100644
index 0000000..71144cd
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/MergedReftable.java
@@ -0,0 +1,375 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.PriorityQueue;
+
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/**
+ * Merges multiple reference tables together.
+ * <p>
+ * A {@link MergedReftable} merge-joins multiple {@link ReftableReader} on the
+ * fly. Tables higher/later in the stack shadow lower/earlier tables, hiding
+ * references that have been updated/replaced.
+ * <p>
+ * By default deleted references are skipped and not returned to the caller.
+ * {@link #setIncludeDeletes(boolean)} can be used to modify this behavior if
+ * the caller needs to preserve deletions during partial compaction.
+ * <p>
+ * A {@code MergedReftable} is not thread-safe.
+ */
+public class MergedReftable extends Reftable {
+ private final Reftable[] tables;
+
+ /**
+ * Initialize a merged table reader.
+ * <p>
+ * The tables in {@code tableStack} will be closed when this
+ * {@code MergedReftable} is closed.
+ *
+ * @param tableStack
+ * stack of tables to read from. The base of the stack is at
+	 *            index 0; the most recent should be at the top of the stack at
+ * {@code tableStack.size() - 1}. The top of the stack (higher
+ * index) shadows the base of the stack (lower index).
+ */
+ public MergedReftable(List<Reftable> tableStack) {
+ tables = tableStack.toArray(new Reftable[0]);
+
+ // Tables must expose deletes to this instance to correctly
+ // shadow references from lower tables.
+ for (Reftable t : tables) {
+ t.setIncludeDeletes(true);
+ }
+ }
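+
+	/*
+	 * Usage sketch (illustrative; baseTable and recentTable are hypothetical
+	 * readers). The later table shadows the earlier one:
+	 *
+	 *   List<Reftable> stack = Arrays.asList(baseTable, recentTable);
+	 *   try (MergedReftable merged = new MergedReftable(stack);
+	 *       RefCursor rc = merged.allRefs()) {
+	 *     while (rc.next()) {
+	 *       Ref r = rc.getRef();
+	 *     }
+	 *   }
+	 */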
+
+ @Override
+ public RefCursor allRefs() throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].allRefs(), i));
+ }
+ return m;
+ }
+
+ @Override
+ public RefCursor seekRef(String name) throws IOException {
+ if (name.endsWith("/")) { //$NON-NLS-1$
+ return seekRefPrefix(name);
+ }
+ return seekSingleRef(name);
+ }
+
+ private RefCursor seekRefPrefix(String name) throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].seekRef(name), i));
+ }
+ return m;
+ }
+
+ private RefCursor seekSingleRef(String name) throws IOException {
+ // Walk the tables from highest priority (end of list) to lowest.
+ // As soon as the reference is found (queue not empty), all lower
+		// priority tables are irrelevant as the current table shadows them.
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = tables.length - 1; i >= 0 && m.queue.isEmpty(); i--) {
+ m.add(new RefQueueEntry(tables[i].seekRef(name), i));
+ }
+ return m;
+ }
+
+ @Override
+ public RefCursor byObjectId(AnyObjectId name) throws IOException {
+ MergedRefCursor m = new MergedRefCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new RefQueueEntry(tables[i].byObjectId(name), i));
+ }
+ return m;
+ }
+
+ @Override
+ public LogCursor allLogs() throws IOException {
+ MergedLogCursor m = new MergedLogCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new LogQueueEntry(tables[i].allLogs(), i));
+ }
+ return m;
+ }
+
+ @Override
+ public LogCursor seekLog(String refName, long updateIdx)
+ throws IOException {
+ MergedLogCursor m = new MergedLogCursor();
+ for (int i = 0; i < tables.length; i++) {
+ m.add(new LogQueueEntry(tables[i].seekLog(refName, updateIdx), i));
+ }
+ return m;
+ }
+
+ @Override
+ public void close() throws IOException {
+ for (Reftable t : tables) {
+ t.close();
+ }
+ }
+
+ int queueSize() {
+ return Math.max(1, tables.length);
+ }
+
+ private class MergedRefCursor extends RefCursor {
+ private final PriorityQueue<RefQueueEntry> queue;
+ private RefQueueEntry head;
+ private Ref ref;
+
+ MergedRefCursor() {
+ queue = new PriorityQueue<>(queueSize(), RefQueueEntry::compare);
+ }
+
+ void add(RefQueueEntry t) throws IOException {
+ // Common case is many iterations over the same RefQueueEntry
+			// for the bottom of the stack (scanning all refs). It's almost
+ // always less than the top of the queue. Avoid the queue's
+ // O(log N) insertion and removal costs for this common case.
+ if (!t.rc.next()) {
+ t.rc.close();
+ } else if (head == null) {
+ RefQueueEntry p = queue.peek();
+ if (p == null || RefQueueEntry.compare(t, p) < 0) {
+ head = t;
+ } else {
+ head = queue.poll();
+ queue.add(t);
+ }
+ } else if (RefQueueEntry.compare(t, head) > 0) {
+ queue.add(t);
+ } else {
+ queue.add(head);
+ head = t;
+ }
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ RefQueueEntry t = poll();
+ if (t == null) {
+ return false;
+ }
+
+ ref = t.rc.getRef();
+ boolean include = includeDeletes || !t.rc.wasDeleted();
+ skipShadowedRefs(ref.getName());
+ add(t);
+ if (include) {
+ return true;
+ }
+ }
+ }
+
+ private RefQueueEntry poll() {
+ RefQueueEntry e = head;
+ if (e != null) {
+ head = null;
+ return e;
+ }
+ return queue.poll();
+ }
+
+ private void skipShadowedRefs(String name) throws IOException {
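+			// Advance any other cursor still positioned on the same name;
+			// those entries come from lower-priority tables and are
+			// shadowed by the ref just returned.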
+ for (;;) {
+ RefQueueEntry t = head != null ? head : queue.peek();
+ if (t != null && name.equals(t.name())) {
+ add(poll());
+ } else {
+ break;
+ }
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public void close() {
+ while (!queue.isEmpty()) {
+ queue.remove().rc.close();
+ }
+ }
+ }
+
+ private static class RefQueueEntry {
+ static int compare(RefQueueEntry a, RefQueueEntry b) {
+ int cmp = a.name().compareTo(b.name());
+ if (cmp == 0) {
+ // higher index shadows lower index, so higher index first.
+ cmp = b.stackIdx - a.stackIdx;
+ }
+ return cmp;
+ }
+
+ final RefCursor rc;
+ final int stackIdx;
+
+ RefQueueEntry(RefCursor rc, int stackIdx) {
+ this.rc = rc;
+ this.stackIdx = stackIdx;
+ }
+
+ String name() {
+ return rc.getRef().getName();
+ }
+ }
+
+ private class MergedLogCursor extends LogCursor {
+ private final PriorityQueue<LogQueueEntry> queue;
+ private String refName;
+ private long updateIndex;
+ private ReflogEntry entry;
+
+ MergedLogCursor() {
+ queue = new PriorityQueue<>(queueSize(), LogQueueEntry::compare);
+ }
+
+ void add(LogQueueEntry t) throws IOException {
+ if (t.lc.next()) {
+ queue.add(t);
+ } else {
+ t.lc.close();
+ }
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ LogQueueEntry t = queue.poll();
+ if (t == null) {
+ return false;
+ }
+
+ refName = t.lc.getRefName();
+ updateIndex = t.lc.getUpdateIndex();
+ entry = t.lc.getReflogEntry();
+ boolean include = includeDeletes || entry != null;
+ skipShadowed(refName, updateIndex);
+ add(t);
+ if (include) {
+ return true;
+ }
+ }
+ }
+
+ private void skipShadowed(String name, long index) throws IOException {
+ for (;;) {
+ LogQueueEntry t = queue.peek();
+ if (t != null && name.equals(t.name()) && index == t.index()) {
+ add(queue.remove());
+ } else {
+ break;
+ }
+ }
+ }
+
+ @Override
+ public String getRefName() {
+ return refName;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return entry;
+ }
+
+ @Override
+ public void close() {
+ while (!queue.isEmpty()) {
+ queue.remove().lc.close();
+ }
+ }
+ }
+
+ private static class LogQueueEntry {
+ static int compare(LogQueueEntry a, LogQueueEntry b) {
+ int cmp = a.name().compareTo(b.name());
+ if (cmp == 0) {
+ // higher update index sorts first.
+ cmp = Long.signum(b.index() - a.index());
+ }
+ if (cmp == 0) {
+ // higher index comes first.
+ cmp = b.stackIdx - a.stackIdx;
+ }
+ return cmp;
+ }
+
+ final LogCursor lc;
+ final int stackIdx;
+
+ LogQueueEntry(LogCursor lc, int stackIdx) {
+ this.lc = lc;
+ this.stackIdx = stackIdx;
+ }
+
+ String name() {
+ return lc.getRefName();
+ }
+
+ long index() {
+ return lc.getUpdateIndex();
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java
new file mode 100644
index 0000000..786fae1
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/RefCursor.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+
+import org.eclipse.jgit.lib.Ref;
+
+/** Iterator over references inside a {@link Reftable}. */
+public abstract class RefCursor implements AutoCloseable {
+ /**
+ * Check if another reference is available.
+ *
+ * @return {@code true} if there is another result.
+ * @throws IOException
+ * references cannot be read.
+ */
+ public abstract boolean next() throws IOException;
+
+ /** @return reference at the current position. */
+ public abstract Ref getRef();
+
+ /** @return {@code true} if the current reference was deleted. */
+ public boolean wasDeleted() {
+ Ref r = getRef();
+ return r.getStorage() == Ref.Storage.NEW && r.getObjectId() == null;
+ }
+
+ @Override
+ public abstract void close();
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java
new file mode 100644
index 0000000..1189ed3
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/Reftable.java
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.lib.RefDatabase.MAX_SYMBOLIC_REF_DEPTH;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.SymbolicRef;
+
+/** Abstract table of references. */
+public abstract class Reftable implements AutoCloseable {
+ /**
+ * @param refs
+ * references to convert into a reftable; may be empty.
+ * @return a reader for the supplied references.
+ */
+ public static Reftable from(Collection<Ref> refs) {
+ try {
+ ReftableConfig cfg = new ReftableConfig();
+ cfg.setIndexObjects(false);
+ cfg.setAlignBlocks(false);
+ ByteArrayOutputStream buf = new ByteArrayOutputStream();
+ new ReftableWriter()
+ .setConfig(cfg)
+ .begin(buf)
+ .sortAndWriteRefs(refs)
+ .finish();
+ return new ReftableReader(BlockSource.from(buf.toByteArray()));
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
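+
+	/*
+	 * Sketch of a typical use of from() (refs is a hypothetical
+	 * Collection<Ref>, e.g. built in a test):
+	 *
+	 *   Reftable table = Reftable.from(refs);
+	 *   Ref head = table.exactRef("refs/heads/master");
+	 */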
+
+ /** {@code true} if deletions should be included in results. */
+ protected boolean includeDeletes;
+
+ /**
+ * @param deletes
+ * if {@code true} deleted references will be returned. If
+ * {@code false} (default behavior), deleted references will be
+ * skipped, and not returned.
+ */
+ public void setIncludeDeletes(boolean deletes) {
+ includeDeletes = deletes;
+ }
+
+ /**
+ * Seek to the first reference, to iterate in order.
+ *
+ * @return cursor to iterate.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor allRefs() throws IOException;
+
+ /**
+ * Seek either to a reference, or a reference subtree.
+ * <p>
+ * If {@code refName} ends with {@code "/"} the method will seek to the
+ * subtree of all references starting with {@code refName} as a prefix. If
+ * no references start with this prefix, an empty cursor is returned.
+ * <p>
+ * Otherwise exactly {@code refName} will be looked for. If present, the
+ * returned cursor will iterate exactly one entry. If not found, an empty
+ * cursor is returned.
+ *
+ * @param refName
+ * reference name or subtree to find.
+ * @return cursor to iterate; empty cursor if no references match.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor seekRef(String refName) throws IOException;
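+
+	/*
+	 * Prefix scan sketch: a trailing '/' seeks an entire namespace
+	 * (table is a hypothetical Reftable instance):
+	 *
+	 *   try (RefCursor rc = table.seekRef("refs/tags/")) {
+	 *     while (rc.next()) {
+	 *       Ref tag = rc.getRef();
+	 *     }
+	 *   }
+	 */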
+
+ /**
+ * Match references pointing to a specific object.
+ *
+ * @param id
+ * object to find.
+ * @return cursor to iterate; empty cursor if no references match.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public abstract RefCursor byObjectId(AnyObjectId id) throws IOException;
+
+ /**
+ * Seek reader to read log records.
+ *
+ * @return cursor to iterate; empty cursor if no logs are present.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public abstract LogCursor allLogs() throws IOException;
+
+ /**
+ * Read a single reference's log.
+ *
+ * @param refName
+ * exact name of the reference whose log to read.
+ * @return cursor to iterate; empty cursor if no logs match.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public LogCursor seekLog(String refName) throws IOException {
+ return seekLog(refName, Long.MAX_VALUE);
+ }
+
+ /**
+ * Seek to an update index in a reference's log.
+ *
+ * @param refName
+ * exact name of the reference whose log to read.
+ * @param updateIndex
+ * most recent index to return first in the log cursor. Log
+ * records at or before {@code updateIndex} will be returned.
+ * @return cursor to iterate; empty cursor if no logs match.
+ * @throws IOException
+ * if logs cannot be read.
+ */
+ public abstract LogCursor seekLog(String refName, long updateIndex)
+ throws IOException;
+
+ /**
+	 * Look up a reference, or {@code null} if not found.
+ *
+ * @param refName
+ * reference name to find.
+ * @return the reference, or {@code null} if not found.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ @Nullable
+ public Ref exactRef(String refName) throws IOException {
+ try (RefCursor rc = seekRef(refName)) {
+ return rc.next() ? rc.getRef() : null;
+ }
+ }
+
+ /**
+ * Test if a reference or reference subtree exists.
+ * <p>
+ * If {@code refName} ends with {@code "/"}, the method tests if any
+ * reference starts with {@code refName} as a prefix.
+ * <p>
+ * Otherwise, the method checks if {@code refName} exists.
+ *
+ * @param refName
+ * reference name or subtree to find.
+ * @return {@code true} if the reference exists, or at least one reference
+ * exists in the subtree.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public boolean hasRef(String refName) throws IOException {
+ try (RefCursor rc = seekRef(refName)) {
+ return rc.next();
+ }
+ }
+
+ /**
+ * Test if any reference directly refers to the object.
+ *
+ * @param id
+ * ObjectId to find.
+ * @return {@code true} if any reference exists directly referencing
+	 *         {@code id}, or an annotated tag that peels to {@code id}.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ public boolean hasId(AnyObjectId id) throws IOException {
+ try (RefCursor rc = byObjectId(id)) {
+ return rc.next();
+ }
+ }
+
+ /**
+ * Resolve a symbolic reference to populate its value.
+ *
+ * @param symref
+ * reference to resolve.
+ * @return resolved {@code symref}, or {@code null}.
+ * @throws IOException
+ * if references cannot be read.
+ */
+ @Nullable
+ public Ref resolve(Ref symref) throws IOException {
+ return resolve(symref, 0);
+ }
+
+ private Ref resolve(Ref ref, int depth) throws IOException {
+ if (!ref.isSymbolic()) {
+ return ref;
+ }
+
+ Ref dst = ref.getTarget();
+ if (MAX_SYMBOLIC_REF_DEPTH <= depth) {
+ return null; // claim it doesn't exist
+ }
+
+ dst = exactRef(dst.getName());
+ if (dst == null) {
+ return ref;
+ }
+
+ dst = resolve(dst, depth + 1);
+ if (dst == null) {
+ return null; // claim it doesn't exist
+ }
+ return new SymbolicRef(ref.getName(), dst);
+ }
+
+ @Override
+ public abstract void close() throws IOException;
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java
new file mode 100644
index 0000000..4f92267
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableCompactor.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.internal.storage.reftable.ReftableWriter.Stats;
+import org.eclipse.jgit.lib.ReflogEntry;
+
+/**
+ * Merges reftables and compacts them into a single output.
+ * <p>
+ * For a partial compaction callers should pass {@code true} to
+ * {@link #setIncludeDeletes(boolean)} to ensure the new reftable continues to
+ * use a delete marker to shadow any lower reftable that may still have the
+ * reference present.
+ * <p>
+ * By default all log entries within the range defined by
+ * {@link #setMinUpdateIndex(long)} and {@link #setMaxUpdateIndex(long)} are
+ * copied, even if no references in the output file match the log records.
+ * Callers may truncate the log to a more recent time horizon with
+ * {@link #setOldestReflogTimeMillis(long)}, or disable the log altogether with
+ * {@code setOldestReflogTimeMillis(Long.MAX_VALUE)}.
+ */
+public class ReftableCompactor {
+ private final ReftableWriter writer = new ReftableWriter();
+ private final ArrayDeque<Reftable> tables = new ArrayDeque<>();
+
+ private long compactBytesLimit;
+ private long bytesToCompact;
+ private boolean includeDeletes;
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+ private long oldestReflogTimeMillis;
+ private Stats stats;
+
+ /**
+ * @param cfg
+ * configuration for the reftable.
+ * @return {@code this}
+ */
+ public ReftableCompactor setConfig(ReftableConfig cfg) {
+ writer.setConfig(cfg);
+ return this;
+ }
+
+ /**
+ * @param bytes
+ * limit on number of bytes from source tables to compact.
+ * @return {@code this}
+ */
+ public ReftableCompactor setCompactBytesLimit(long bytes) {
+ compactBytesLimit = bytes;
+ return this;
+ }
+
+ /**
+ * @param deletes
+ * {@code true} to include deletions in the output, which may be
+ * necessary for partial compaction.
+ * @return {@code this}
+ */
+ public ReftableCompactor setIncludeDeletes(boolean deletes) {
+ includeDeletes = deletes;
+ return this;
+ }
+
+ /**
+ * @param min
+ * the minimum update index for log entries that appear in the
+ * compacted reftable. This should be 1 higher than the prior
+ * reftable's {@code maxUpdateIndex} if this table will be used
+ * in a stack.
+ * @return {@code this}
+ */
+ public ReftableCompactor setMinUpdateIndex(long min) {
+ minUpdateIndex = min;
+ return this;
+ }
+
+ /**
+ * @param max
+ * the maximum update index for log entries that appear in the
+ * compacted reftable. This should be at least 1 higher than the
+ * prior reftable's {@code maxUpdateIndex} if this table will be
+ * used in a stack.
+ * @return {@code this}
+ */
+ public ReftableCompactor setMaxUpdateIndex(long max) {
+ maxUpdateIndex = max;
+ return this;
+ }
+
+ /**
+ * @param timeMillis
+ * oldest log time to preserve. Entries whose timestamps are
+ * {@code >= timeMillis} will be copied into the output file. Log
+ * entries that predate {@code timeMillis} will be discarded.
+ * Specified in Java standard milliseconds since the epoch.
+ * @return {@code this}
+ */
+ public ReftableCompactor setOldestReflogTimeMillis(long timeMillis) {
+ oldestReflogTimeMillis = timeMillis;
+ return this;
+ }
+
+ /**
+ * Add all of the tables, in the specified order.
+ * <p>
+ * Unconditionally adds all tables, ignoring the
+ * {@link #setCompactBytesLimit(long)}.
+ *
+ * @param readers
+ * tables to compact. Tables should be ordered oldest first/most
+ * recent last so that the more recent tables can shadow the
+ * older results. Caller is responsible for closing the readers.
+ */
+ public void addAll(List<? extends Reftable> readers) {
+ tables.addAll(readers);
+ }
+
+ /**
+ * Try to add this reader at the bottom of the stack.
+ * <p>
+	 * A reader may be rejected (by returning {@code false}) if adding it
+	 * would exceed the configured {@link #setCompactBytesLimit(long)}. When
+	 * this happens the caller should stop trying to add tables and execute
+	 * the compaction.
+ *
+ * @param reader
+ * the reader to insert at the bottom of the stack. Caller is
+ * responsible for closing the reader.
+ * @return {@code true} if the compactor accepted this table; {@code false}
+ * if the compactor has reached its limit.
+ * @throws IOException
+ * if size of {@code reader} cannot be read.
+ */
+ public boolean tryAddFirst(ReftableReader reader) throws IOException {
+ long sz = reader.size();
+ if (compactBytesLimit > 0 && bytesToCompact + sz > compactBytesLimit) {
+ return false;
+ }
+ bytesToCompact += sz;
+ tables.addFirst(reader);
+ return true;
+ }
+
+ /**
+ * Write a compaction to {@code out}.
+ *
+ * @param out
+ * stream to write the compacted tables to. Caller is responsible
+ * for closing {@code out}.
+ * @throws IOException
+ * if tables cannot be read, or cannot be written.
+ */
+ public void compact(OutputStream out) throws IOException {
+ MergedReftable mr = new MergedReftable(new ArrayList<>(tables));
+ mr.setIncludeDeletes(includeDeletes);
+
+ writer.setMinUpdateIndex(minUpdateIndex);
+ writer.setMaxUpdateIndex(maxUpdateIndex);
+ writer.begin(out);
+ mergeRefs(mr);
+ mergeLogs(mr);
+ writer.finish();
+ stats = writer.getStats();
+ }
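+
+	/*
+	 * Compaction sketch (readers and out are hypothetical, supplied by the
+	 * caller; the update indexes shown are examples only):
+	 *
+	 *   ReftableCompactor compactor = new ReftableCompactor()
+	 *       .setConfig(new ReftableConfig())
+	 *       .setMinUpdateIndex(1)
+	 *       .setMaxUpdateIndex(10);
+	 *   compactor.addAll(readers);
+	 *   compactor.compact(out);
+	 */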
+
+ /** @return statistics of the last written reftable. */
+ public Stats getStats() {
+ return stats;
+ }
+
+ private void mergeRefs(MergedReftable mr) throws IOException {
+ try (RefCursor rc = mr.allRefs()) {
+ while (rc.next()) {
+ writer.writeRef(rc.getRef());
+ }
+ }
+ }
+
+ private void mergeLogs(MergedReftable mr) throws IOException {
+ if (oldestReflogTimeMillis == Long.MAX_VALUE) {
+ return;
+ }
+
+ try (LogCursor lc = mr.allLogs()) {
+ while (lc.next()) {
+ long updateIndex = lc.getUpdateIndex();
+ if (updateIndex < minUpdateIndex
+ || updateIndex > maxUpdateIndex) {
+ // Cannot merge log records outside the header's range.
+ continue;
+ }
+
+ String refName = lc.getRefName();
+ ReflogEntry log = lc.getReflogEntry();
+ if (log == null) {
+ if (includeDeletes) {
+ writer.deleteLog(refName, updateIndex);
+ }
+ continue;
+ }
+
+ PersonIdent who = log.getWho();
+ if (who.getWhen().getTime() >= oldestReflogTimeMillis) {
+ writer.writeLog(
+ refName,
+ updateIndex,
+ who,
+ log.getOldId(),
+ log.getNewId(),
+ log.getComment());
+ }
+ }
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java
new file mode 100644
index 0000000..f7a1fbe
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConfig.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_BLOCK_SIZE;
+
+import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.Repository;
+
+/** Configuration used by a reftable writer when constructing the stream. */
+public class ReftableConfig {
+ private int refBlockSize = 4 << 10;
+ private int logBlockSize;
+ private int restartInterval;
+ private int maxIndexLevels;
+ private boolean alignBlocks = true;
+ private boolean indexObjects = true;
+
+ /** Create a default configuration. */
+ public ReftableConfig() {
+ }
+
+ /**
+ * Create a configuration honoring the repository's settings.
+ *
+ * @param db
+ * the repository to read settings from. The repository is not
+ * retained by the new configuration, instead its settings are
+ * copied during the constructor.
+ */
+ public ReftableConfig(Repository db) {
+ fromConfig(db.getConfig());
+ }
+
+ /**
+ * Create a configuration honoring settings in a {@link Config}.
+ *
+ * @param cfg
+ * the source to read settings from. The source is not retained
+ * by the new configuration, instead its settings are copied
+ * during the constructor.
+ */
+ public ReftableConfig(Config cfg) {
+ fromConfig(cfg);
+ }
+
+ /**
+ * Copy an existing configuration to a new instance.
+ *
+ * @param cfg
+ * the source configuration to copy from.
+ */
+ public ReftableConfig(ReftableConfig cfg) {
+ this.refBlockSize = cfg.refBlockSize;
+ this.logBlockSize = cfg.logBlockSize;
+ this.restartInterval = cfg.restartInterval;
+ this.maxIndexLevels = cfg.maxIndexLevels;
+ this.alignBlocks = cfg.alignBlocks;
+ this.indexObjects = cfg.indexObjects;
+ }
+
+ /** @return desired output block size for references, in bytes */
+ public int getRefBlockSize() {
+ return refBlockSize;
+ }
+
+ /**
+ * @param szBytes
+ * desired output block size for references, in bytes.
+ */
+ public void setRefBlockSize(int szBytes) {
+ if (szBytes > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ refBlockSize = Math.max(0, szBytes);
+ }
+
+ /**
+ * @return desired output block size for log entries, in bytes. If 0 the
+ * writer will default to {@code 2 * getRefBlockSize()}.
+ */
+ public int getLogBlockSize() {
+ return logBlockSize;
+ }
+
+ /**
+ * @param szBytes
+ * desired output block size for log entries, in bytes. If 0 will
+ * default to {@code 2 * getRefBlockSize()}.
+ */
+ public void setLogBlockSize(int szBytes) {
+ if (szBytes > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ logBlockSize = Math.max(0, szBytes);
+ }
+
+ /** @return number of references between binary search markers. */
+ public int getRestartInterval() {
+ return restartInterval;
+ }
+
+ /**
+ * @param interval
+ * number of references between binary search markers. If
+ * {@code interval} is 0 (default), the writer will select a
+ * default value based on the block size.
+ */
+ public void setRestartInterval(int interval) {
+ restartInterval = Math.max(0, interval);
+ }
+
+ /** @return maximum depth of the index; 0 for unlimited. */
+ public int getMaxIndexLevels() {
+ return maxIndexLevels;
+ }
+
+ /**
+ * @param levels
+ * maximum number of levels to use in indexes. Lower levels of
+ * the index respect {@link #getRefBlockSize()}, and the highest
+ * level may exceed that if the number of levels is limited.
+ */
+ public void setMaxIndexLevels(int levels) {
+ maxIndexLevels = Math.max(0, levels);
+ }
+
+ /** @return {@code true} if the writer should align blocks. */
+ public boolean isAlignBlocks() {
+ return alignBlocks;
+ }
+
+ /**
+ * @param align
+ * if {@code true} blocks are written aligned to multiples of
+ * {@link #getRefBlockSize()}. May increase file size due to NUL
+ * padding bytes added between blocks. Default is {@code true}.
+ */
+ public void setAlignBlocks(boolean align) {
+ alignBlocks = align;
+ }
+
+ /** @return {@code true} if the writer should index object to ref. */
+	/** @return {@code true} if the writer should index objects to refs. */
+ return indexObjects;
+ }
+
+ /**
+ * @param index
+ * if {@code true} the reftable may include additional storage to
+ * efficiently map from {@code ObjectId} to reference names. By
+ * default, {@code true}.
+ */
+ public void setIndexObjects(boolean index) {
+ indexObjects = index;
+ }
+
+ /**
+ * Update properties by setting fields from the configuration.
+ *
+ * If a property's corresponding variable is not defined in the supplied
+ * configuration, then it is left unmodified.
+ *
+ * @param rc
+ * configuration to read properties from.
+ */
+ public void fromConfig(Config rc) {
+ refBlockSize = rc.getInt("reftable", "blockSize", refBlockSize); //$NON-NLS-1$ //$NON-NLS-2$
+ logBlockSize = rc.getInt("reftable", "logBlockSize", logBlockSize); //$NON-NLS-1$ //$NON-NLS-2$
+ restartInterval = rc.getInt("reftable", "restartInterval", restartInterval); //$NON-NLS-1$ //$NON-NLS-2$
+ maxIndexLevels = rc.getInt("reftable", "indexLevels", maxIndexLevels); //$NON-NLS-1$ //$NON-NLS-2$
+ alignBlocks = rc.getBoolean("reftable", "alignBlocks", alignBlocks); //$NON-NLS-1$ //$NON-NLS-2$
+ indexObjects = rc.getBoolean("reftable", "indexObjects", indexObjects); //$NON-NLS-1$ //$NON-NLS-2$
+ }
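+
+	/*
+	 * The keys read above correspond to a git config section such as the
+	 * following (values shown are illustrative, not defaults):
+	 *
+	 *   [reftable]
+	 *     blockSize = 4096
+	 *     logBlockSize = 8192
+	 *     restartInterval = 16
+	 *     indexLevels = 0
+	 *     alignBlocks = true
+	 *     indexObjects = true
+	 */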
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java
new file mode 100644
index 0000000..0b89327
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableConstants.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+class ReftableConstants {
+ static final byte[] FILE_HEADER_MAGIC = { 'R', 'E', 'F', 'T' };
+ static final byte VERSION_1 = (byte) 1;
+
+ static final int FILE_HEADER_LEN = 24;
+ static final int FILE_FOOTER_LEN = 68;
+
+ static final byte FILE_BLOCK_TYPE = 'R';
+ static final byte REF_BLOCK_TYPE = 'r';
+ static final byte OBJ_BLOCK_TYPE = 'o';
+ static final byte LOG_BLOCK_TYPE = 'g';
+ static final byte INDEX_BLOCK_TYPE = 'i';
+
+ static final int VALUE_NONE = 0x0;
+ static final int VALUE_1ID = 0x1;
+ static final int VALUE_2ID = 0x2;
+ static final int VALUE_SYMREF = 0x3;
+ static final int VALUE_TYPE_MASK = 0x7;
+
+ static final int LOG_NONE = 0x0;
+ static final int LOG_DATA = 0x1;
+
+ static final int MAX_BLOCK_SIZE = (1 << 24) - 1;
+ static final int MAX_RESTARTS = 65535;
+
+ static boolean isFileHeaderMagic(byte[] buf, int o, int n) {
+ return (n - o) >= FILE_HEADER_MAGIC.length
+ && buf[o + 0] == FILE_HEADER_MAGIC[0]
+ && buf[o + 1] == FILE_HEADER_MAGIC[1]
+ && buf[o + 2] == FILE_HEADER_MAGIC[2]
+ && buf[o + 3] == FILE_HEADER_MAGIC[3];
+ }
+
+ static long reverseUpdateIndex(long time) {
+ return 0xffffffffffffffffL - time;
+ }
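+
+	/*
+	 * Worked example: reverseUpdateIndex(2) is one less than
+	 * reverseUpdateIndex(1). Stored big-endian and compared as unsigned
+	 * bytes, the newer entry (index 2) therefore sorts before the older
+	 * one (index 1).
+	 */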
+
+ private ReftableConstants() {
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java
new file mode 100644
index 0000000..a24619b
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableOutputStream.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.util.NB;
+import org.eclipse.jgit.util.io.CountingOutputStream;
+
+/**
+ * Wrapper to assist formatting a reftable to an {@link OutputStream}.
+ * <p>
+ * Internally buffers at block size boundaries, flushing only complete blocks to
+ * the {@code OutputStream}.
+ */
+class ReftableOutputStream extends OutputStream {
+ private final byte[] tmp = new byte[10];
+ private final CountingOutputStream out;
+ private final boolean alignBlocks;
+
+ private Deflater deflater;
+ private DeflaterOutputStream compressor;
+
+ private int blockType;
+ private int blockSize;
+ private int blockStart;
+ private byte[] blockBuf;
+ private int cur;
+ private long paddingUsed;
+
+ ReftableOutputStream(OutputStream os, int bs, boolean align) {
+ blockSize = bs;
+ blockBuf = new byte[bs];
+ alignBlocks = align;
+ out = new CountingOutputStream(os);
+ }
+
+ void setBlockSize(int bs) {
+ blockSize = bs;
+ }
+
+ @Override
+ public void write(int b) {
+ ensureBytesAvailableInBlockBuf(1);
+ blockBuf[cur++] = (byte) b;
+ }
+
+ @Override
+ public void write(byte[] b, int off, int cnt) {
+ ensureBytesAvailableInBlockBuf(cnt);
+ System.arraycopy(b, off, blockBuf, cur, cnt);
+ cur += cnt;
+ }
+
+ int bytesWrittenInBlock() {
+ return cur;
+ }
+
+ int bytesAvailableInBlock() {
+ return blockSize - cur;
+ }
+
+ long paddingUsed() {
+ return paddingUsed;
+ }
+
+ /** @return bytes flushed; excludes {@link #bytesWrittenInBlock()}. */
+ long size() {
+ return out.getCount();
+ }
+
+ static int computeVarintSize(long val) {
+ int n = 1;
+ for (; (val >>>= 7) != 0; n++) {
+ val--;
+ }
+ return n;
+ }
+
+ void writeVarint(long val) {
+ int n = tmp.length;
+ tmp[--n] = (byte) (val & 0x7f);
+ while ((val >>>= 7) != 0) {
+ tmp[--n] = (byte) (0x80 | (--val & 0x7F));
+ }
+ write(tmp, n, tmp.length - n);
+ }
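+
+	/*
+	 * Worked example of this varint form: 0x7f fits in a single byte
+	 * (0x7f), while 0x80 takes two and is written as { 0x80, 0x00 }:
+	 * the final byte holds the low 7 bits, and each earlier byte sets
+	 * the high bit and holds (--val & 0x7f) after the shift.
+	 */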
+
+ void writeInt16(int val) {
+ ensureBytesAvailableInBlockBuf(2);
+ NB.encodeInt16(blockBuf, cur, val);
+ cur += 2;
+ }
+
+ void writeInt24(int val) {
+ ensureBytesAvailableInBlockBuf(3);
+ NB.encodeInt24(blockBuf, cur, val);
+ cur += 3;
+ }
+
+ void writeId(ObjectId id) {
+ ensureBytesAvailableInBlockBuf(OBJECT_ID_LENGTH);
+ id.copyRawTo(blockBuf, cur);
+ cur += OBJECT_ID_LENGTH;
+ }
+
+ void writeVarintString(String s) {
+ writeVarintString(s.getBytes(UTF_8));
+ }
+
+ void writeVarintString(byte[] msg) {
+ writeVarint(msg.length);
+ write(msg, 0, msg.length);
+ }
+
+ private void ensureBytesAvailableInBlockBuf(int cnt) {
+ if (cur + cnt > blockBuf.length) {
+ int n = Math.max(cur + cnt, blockBuf.length * 2);
+ blockBuf = Arrays.copyOf(blockBuf, n);
+ }
+ }
+
+ void flushFileHeader() throws IOException {
+ if (cur == FILE_HEADER_LEN && out.getCount() == 0) {
+ out.write(blockBuf, 0, cur);
+ cur = 0;
+ }
+ }
+
+ void beginBlock(byte type) {
+ blockType = type;
+ blockStart = cur;
+ cur += 4; // reserve space for 4-byte block header.
+ }
+
+ void flushBlock() throws IOException {
+ if (cur > blockSize && blockType != INDEX_BLOCK_TYPE) {
+ throw new IOException(JGitText.get().overflowedReftableBlock);
+ }
+ NB.encodeInt32(blockBuf, blockStart, (blockType << 24) | cur);
+
+ if (blockType == LOG_BLOCK_TYPE) {
+ // Log blocks are deflated after the block header.
+ out.write(blockBuf, 0, 4);
+ if (deflater != null) {
+ deflater.reset();
+ } else {
+ deflater = new Deflater(Deflater.BEST_COMPRESSION);
+ compressor = new DeflaterOutputStream(out, deflater);
+ }
+ compressor.write(blockBuf, 4, cur - 4);
+ compressor.finish();
+ } else {
+ // Other blocks are uncompressed.
+ out.write(blockBuf, 0, cur);
+ }
+
+ cur = 0;
+ blockType = 0;
+ blockStart = 0;
+ }
+
+ void padBetweenBlocksToNextBlock() throws IOException {
+ if (alignBlocks) {
+ long m = size() % blockSize;
+ if (m > 0) {
+ int pad = blockSize - (int) m;
+ ensureBytesAvailableInBlockBuf(pad);
+ Arrays.fill(blockBuf, 0, pad, (byte) 0);
+ out.write(blockBuf, 0, pad);
+ paddingUsed += pad;
+ }
+ }
+ }
+
+ int estimatePadBetweenBlocks(int currentBlockSize) {
+ if (alignBlocks) {
+ long m = (size() + currentBlockSize) % blockSize;
+ return m > 0 ? blockSize - (int) m : 0;
+ }
+ return 0;
+ }
+
+ void finishFile() throws IOException {
+ // File footer doesn't need patching for the block start.
+ // Just flush what has been buffered.
+ out.write(blockBuf, 0, cur);
+ cur = 0;
+
+ if (deflater != null) {
+ deflater.end();
+ }
+ }
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java
new file mode 100644
index 0000000..be1eb40
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableReader.java
@@ -0,0 +1,669 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.eclipse.jgit.internal.storage.reftable.BlockReader.decodeBlockLen;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_FOOTER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VERSION_1;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.isFileHeaderMagic;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.internal.JGitText;
+import org.eclipse.jgit.internal.storage.io.BlockSource;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.LogEntry;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.ReflogEntry;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.LongMap;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Reads a reftable formatted file.
+ * <p>
+ * {@code ReftableReader} is not thread-safe. Concurrent readers need their own
+ * instance to read from the same file.
+ */
+public class ReftableReader extends Reftable {
+ private final BlockSource src;
+
+ private int blockSize = -1;
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+
+ private long refEnd;
+ private long objPosition;
+ private long objEnd;
+ private long logPosition;
+ private long logEnd;
+ private int objIdLen;
+
+ private long refIndexPosition = -1;
+ private long objIndexPosition = -1;
+ private long logIndexPosition = -1;
+
+ private BlockReader refIndex;
+ private BlockReader objIndex;
+ private BlockReader logIndex;
+ private LongMap<BlockReader> indexCache;
+
+ /**
+ * Initialize a new reftable reader.
+ *
+ * @param src
+ * the file content to read.
+ */
+ public ReftableReader(BlockSource src) {
+ this.src = src;
+ }
+
+ /**
+ * @return the block size in bytes chosen for this file by the writer. Most
+ * reads from the {@link BlockSource} will be aligned to the block
+ * size.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public int blockSize() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return blockSize;
+ }
+
+ /**
+ * @return the minimum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table is used in a stack.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public long minUpdateIndex() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return minUpdateIndex;
+ }
+
+ /**
+ * @return the maximum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table is used in a stack.
+ * @throws IOException
+ * file cannot be read.
+ */
+ public long maxUpdateIndex() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+ return maxUpdateIndex;
+ }
+
+ @Override
+ public RefCursor allRefs() throws IOException {
+ if (blockSize == -1) {
+ readFileHeader();
+ }
+
+ long end = refEnd > 0 ? refEnd : (src.size() - FILE_FOOTER_LEN);
+ src.adviseSequentialRead(0, end);
+
+ RefCursorImpl i = new RefCursorImpl(end, null, false);
+ i.block = readBlock(0, end);
+ return i;
+ }
+
+ @Override
+ public RefCursor seekRef(String refName) throws IOException {
+ initRefIndex();
+
+ byte[] key = refName.getBytes(UTF_8);
+ boolean prefix = key[key.length - 1] == '/';
+
+ RefCursorImpl i = new RefCursorImpl(refEnd, key, prefix);
+ i.block = seek(REF_BLOCK_TYPE, key, refIndex, 0, refEnd);
+ return i;
+ }
+
+ @Override
+ public RefCursor byObjectId(AnyObjectId id) throws IOException {
+ initObjIndex();
+ ObjCursorImpl i = new ObjCursorImpl(refEnd, id);
+ if (objIndex != null) {
+ i.initSeek();
+ } else {
+ i.initScan();
+ }
+ return i;
+ }
+
+ @Override
+ public LogCursor allLogs() throws IOException {
+ initLogIndex();
+ if (logPosition > 0) {
+ src.adviseSequentialRead(logPosition, logEnd);
+ LogCursorImpl i = new LogCursorImpl(logEnd, null);
+ i.block = readBlock(logPosition, logEnd);
+ return i;
+ }
+ return new EmptyLogCursor();
+ }
+
+ @Override
+ public LogCursor seekLog(String refName, long updateIndex)
+ throws IOException {
+ initLogIndex();
+ if (logPosition > 0) {
+ byte[] key = LogEntry.key(refName, updateIndex);
+ byte[] match = refName.getBytes(UTF_8);
+ LogCursorImpl i = new LogCursorImpl(logEnd, match);
+ i.block = seek(LOG_BLOCK_TYPE, key, logIndex, logPosition, logEnd);
+ return i;
+ }
+ return new EmptyLogCursor();
+ }
+
+ private BlockReader seek(byte blockType, byte[] key, BlockReader idx,
+ long startPos, long endPos) throws IOException {
+ if (idx != null) {
+ // Walk through a possibly multi-level index to a leaf block.
+ BlockReader block = idx;
+ do {
+ if (block.seekKey(key) > 0) {
+ return null;
+ }
+ long pos = block.readPositionFromIndex();
+ block = readBlock(pos, endPos);
+ } while (block.type() == INDEX_BLOCK_TYPE);
+ block.seekKey(key);
+ return block;
+ }
+ return binarySearch(blockType, key, startPos, endPos);
+ }
+
+ private BlockReader binarySearch(byte blockType, byte[] key,
+ long startPos, long endPos) throws IOException {
+ if (blockSize == 0) {
+ BlockReader b = readBlock(startPos, endPos);
+ if (blockType != b.type()) {
+ return null;
+ }
+ b.seekKey(key);
+ return b;
+ }
+
+ int low = (int) (startPos / blockSize);
+ int end = blocksIn(startPos, endPos);
+ BlockReader block = null;
+ do {
+ int mid = (low + end) >>> 1;
+ block = readBlock(((long) mid) * blockSize, endPos);
+ if (blockType != block.type()) {
+ return null;
+ }
+ int cmp = block.seekKey(key);
+ if (cmp < 0) {
+ end = mid;
+ } else if (cmp == 0) {
+ break;
+ } else /* if (cmp > 0) */ {
+ low = mid + 1;
+ }
+ } while (low < end);
+ return block;
+ }
+
+ private void readFileHeader() throws IOException {
+ readHeaderOrFooter(0, FILE_HEADER_LEN);
+ }
+
+ private void readFileFooter() throws IOException {
+ int ftrLen = FILE_FOOTER_LEN;
+ byte[] ftr = readHeaderOrFooter(src.size() - ftrLen, ftrLen);
+
+ CRC32 crc = new CRC32();
+ crc.update(ftr, 0, ftrLen - 4);
+ if (crc.getValue() != NB.decodeUInt32(ftr, ftrLen - 4)) {
+ throw new IOException(JGitText.get().invalidReftableCRC);
+ }
+
+ refIndexPosition = NB.decodeInt64(ftr, 24);
+ long p = NB.decodeInt64(ftr, 32);
+ objPosition = p >>> 5;
+ objIdLen = (int) (p & 0x1f);
+ objIndexPosition = NB.decodeInt64(ftr, 40);
+ logPosition = NB.decodeInt64(ftr, 48);
+ logIndexPosition = NB.decodeInt64(ftr, 56);
+
+ if (refIndexPosition > 0) {
+ refEnd = refIndexPosition;
+ } else if (objPosition > 0) {
+ refEnd = objPosition;
+ } else if (logPosition > 0) {
+ refEnd = logPosition;
+ } else {
+ refEnd = src.size() - ftrLen;
+ }
+
+ if (objPosition > 0) {
+ if (objIndexPosition > 0) {
+ objEnd = objIndexPosition;
+ } else if (logPosition > 0) {
+ objEnd = logPosition;
+ } else {
+ objEnd = src.size() - ftrLen;
+ }
+ }
+
+ if (logPosition > 0) {
+ if (logIndexPosition > 0) {
+ logEnd = logIndexPosition;
+ } else {
+ logEnd = src.size() - ftrLen;
+ }
+ }
+ }
+
+ private byte[] readHeaderOrFooter(long pos, int len) throws IOException {
+ ByteBuffer buf = src.read(pos, len);
+ if (buf.position() != len) {
+ throw new IOException(JGitText.get().shortReadOfBlock);
+ }
+
+ byte[] tmp = new byte[len];
+ buf.flip();
+ buf.get(tmp);
+ if (!isFileHeaderMagic(tmp, 0, len)) {
+ throw new IOException(JGitText.get().invalidReftableFile);
+ }
+
+ int v = NB.decodeInt32(tmp, 4);
+ int version = v >>> 24;
+ if (VERSION_1 != version) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().unsupportedReftableVersion,
+ Integer.valueOf(version)));
+ }
+ if (blockSize == -1) {
+ blockSize = v & 0xffffff;
+ }
+ minUpdateIndex = NB.decodeInt64(tmp, 8);
+ maxUpdateIndex = NB.decodeInt64(tmp, 16);
+ return tmp;
+ }
+
+ private void initRefIndex() throws IOException {
+ if (refIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (refIndex == null && refIndexPosition > 0) {
+ refIndex = readIndex(refIndexPosition);
+ }
+ }
+
+ private void initObjIndex() throws IOException {
+ if (objIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (objIndex == null && objIndexPosition > 0) {
+ objIndex = readIndex(objIndexPosition);
+ }
+ }
+
+ private void initLogIndex() throws IOException {
+ if (logIndexPosition < 0) {
+ readFileFooter();
+ }
+ if (logIndex == null && logIndexPosition > 0) {
+ logIndex = readIndex(logIndexPosition);
+ }
+ }
+
+ private BlockReader readIndex(long pos) throws IOException {
+ int sz = readBlockLen(pos);
+ BlockReader i = new BlockReader();
+ i.readBlock(src, pos, sz);
+ i.verifyIndex();
+ return i;
+ }
+
+ private int readBlockLen(long pos) throws IOException {
+ int sz = pos == 0 ? FILE_HEADER_LEN + 4 : 4;
+ ByteBuffer tmp = src.read(pos, sz);
+ if (tmp.position() < sz) {
+ throw new IOException(JGitText.get().invalidReftableFile);
+ }
+ byte[] buf;
+ if (tmp.hasArray() && tmp.arrayOffset() == 0) {
+ buf = tmp.array();
+ } else {
+ buf = new byte[sz];
+ tmp.flip();
+ tmp.get(buf);
+ }
+ if (pos == 0 && buf[FILE_HEADER_LEN] == FILE_BLOCK_TYPE) {
+ return FILE_HEADER_LEN;
+ }
+ int p = pos == 0 ? FILE_HEADER_LEN : 0;
+ return decodeBlockLen(NB.decodeInt32(buf, p));
+ }
+
+ private BlockReader readBlock(long pos, long end) throws IOException {
+ if (indexCache != null) {
+ BlockReader b = indexCache.get(pos);
+ if (b != null) {
+ return b;
+ }
+ }
+
+ int sz = blockSize;
+ if (sz == 0) {
+ sz = readBlockLen(pos);
+ } else if (pos + sz > end) {
+ sz = (int) (end - pos); // last block may omit padding.
+ }
+
+ BlockReader b = new BlockReader();
+ b.readBlock(src, pos, sz);
+ if (b.type() == INDEX_BLOCK_TYPE && !b.truncated()) {
+ if (indexCache == null) {
+ indexCache = new LongMap<>();
+ }
+ indexCache.put(pos, b);
+ }
+ return b;
+ }
+
+ private int blocksIn(long pos, long end) {
+ int blocks = (int) ((end - pos) / blockSize);
+ return end % blockSize == 0 ? blocks : (blocks + 1);
+ }
+
+ /**
+ * Get size of the reftable, in bytes.
+ *
+ * @return size of the reftable, in bytes.
+ * @throws IOException
+ * size cannot be obtained.
+ */
+ public long size() throws IOException {
+ return src.size();
+ }
+
+ @Override
+ public void close() throws IOException {
+ src.close();
+ }
+
+ private class RefCursorImpl extends RefCursor {
+ private final long scanEnd;
+ private final byte[] match;
+ private final boolean prefix;
+
+ private Ref ref;
+ BlockReader block;
+
+ RefCursorImpl(long scanEnd, byte[] match, boolean prefix) {
+ this.scanEnd = scanEnd;
+ this.match = match;
+ this.prefix = prefix;
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != REF_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos = block.endPosition();
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ if (match != null && !block.match(match, prefix)) {
+ block.skipValue();
+ return false;
+ }
+
+ ref = block.readRef();
+ if (!includeDeletes && wasDeleted()) {
+ continue;
+ }
+ return true;
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+
+ private class LogCursorImpl extends LogCursor {
+ private final long scanEnd;
+ private final byte[] match;
+
+ private String refName;
+ private long updateIndex;
+ private ReflogEntry entry;
+ BlockReader block;
+
+ LogCursorImpl(long scanEnd, byte[] match) {
+ this.scanEnd = scanEnd;
+ this.match = match;
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != LOG_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos = block.endPosition();
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ if (match != null && !block.match(match, false)) {
+ block.skipValue();
+ return false;
+ }
+
+ refName = block.name();
+ updateIndex = block.readLogUpdateIndex();
+ entry = block.readLogEntry();
+ if (entry == null && !includeDeletes) {
+ continue;
+ }
+ return true;
+ }
+ }
+
+ @Override
+ public String getRefName() {
+ return refName;
+ }
+
+ @Override
+ public long getUpdateIndex() {
+ return updateIndex;
+ }
+
+ @Override
+ public ReflogEntry getReflogEntry() {
+ return entry;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+
+ static final LongList EMPTY_LONG_LIST = new LongList(0);
+
+ private class ObjCursorImpl extends RefCursor {
+ private final long scanEnd;
+ private final ObjectId match;
+
+ private Ref ref;
+ private int listIdx;
+
+ private LongList blockPos;
+ private BlockReader block;
+
+ ObjCursorImpl(long scanEnd, AnyObjectId id) {
+ this.scanEnd = scanEnd;
+ this.match = id.copy();
+ }
+
+ void initSeek() throws IOException {
+ byte[] rawId = new byte[OBJECT_ID_LENGTH];
+ match.copyRawTo(rawId, 0);
+ byte[] key = Arrays.copyOf(rawId, objIdLen);
+
+ BlockReader b = objIndex;
+ do {
+ if (b.seekKey(key) > 0) {
+ blockPos = EMPTY_LONG_LIST;
+ return;
+ }
+ long pos = b.readPositionFromIndex();
+ b = readBlock(pos, objEnd);
+ } while (b.type() == INDEX_BLOCK_TYPE);
+ b.seekKey(key);
+ while (b.next()) {
+ b.parseKey();
+ if (b.match(key, false)) {
+ blockPos = b.readBlockPositionList();
+ if (blockPos == null) {
+ initScan();
+ return;
+ }
+ break;
+ }
+ b.skipValue();
+ }
+ if (blockPos == null) {
+ blockPos = EMPTY_LONG_LIST;
+ }
+ if (blockPos.size() > 0) {
+ long pos = blockPos.get(listIdx++);
+ block = readBlock(pos, scanEnd);
+ }
+ }
+
+ void initScan() throws IOException {
+ block = readBlock(0, scanEnd);
+ }
+
+ @Override
+ public boolean next() throws IOException {
+ for (;;) {
+ if (block == null || block.type() != REF_BLOCK_TYPE) {
+ return false;
+ } else if (!block.next()) {
+ long pos;
+ if (blockPos != null) {
+ if (listIdx >= blockPos.size()) {
+ return false;
+ }
+ pos = blockPos.get(listIdx++);
+ } else {
+ pos = block.endPosition();
+ }
+ if (pos >= scanEnd) {
+ return false;
+ }
+ block = readBlock(pos, scanEnd);
+ continue;
+ }
+
+ block.parseKey();
+ ref = block.readRef();
+ ObjectId id = ref.getObjectId();
+ if (id != null && match.equals(id)
+ && (includeDeletes || !wasDeleted())) {
+ return true;
+ }
+ }
+ }
+
+ @Override
+ public Ref getRef() {
+ return ref;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing.
+ }
+ }
+}
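
Editorial note, not part of the patch: a minimal sketch of driving the reader added above, assuming the in-memory BlockSource.from(byte[]) factory and the RefCursor class from the same package. A trailing '/' passed to seekRef() turns the lookup into a namespace scan, as the prefix check in seekRef() shows; byObjectId() and seekLog() follow the same cursor pattern.

import java.io.IOException;

import org.eclipse.jgit.internal.storage.io.BlockSource;
import org.eclipse.jgit.internal.storage.reftable.RefCursor;
import org.eclipse.jgit.internal.storage.reftable.ReftableReader;
import org.eclipse.jgit.lib.Ref;

class ReadReftableSketch {
	// Print every branch stored in an in-memory reftable.
	static void printHeads(byte[] table) throws IOException {
		ReftableReader r = new ReftableReader(BlockSource.from(table));
		try {
			RefCursor rc = r.seekRef("refs/heads/"); // '/' suffix = prefix scan
			while (rc.next()) {
				Ref ref = rc.getRef();
				String id = ref.getObjectId() != null
						? ref.getObjectId().name()
						: "(symbolic)";
				System.out.println(ref.getName() + ' ' + id);
			}
			rc.close();
		} finally {
			r.close();
		}
	}
}
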
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java
new file mode 100644
index 0000000..45b759f
--- /dev/null
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/internal/storage/reftable/ReftableWriter.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright (C) 2017, Google Inc.
+ * and other copyright owners as documented in the project's IP log.
+ *
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Distribution License v1.0 which
+ * accompanies this distribution, is reproduced below, and is
+ * available at http://www.eclipse.org/org/documents/edl-v10.php
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or
+ * without modification, are permitted provided that the following
+ * conditions are met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ *
+ * - Neither the name of the Eclipse Foundation, Inc. nor the
+ * names of its contributors may be used to endorse or promote
+ * products derived from this software without specific prior
+ * written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+ * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.eclipse.jgit.internal.storage.reftable;
+
+import static java.lang.Math.log;
+import static org.eclipse.jgit.internal.storage.reftable.BlockWriter.padBetweenBlocks;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_FOOTER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_LEN;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.FILE_HEADER_MAGIC;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.INDEX_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.LOG_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_BLOCK_SIZE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.MAX_RESTARTS;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.OBJ_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.REF_BLOCK_TYPE;
+import static org.eclipse.jgit.internal.storage.reftable.ReftableConstants.VERSION_1;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.zip.CRC32;
+
+import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.DeleteLogEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.Entry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.IndexEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.LogEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.ObjEntry;
+import org.eclipse.jgit.internal.storage.reftable.BlockWriter.RefEntry;
+import org.eclipse.jgit.lib.AbbreviatedObjectId;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectIdOwnerMap;
+import org.eclipse.jgit.lib.ObjectIdSubclassMap;
+import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.util.LongList;
+import org.eclipse.jgit.util.NB;
+
+/**
+ * Writes a reftable formatted file.
+ * <p>
+ * A reftable can be written in a streaming fashion, provided the caller sorts
+ * all references. A {@link ReftableWriter} is single-use, and not thread-safe.
+ */
+public class ReftableWriter {
+ private ReftableConfig config;
+ private int refBlockSize;
+ private int logBlockSize;
+ private int restartInterval;
+ private int maxIndexLevels;
+ private boolean alignBlocks;
+ private boolean indexObjects;
+
+ private long minUpdateIndex;
+ private long maxUpdateIndex;
+
+ private ReftableOutputStream out;
+ private ObjectIdSubclassMap<RefList> obj2ref;
+
+ private BlockWriter cur;
+ private Section refs;
+ private Section objs;
+ private Section logs;
+ private int objIdLen;
+ private Stats stats;
+
+ /** Initialize a writer with a default configuration. */
+ public ReftableWriter() {
+ this(new ReftableConfig());
+ }
+
+ /**
+ * Initialize a writer with a specific configuration.
+ *
+ * @param cfg
+ * configuration for the writer.
+ */
+ public ReftableWriter(ReftableConfig cfg) {
+ config = cfg;
+ }
+
+ /**
+ * @param cfg
+ * configuration for the writer.
+ * @return {@code this}
+ */
+ public ReftableWriter setConfig(ReftableConfig cfg) {
+ this.config = cfg != null ? cfg : new ReftableConfig();
+ return this;
+ }
+
+ /**
+ * @param min
+ * the minimum update index for log entries that appear in this
+ * reftable. This should be 1 higher than the prior reftable's
+ * {@code maxUpdateIndex} if this table will be used in a stack.
+ * @return {@code this}
+ */
+ public ReftableWriter setMinUpdateIndex(long min) {
+ minUpdateIndex = min;
+ return this;
+ }
+
+ /**
+ * @param max
+ * the maximum update index for log entries that appear in this
+ * reftable. This should be at least 1 higher than the prior
+ * reftable's {@code maxUpdateIndex} if this table will be used
+ * in a stack.
+ * @return {@code this}
+ */
+ public ReftableWriter setMaxUpdateIndex(long max) {
+ maxUpdateIndex = max;
+ return this;
+ }
+
+ /**
+ * Begin writing the reftable.
+ *
+ * @param os
+ * stream to write the table to. Caller is responsible for
+ * closing the stream after invoking {@link #finish()}.
+ * @return {@code this}
+ * @throws IOException
+ * if reftable header cannot be written.
+ */
+ public ReftableWriter begin(OutputStream os) throws IOException {
+ refBlockSize = config.getRefBlockSize();
+ logBlockSize = config.getLogBlockSize();
+ restartInterval = config.getRestartInterval();
+ maxIndexLevels = config.getMaxIndexLevels();
+ alignBlocks = config.isAlignBlocks();
+ indexObjects = config.isIndexObjects();
+
+ if (refBlockSize <= 0) {
+ refBlockSize = 4 << 10;
+ } else if (refBlockSize > MAX_BLOCK_SIZE) {
+ throw new IllegalArgumentException();
+ }
+ if (logBlockSize <= 0) {
+ logBlockSize = 2 * refBlockSize;
+ }
+ if (restartInterval <= 0) {
+ restartInterval = refBlockSize < (60 << 10) ? 16 : 64;
+ }
+
+ out = new ReftableOutputStream(os, refBlockSize, alignBlocks);
+ refs = new Section(REF_BLOCK_TYPE);
+ if (indexObjects) {
+ obj2ref = new ObjectIdSubclassMap<>();
+ }
+ writeFileHeader();
+ return this;
+ }
+
+ /**
+ * Sort a collection of references and write them to the reftable.
+ *
+ * @param refsToPack
+ * references to sort and write.
+ * @return {@code this}
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public ReftableWriter sortAndWriteRefs(Collection<Ref> refsToPack)
+ throws IOException {
+ Iterator<RefEntry> itr = refsToPack.stream()
+ .map(RefEntry::new)
+ .sorted(Entry::compare)
+ .iterator();
+ while (itr.hasNext()) {
+ RefEntry entry = itr.next();
+ long blockPos = refs.write(entry);
+ indexRef(entry.ref, blockPos);
+ }
+ return this;
+ }
+
+ /**
+ * Write one reference to the reftable.
+ * <p>
+ * References must be passed in sorted order.
+ *
+ * @param ref
+ * the reference to store.
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void writeRef(Ref ref) throws IOException {
+ long blockPos = refs.write(new RefEntry(ref));
+ indexRef(ref, blockPos);
+ }
+
+ private void indexRef(Ref ref, long blockPos) {
+ if (indexObjects && !ref.isSymbolic()) {
+ indexId(ref.getObjectId(), blockPos);
+ indexId(ref.getPeeledObjectId(), blockPos);
+ }
+ }
+
+ private void indexId(ObjectId id, long blockPos) {
+ if (id != null) {
+ RefList l = obj2ref.get(id);
+ if (l == null) {
+ l = new RefList(id);
+ obj2ref.add(l);
+ }
+ l.addBlock(blockPos);
+ }
+ }
+
+ /**
+ * Write one reflog entry to the reftable.
+ * <p>
+ * Reflog entries must be written in reference name and descending
+ * {@code updateIndex} (highest first) order.
+ *
+ * @param ref
+ * name of the reference.
+ * @param updateIndex
+ * identifier of the transaction that created the log record. The
+ * {@code updateIndex} must be unique within the scope of
+ * {@code ref}, and must be within the bounds defined by
+ * {@code minUpdateIndex <= updateIndex <= maxUpdateIndex}.
+ * @param who
+ * committer of the reflog entry.
+ * @param oldId
+ * prior id; pass {@link ObjectId#zeroId()} for creations.
+ * @param newId
+ * new id; pass {@link ObjectId#zeroId()} for deletions.
+ * @param message
+ * optional message (may be null).
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void writeLog(String ref, long updateIndex, PersonIdent who,
+ ObjectId oldId, ObjectId newId, @Nullable String message)
+ throws IOException {
+ String msg = message != null ? message : ""; //$NON-NLS-1$
+ beginLog();
+ logs.write(new LogEntry(ref, updateIndex, who, oldId, newId, msg));
+ }
+
+ /**
+ * Record deletion of one reflog entry in this reftable.
+ *
+ * <p>
+ * The deletion can shadow an entry stored in a lower table in the stack.
+ * This is useful for {@code refs/stash} and dropping an entry from its
+ * reflog.
+ * <p>
+ * Deletion must be properly interleaved in sorted updateIndex order with
+ * any other logs written by
+ * {@link #writeLog(String, long, PersonIdent, ObjectId, ObjectId, String)}.
+ *
+ * @param ref
+ * the ref to delete (hide) a reflog entry from.
+ * @param updateIndex
+ * the update index that must be hidden.
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public void deleteLog(String ref, long updateIndex) throws IOException {
+ beginLog();
+ logs.write(new DeleteLogEntry(ref, updateIndex));
+ }
+
+ private void beginLog() throws IOException {
+ if (logs == null) {
+ finishRefAndObjSections(); // close prior ref blocks and their index, if present.
+ out.flushFileHeader();
+ out.setBlockSize(logBlockSize);
+ logs = new Section(LOG_BLOCK_TYPE);
+ }
+ }
+
+ /**
+ * @return an estimate of the current size in bytes of the reftable, if it
+ * was finished right now. Estimate is only accurate if
+ * {@link ReftableConfig#setIndexObjects(boolean)} is {@code false}
+ * and {@link ReftableConfig#setMaxIndexLevels(int)} is {@code 1}.
+ */
+ public long estimateTotalBytes() {
+ long bytes = out.size();
+ if (bytes == 0) {
+ bytes += FILE_HEADER_LEN;
+ }
+ if (cur != null) {
+ long curBlockPos = out.size();
+ int sz = cur.currentSize();
+ bytes += sz;
+
+ IndexBuilder idx = null;
+ if (cur.blockType() == REF_BLOCK_TYPE) {
+ idx = refs.idx;
+ } else if (cur.blockType() == LOG_BLOCK_TYPE) {
+ idx = logs.idx;
+ }
+ if (idx != null && shouldHaveIndex(idx)) {
+ if (idx == refs.idx) {
+ bytes += out.estimatePadBetweenBlocks(sz);
+ }
+ bytes += idx.estimateBytes(curBlockPos);
+ }
+ }
+ bytes += FILE_FOOTER_LEN;
+ return bytes;
+ }
+
+ /**
+ * Finish writing the reftable by writing its trailer.
+ *
+ * @return {@code this}
+ * @throws IOException
+ * if reftable cannot be written.
+ */
+ public ReftableWriter finish() throws IOException {
+ finishRefAndObjSections();
+ finishLogSection();
+ writeFileFooter();
+ out.finishFile();
+
+ stats = new Stats(this, out);
+ out = null;
+ obj2ref = null;
+ cur = null;
+ refs = null;
+ objs = null;
+ logs = null;
+ return this;
+ }
+
+ private void finishRefAndObjSections() throws IOException {
+ if (cur != null && cur.blockType() == REF_BLOCK_TYPE) {
+ refs.finishSectionMaybeWriteIndex();
+ if (indexObjects && !obj2ref.isEmpty() && refs.idx.bytes > 0) {
+ writeObjBlocks();
+ }
+ obj2ref = null;
+ }
+ }
+
+ private void writeObjBlocks() throws IOException {
+ List<RefList> sorted = sortById(obj2ref);
+ obj2ref = null;
+ objIdLen = shortestUniqueAbbreviation(sorted);
+
+ out.padBetweenBlocksToNextBlock();
+ objs = new Section(OBJ_BLOCK_TYPE);
+ objs.entryCnt = sorted.size();
+ for (RefList l : sorted) {
+ objs.write(new ObjEntry(objIdLen, l, l.blockPos));
+ }
+ objs.finishSectionMaybeWriteIndex();
+ }
+
+ private void finishLogSection() throws IOException {
+ if (cur != null && cur.blockType() == LOG_BLOCK_TYPE) {
+ logs.finishSectionMaybeWriteIndex();
+ }
+ }
+
+ private boolean shouldHaveIndex(IndexBuilder idx) {
+ int threshold;
+ if (idx == refs.idx && alignBlocks) {
+ threshold = 4;
+ } else {
+ threshold = 1;
+ }
+ return idx.entries.size() + (cur != null ? 1 : 0) > threshold;
+ }
+
+ private void writeFileHeader() {
+ byte[] hdr = new byte[FILE_HEADER_LEN];
+ encodeHeader(hdr);
+ out.write(hdr, 0, FILE_HEADER_LEN);
+ }
+
+ private void encodeHeader(byte[] hdr) {
+ System.arraycopy(FILE_HEADER_MAGIC, 0, hdr, 0, 4);
+ int bs = alignBlocks ? refBlockSize : 0;
+ NB.encodeInt32(hdr, 4, (VERSION_1 << 24) | bs);
+ NB.encodeInt64(hdr, 8, minUpdateIndex);
+ NB.encodeInt64(hdr, 16, maxUpdateIndex);
+ }
+
+ private void writeFileFooter() {
+ int ftrLen = FILE_FOOTER_LEN;
+ byte[] ftr = new byte[ftrLen];
+ encodeHeader(ftr);
+
+ NB.encodeInt64(ftr, 24, indexPosition(refs));
+ NB.encodeInt64(ftr, 32, (firstBlockPosition(objs) << 5) | objIdLen);
+ NB.encodeInt64(ftr, 40, indexPosition(objs));
+ NB.encodeInt64(ftr, 48, firstBlockPosition(logs));
+ NB.encodeInt64(ftr, 56, indexPosition(logs));
+
+ CRC32 crc = new CRC32();
+ crc.update(ftr, 0, ftrLen - 4);
+ NB.encodeInt32(ftr, ftrLen - 4, (int) crc.getValue());
+
+ out.write(ftr, 0, ftrLen);
+ }
+
+ private static long firstBlockPosition(@Nullable Section s) {
+ return s != null ? s.firstBlockPosition : 0;
+ }
+
+ private static long indexPosition(@Nullable Section s) {
+ return s != null && s.idx != null ? s.idx.rootPosition : 0;
+ }
+
+ /** @return statistics of the last written reftable. */
+ public Stats getStats() {
+ return stats;
+ }
+
+ /** Statistics about a written reftable. */
+ public static class Stats {
+ private final int refBlockSize;
+ private final int logBlockSize;
+ private final int restartInterval;
+
+ private final long minUpdateIndex;
+ private final long maxUpdateIndex;
+
+ private final long refCnt;
+ private final long objCnt;
+ private final int objIdLen;
+ private final long logCnt;
+ private final long refBytes;
+ private final long objBytes;
+ private final long logBytes;
+ private final long paddingUsed;
+ private final long totalBytes;
+
+ private final int refIndexSize;
+ private final int refIndexLevels;
+ private final int objIndexSize;
+ private final int objIndexLevels;
+
+ Stats(ReftableWriter w, ReftableOutputStream o) {
+ refBlockSize = w.refBlockSize;
+ logBlockSize = w.logBlockSize;
+ restartInterval = w.restartInterval;
+
+ minUpdateIndex = w.minUpdateIndex;
+ maxUpdateIndex = w.maxUpdateIndex;
+ paddingUsed = o.paddingUsed();
+ totalBytes = o.size();
+
+ refCnt = w.refs.entryCnt;
+ refBytes = w.refs.bytes;
+
+ objCnt = w.objs != null ? w.objs.entryCnt : 0;
+ objBytes = w.objs != null ? w.objs.bytes : 0;
+ objIdLen = w.objIdLen;
+
+ logCnt = w.logs != null ? w.logs.entryCnt : 0;
+ logBytes = w.logs != null ? w.logs.bytes : 0;
+
+ IndexBuilder refIdx = w.refs.idx;
+ refIndexSize = refIdx.bytes;
+ refIndexLevels = refIdx.levels;
+
+ IndexBuilder objIdx = w.objs != null ? w.objs.idx : null;
+ objIndexSize = objIdx != null ? objIdx.bytes : 0;
+ objIndexLevels = objIdx != null ? objIdx.levels : 0;
+ }
+
+ /** @return number of bytes in a ref block. */
+ public int refBlockSize() {
+ return refBlockSize;
+ }
+
+ /** @return number of bytes to compress into a log block. */
+ public int logBlockSize() {
+ return logBlockSize;
+ }
+
+ /** @return number of references between binary search markers. */
+ public int restartInterval() {
+ return restartInterval;
+ }
+
+ /** @return smallest update index contained in this reftable. */
+ public long minUpdateIndex() {
+ return minUpdateIndex;
+ }
+
+ /** @return largest update index contained in this reftable. */
+ public long maxUpdateIndex() {
+ return maxUpdateIndex;
+ }
+
+ /** @return total number of references in the reftable. */
+ public long refCount() {
+ return refCnt;
+ }
+
+ /** @return number of unique objects in the reftable. */
+ public long objCount() {
+ return objCnt;
+ }
+
+ /** @return total number of log records in the reftable. */
+ public long logCount() {
+ return logCnt;
+ }
+
+ /** @return number of bytes for references, including ref index. */
+ public long refBytes() {
+ return refBytes;
+ }
+
+ /** @return number of bytes for objects, including object index. */
+ public long objBytes() {
+ return objBytes;
+ }
+
+ /** @return number of bytes for log, including log index. */
+ public long logBytes() {
+ return logBytes;
+ }
+
+ /** @return total number of bytes in the reftable. */
+ public long totalBytes() {
+ return totalBytes;
+ }
+
+ /** @return bytes of padding used to maintain block alignment. */
+ public long paddingBytes() {
+ return paddingUsed;
+ }
+
+ /** @return number of bytes in the ref index; 0 if no index was used. */
+ public int refIndexSize() {
+ return refIndexSize;
+ }
+
+ /** @return number of levels in the ref index. */
+ public int refIndexLevels() {
+ return refIndexLevels;
+ }
+
+ /** @return number of bytes in the object index; 0 if no index. */
+ public int objIndexSize() {
+ return objIndexSize;
+ }
+
+ /** @return number of levels in the object index. */
+ public int objIndexLevels() {
+ return objIndexLevels;
+ }
+
+ /**
+ * @return number of bytes required to uniquely identify all objects in
+ * the reftable. Unique abbreviations in hex would be
+ * {@code 2 * objIdLength()}.
+ */
+ public int objIdLength() {
+ return objIdLen;
+ }
+ }
+
+ private static List<RefList> sortById(ObjectIdSubclassMap<RefList> m) {
+ List<RefList> s = new ArrayList<>(m.size());
+ for (RefList l : m) {
+ s.add(l);
+ }
+ Collections.sort(s);
+ return s;
+ }
+
+ private static int shortestUniqueAbbreviation(List<RefList> in) {
+ // Estimate minimum number of bytes necessary for unique abbreviations.
+ int bytes = Math.max(2, (int) (log(in.size()) / log(8)));
+ Set<AbbreviatedObjectId> tmp = new HashSet<>((int) (in.size() * 0.75f));
+ retry: for (;;) {
+ int hexLen = bytes * 2;
+ for (ObjectId id : in) {
+ AbbreviatedObjectId a = id.abbreviate(hexLen);
+ if (!tmp.add(a)) {
+ if (++bytes >= OBJECT_ID_LENGTH) {
+ return OBJECT_ID_LENGTH;
+ }
+ tmp.clear();
+ continue retry;
+ }
+ }
+ return bytes;
+ }
+ }
+
+ private static class RefList extends ObjectIdOwnerMap.Entry {
+ final LongList blockPos = new LongList(2);
+
+ RefList(AnyObjectId id) {
+ super(id);
+ }
+
+ void addBlock(long pos) {
+ if (!blockPos.contains(pos)) {
+ blockPos.add(pos);
+ }
+ }
+ }
+
+ private class Section {
+ final IndexBuilder idx;
+ final long firstBlockPosition;
+
+ long entryCnt;
+ long bytes;
+
+ Section(byte keyType) {
+ idx = new IndexBuilder(keyType);
+ firstBlockPosition = out.size();
+ }
+
+ long write(BlockWriter.Entry entry) throws IOException {
+ if (cur == null) {
+ beginBlock(entry);
+ } else if (!cur.tryAdd(entry)) {
+ flushCurBlock();
+ if (cur.padBetweenBlocks()) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ beginBlock(entry);
+ }
+ entryCnt++;
+ return out.size();
+ }
+
+ private void beginBlock(BlockWriter.Entry entry)
+ throws BlockSizeTooSmallException {
+ byte blockType = entry.blockType();
+ int bs = out.bytesAvailableInBlock();
+ cur = new BlockWriter(blockType, idx.keyType, bs, restartInterval);
+ cur.mustAdd(entry);
+ }
+
+ void flushCurBlock() throws IOException {
+ idx.entries.add(new IndexEntry(cur.lastKey(), out.size()));
+ cur.writeTo(out);
+ }
+
+ void finishSectionMaybeWriteIndex() throws IOException {
+ flushCurBlock();
+ cur = null;
+ if (shouldHaveIndex(idx)) {
+ idx.writeIndex();
+ }
+ bytes = out.size() - firstBlockPosition;
+ }
+ }
+
+ private class IndexBuilder {
+ final byte keyType;
+ List<IndexEntry> entries = new ArrayList<>();
+ long rootPosition;
+ int bytes;
+ int levels;
+
+ IndexBuilder(byte kt) {
+ keyType = kt;
+ }
+
+ int estimateBytes(long curBlockPos) {
+ BlockWriter b = new BlockWriter(
+ INDEX_BLOCK_TYPE, keyType,
+ MAX_BLOCK_SIZE,
+ Math.max(restartInterval, entries.size() / MAX_RESTARTS));
+ try {
+ for (Entry e : entries) {
+ b.mustAdd(e);
+ }
+ if (cur != null) {
+ b.mustAdd(new IndexEntry(cur.lastKey(), curBlockPos));
+ }
+ } catch (BlockSizeTooSmallException e) {
+ return b.currentSize();
+ }
+ return b.currentSize();
+ }
+
+ void writeIndex() throws IOException {
+ if (padBetweenBlocks(keyType)) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ long startPos = out.size();
+ writeMultiLevelIndex(entries);
+ bytes = (int) (out.size() - startPos);
+ entries = null;
+ }
+
+ private void writeMultiLevelIndex(List<IndexEntry> keys)
+ throws IOException {
+ levels = 1;
+ while (maxIndexLevels == 0 || levels < maxIndexLevels) {
+ keys = writeOneLevel(keys);
+ if (keys == null) {
+ return;
+ }
+ levels++;
+ }
+
+ // When maxIndexLevels has restricted the writer, write one
+ // index block with the entire remaining set of keys.
+ BlockWriter b = new BlockWriter(
+ INDEX_BLOCK_TYPE, keyType,
+ MAX_BLOCK_SIZE,
+ Math.max(restartInterval, keys.size() / MAX_RESTARTS));
+ for (Entry e : keys) {
+ b.mustAdd(e);
+ }
+ rootPosition = out.size();
+ b.writeTo(out);
+ }
+
+ private List<IndexEntry> writeOneLevel(List<IndexEntry> keys)
+ throws IOException {
+ Section thisLevel = new Section(keyType);
+ for (Entry e : keys) {
+ thisLevel.write(e);
+ }
+ if (!thisLevel.idx.entries.isEmpty()) {
+ thisLevel.flushCurBlock();
+ if (cur.padBetweenBlocks()) {
+ out.padBetweenBlocksToNextBlock();
+ }
+ cur = null;
+ return thisLevel.idx.entries;
+ }
+
+ // The current block fit the entire level; make it the root.
+ rootPosition = out.size();
+ cur.writeTo(out);
+ cur = null;
+ return null;
+ }
+ }
+}
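
A matching sketch for the writer (again illustrative only; the object id and identity below are invented). Refs go in first, sorted, then reflog records in name order with descending update indexes, then finish() emits any indexes and the file footer.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;

import org.eclipse.jgit.internal.storage.reftable.ReftableWriter;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;

class WriteReftableSketch {
	static byte[] writeTable() throws IOException {
		ObjectId newId = ObjectId
				.fromString("254618bf1ff25fe694fc8d192ca0a2e351e3d051"); // made up
		Ref master = new ObjectIdRef.PeeledNonTag(Ref.Storage.PACKED,
				"refs/heads/master", newId);

		ByteArrayOutputStream buf = new ByteArrayOutputStream();
		ReftableWriter w = new ReftableWriter()
				.setMinUpdateIndex(1)
				.setMaxUpdateIndex(1)
				.begin(buf);
		w.sortAndWriteRefs(Arrays.asList(master)); // refs first, sorted
		w.writeLog("refs/heads/master", 1,
				new PersonIdent("Alice", "alice@example.com"),
				ObjectId.zeroId(), newId, "branch: Created from commit"); // then logs
		w.finish(); // indexes (if needed) and footer
		return buf.toByteArray();
	}
}
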
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
index 29a379e..0567051 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/AbbreviatedObjectId.java
@@ -336,7 +336,7 @@ private int mask(final int word, final int v) {
@Override
public int hashCode() {
- return w2;
+ return w1;
}
@Override
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
index de1003b..825c1f7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BaseRepositoryBuilder.java
@@ -738,4 +738,4 @@ protected FS safeFS() {
protected final B self() {
return (B) this;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
index 3c5ecfb..956607c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BatchRefUpdate.java
@@ -59,6 +59,7 @@
import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.annotations.Nullable;
+import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.revwalk.RevWalk;
@@ -81,8 +82,10 @@ public class BatchRefUpdate {
* clock skew between machines on the same LAN using an NTP server also on
* the same LAN should be under 5 seconds. 5 seconds is also not that long
* for a large `git push` operation to complete.
+ *
+ * @since 4.9
*/
- private static final Duration MAX_WAIT = Duration.ofSeconds(5);
+ protected static final Duration MAX_WAIT = Duration.ofSeconds(5);
private final RefDatabase refdb;
@@ -174,25 +177,36 @@ public BatchRefUpdate setRefLogIdent(final PersonIdent pi) {
* @return message the caller wants to include in the reflog; null if the
* update should not be logged.
*/
+ @Nullable
public String getRefLogMessage() {
return refLogMessage;
}
- /** @return {@code true} if the ref log message should show the result. */
+ /**
+ * Check whether the reflog message should include the result of the update,
+ * such as fast-forward or force-update.
+ * <p>
+ * Describes the default for commands in this batch that do not override it
+ * with {@link ReceiveCommand#setRefLogMessage(String, boolean)}.
+ *
+ * @return true if the message should include the result.
+ */
public boolean isRefLogIncludingResult() {
return refLogIncludeResult;
}
/**
* Set the message to include in the reflog.
+ * <p>
+ * Describes the default for commands in this batch that do not override it
+ * with {@link ReceiveCommand#setRefLogMessage(String, boolean)}.
*
* @param msg
- * the message to describe this change. It may be null if
- * appendStatus is null in order not to append to the reflog
+ * the message to describe this change. If null and appendStatus is
+ * false, the reflog will not be updated.
* @param appendStatus
* true if the status of the ref change (fast-forward or
- * forced-update) should be appended to the user supplied
- * message.
+ * forced-update) should be appended to the user supplied message.
* @return {@code this}.
*/
public BatchRefUpdate setRefLogMessage(String msg, boolean appendStatus) {
@@ -210,6 +224,8 @@ else if (msg == null && appendStatus) {
/**
* Don't record this update in the ref's associated reflog.
+ * <p>
+ * Equivalent to {@code setRefLogMessage(null, false)}.
*
* @return {@code this}.
*/
@@ -219,7 +235,11 @@ public BatchRefUpdate disableRefLog() {
return this;
}
- /** @return true if log has been disabled by {@link #disableRefLog()}. */
+ /**
+ * Check whether log has been disabled by {@link #disableRefLog()}.
+ *
+ * @return true if disabled.
+ */
public boolean isRefLogDisabled() {
return refLogMessage == null;
}
@@ -334,6 +354,19 @@ public List<String> getPushOptions() {
}
/**
+ * Set push options associated with this update.
+ * <p>
+ * Implementations must call this at the top of {@link #execute(RevWalk,
+ * ProgressMonitor, List)}.
+ *
+ * @param options options passed to {@code execute}.
+ * @since 4.9
+ */
+ protected void setPushOptions(List<String> options) {
+ pushOptions = options;
+ }
+
+ /**
* @return list of timestamps the batch must wait for.
* @since 4.6
*/
@@ -399,7 +432,7 @@ public void execute(RevWalk walk, ProgressMonitor monitor,
}
if (options != null) {
- pushOptions = options;
+ setPushOptions(options);
}
monitor.beginTask(JGitText.get().updatingReferences, commands.size());
@@ -410,6 +443,11 @@ public void execute(RevWalk walk, ProgressMonitor monitor,
for (ReceiveCommand cmd : commands) {
try {
if (cmd.getResult() == NOT_ATTEMPTED) {
+ if (isMissing(walk, cmd.getOldId())
+ || isMissing(walk, cmd.getNewId())) {
+ cmd.setResult(ReceiveCommand.Result.REJECTED_MISSING_OBJECT);
+ continue;
+ }
cmd.updateType(walk);
switch (cmd.getType()) {
case CREATE:
@@ -481,6 +519,19 @@ public void execute(RevWalk walk, ProgressMonitor monitor,
monitor.endTask();
}
+ private static boolean isMissing(RevWalk walk, ObjectId id)
+ throws IOException {
+ if (id.equals(ObjectId.zeroId())) {
+ return false; // Explicit add or delete is not missing.
+ }
+ try {
+ walk.parseAny(id);
+ return false;
+ } catch (MissingObjectException e) {
+ return true;
+ }
+ }
+
/**
* Wait for timestamps to be in the past, aborting commands on timeout.
*
@@ -534,17 +585,36 @@ private static Collection<String> getTakenPrefixes(Collection<String> names) {
return ref;
}
- static Collection<String> getPrefixes(String s) {
+ /**
+ * Get all path prefixes of a ref name.
+ *
+ * @param name
+ * ref name.
+ * @return path prefixes of the ref name. For {@code refs/heads/foo}, returns
+ * {@code refs} and {@code refs/heads}.
+ * @since 4.9
+ */
+ protected static Collection<String> getPrefixes(String name) {
Collection<String> ret = new HashSet<>();
- addPrefixesTo(s, ret);
+ addPrefixesTo(name, ret);
return ret;
}
- static void addPrefixesTo(String s, Collection<String> out) {
- int p1 = s.indexOf('/');
+ /**
+ * Add prefixes of a ref name to an existing collection.
+ *
+ * @param name
+ * ref name.
+ * @param out
+ * collection to add the prefixes to. For {@code refs/heads/foo},
+ * adds {@code refs} and {@code refs/heads}.
+ * @since 4.9
+ */
+ protected static void addPrefixesTo(String name, Collection<String> out) {
+ int p1 = name.indexOf('/');
while (p1 > 0) {
- out.add(s.substring(0, p1));
- p1 = s.indexOf('/', p1 + 1);
+ out.add(name.substring(0, p1));
+ p1 = name.indexOf('/', p1 + 1);
}
}
@@ -560,11 +630,11 @@ static void addPrefixesTo(String s, Collection<String> out) {
*/
protected RefUpdate newUpdate(ReceiveCommand cmd) throws IOException {
RefUpdate ru = refdb.newUpdate(cmd.getRefName(), false);
- if (isRefLogDisabled())
+ if (isRefLogDisabled(cmd)) {
ru.disableRefLog();
- else {
+ } else {
ru.setRefLogIdent(refLogIdent);
- ru.setRefLogMessage(refLogMessage, refLogIncludeResult);
+ ru.setRefLogMessage(getRefLogMessage(cmd), isRefLogIncludingResult(cmd));
}
ru.setPushCertificate(pushCert);
switch (cmd.getType()) {
@@ -585,6 +655,47 @@ protected RefUpdate newUpdate(ReceiveCommand cmd) throws IOException {
}
}
+ /**
+ * Check whether reflog is disabled for a command.
+ *
+ * @param cmd
+ * specific command.
+ * @return whether the reflog is disabled, taking into account the state from
+ * this instance as well as overrides in the given command.
+ * @since 4.9
+ */
+ protected boolean isRefLogDisabled(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog() ? cmd.isRefLogDisabled() : isRefLogDisabled();
+ }
+
+ /**
+ * Get reflog message for a command.
+ *
+ * @param cmd
+ * specific command.
+ * @return reflog message, taking into account the state from this instance as
+ * well as overrides in the given command.
+ * @since 4.9
+ */
+ protected String getRefLogMessage(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog() ? cmd.getRefLogMessage() : getRefLogMessage();
+ }
+
+ /**
+ * Check whether the reflog message for a command should include the result.
+ *
+ * @param cmd
+ * specific command.
+ * @return whether the reflog message should show the result, taking into
+ * account the state from this instance as well as overrides in the
+ * given command.
+ * @since 4.9
+ */
+ protected boolean isRefLogIncludingResult(ReceiveCommand cmd) {
+ return cmd.hasCustomRefLog()
+ ? cmd.isRefLogIncludingResult() : isRefLogIncludingResult();
+ }
+
@Override
public String toString() {
StringBuilder r = new StringBuilder();
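
A hedged sketch of how a RefDatabase-specific subclass might consult the new protected per-command accessors; the class and helper below are invented for illustration and assume the ReflogEntry prefixes added later in this change.

import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.ReflogEntry;
import org.eclipse.jgit.transport.ReceiveCommand;

class ReflogAwareBatch extends BatchRefUpdate {
	ReflogAwareBatch(RefDatabase refdb) {
		super(refdb);
	}

	// Message (if any) to record for one command, honoring its overrides.
	String reflogMessageFor(ReceiveCommand cmd, boolean fastForward) {
		if (isRefLogDisabled(cmd)) {
			return null; // skip the reflog for this command
		}
		String msg = getRefLogMessage(cmd);
		if (isRefLogIncludingResult(cmd)) {
			msg += ": " + (fastForward
					? ReflogEntry.PREFIX_FAST_FORWARD
					: ReflogEntry.PREFIX_FORCED_UPDATE);
		}
		return msg;
	}
}
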
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
index 345016c..4e0dc2c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/BitmapObject.java
@@ -62,4 +62,4 @@ public abstract class BitmapObject {
* @return unique hash of this object.
*/
public abstract ObjectId getObjectId();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
index d6608cd..34d0b14 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CheckoutEntry.java
@@ -17,4 +17,4 @@ public interface CheckoutEntry {
*/
public abstract String getToBranch();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
index 40aba63..fdbbe39 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/CoreConfig.java
@@ -57,12 +57,7 @@
*/
public class CoreConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<CoreConfig> KEY = new SectionParser<CoreConfig>() {
- @Override
- public CoreConfig parse(final Config cfg) {
- return new CoreConfig(cfg);
- }
- };
+ public static final Config.SectionParser<CoreConfig> KEY = CoreConfig::new;
/** Permissible values for {@code core.autocrlf}. */
public static enum AutoCRLF {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
index a489461..edbc709 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/FileMode.java
@@ -83,7 +83,6 @@ public abstract class FileMode {
public static final int TYPE_MISSING = 0000000;
/** Mode indicating an entry is a tree (aka directory). */
- @SuppressWarnings("synthetic-access")
public static final FileMode TREE = new FileMode(TYPE_TREE,
Constants.OBJ_TREE) {
@Override
@@ -93,7 +92,6 @@ public boolean equals(final int modeBits) {
};
/** Mode indicating an entry is a symbolic link. */
- @SuppressWarnings("synthetic-access")
public static final FileMode SYMLINK = new FileMode(TYPE_SYMLINK,
Constants.OBJ_BLOB) {
@Override
@@ -103,7 +101,6 @@ public boolean equals(final int modeBits) {
};
/** Mode indicating an entry is a non-executable file. */
- @SuppressWarnings("synthetic-access")
public static final FileMode REGULAR_FILE = new FileMode(0100644,
Constants.OBJ_BLOB) {
@Override
@@ -113,7 +110,6 @@ public boolean equals(final int modeBits) {
};
/** Mode indicating an entry is an executable file. */
- @SuppressWarnings("synthetic-access")
public static final FileMode EXECUTABLE_FILE = new FileMode(0100755,
Constants.OBJ_BLOB) {
@Override
@@ -123,7 +119,6 @@ public boolean equals(final int modeBits) {
};
/** Mode indicating an entry is a submodule commit in another repository. */
- @SuppressWarnings("synthetic-access")
public static final FileMode GITLINK = new FileMode(TYPE_GITLINK,
Constants.OBJ_COMMIT) {
@Override
@@ -133,7 +128,6 @@ public boolean equals(final int modeBits) {
};
/** Mode indicating an entry is missing during parallel walks. */
- @SuppressWarnings("synthetic-access")
public static final FileMode MISSING = new FileMode(TYPE_MISSING,
Constants.OBJ_BAD) {
@Override
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
index e544b72..ea573a4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/IndexDiff.java
@@ -513,14 +513,10 @@ public boolean diff(final ProgressMonitor monitor, int estWorkTreeSize,
}
}
- for (int i = 0; i < treeWalk.getTreeCount(); i++) {
- Set<String> values = fileModes.get(treeWalk.getFileMode(i));
- String path = treeWalk.getPathString();
- if (path != null) {
- if (values == null)
- values = new HashSet<>();
- values.add(path);
- fileModes.put(treeWalk.getFileMode(i), values);
+ String path = treeWalk.getPathString();
+ if (path != null) {
+ for (int i = 0; i < treeWalk.getTreeCount(); i++) {
+ recordFileMode(path, treeWalk.getFileMode(i));
}
}
}
@@ -545,19 +541,21 @@ public boolean diff(final ProgressMonitor monitor, int estWorkTreeSize,
}
Repository subRepo = smw.getRepository();
if (subRepo != null) {
+ String subRepoPath = smw.getPath();
try {
ObjectId subHead = subRepo.resolve("HEAD"); //$NON-NLS-1$
if (subHead != null
- && !subHead.equals(smw.getObjectId()))
- modified.add(smw.getPath());
- else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
+ && !subHead.equals(smw.getObjectId())) {
+ modified.add(subRepoPath);
+ recordFileMode(subRepoPath, FileMode.GITLINK);
+ } else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
IndexDiff smid = submoduleIndexDiffs.get(smw
.getPath());
if (smid == null) {
smid = new IndexDiff(subRepo,
smw.getObjectId(),
wTreeIt.getWorkingTreeIterator(subRepo));
- submoduleIndexDiffs.put(smw.getPath(), smid);
+ submoduleIndexDiffs.put(subRepoPath, smid);
}
if (smid.diff()) {
if (ignoreSubmoduleMode == IgnoreSubmoduleMode.UNTRACKED
@@ -569,7 +567,8 @@ else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
&& smid.getRemoved().isEmpty()) {
continue;
}
- modified.add(smw.getPath());
+ modified.add(subRepoPath);
+ recordFileMode(subRepoPath, FileMode.GITLINK);
}
}
} finally {
@@ -593,6 +592,17 @@ else if (ignoreSubmoduleMode != IgnoreSubmoduleMode.DIRTY) {
return true;
}
+ private void recordFileMode(String path, FileMode mode) {
+ Set<String> values = fileModes.get(mode);
+ if (path != null) {
+ if (values == null) {
+ values = new HashSet<>();
+ fileModes.put(mode, values);
+ }
+ values.add(path);
+ }
+ }
+
private boolean isEntryGitLink(AbstractTreeIterator ti) {
return ((ti != null) && (ti.getEntryRawMode() == FileMode.GITLINK
.getBits()));
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
index 61fda94..0778645 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/RefUpdate.java
@@ -58,7 +58,13 @@
* Creates, updates or deletes any reference.
*/
public abstract class RefUpdate {
- /** Status of an update request. */
+ /**
+ * Status of an update request.
+ * <p>
+ * New values may be added to this enum in the future. Callers may assume that
+ * unknown values are failures, and may generally treat them the same as
+ * {@link #REJECTED_OTHER_REASON}.
+ */
public static enum Result {
/** The ref update/delete has not been attempted by the caller. */
NOT_ATTEMPTED,
@@ -114,6 +120,10 @@ public static enum Result {
* merged into the new value. The configuration did not allow a forced
* update/delete to take place, so ref still contains the old value. No
* previous history was lost.
+ * <p>
+ * <em>Note:</em> Despite the general name, this result only refers to the
+ * non-fast-forward case. For more general errors, see {@link
+ * #REJECTED_OTHER_REASON}.
*/
REJECTED,
@@ -139,7 +149,25 @@ public static enum Result {
* The ref was renamed from another name
* <p>
*/
- RENAMED
+ RENAMED,
+
+ /**
+ * One or more objects aren't in the repository.
+ * <p>
+ * This is severe indication of either repository corruption on the
+ * This is a severe indication of either repository corruption on the
+ * all required objects during the pack transfer.
+ *
+ * @since 4.9
+ */
+ REJECTED_MISSING_OBJECT,
+
+ /**
+ * Rejected for some other reason not covered by another enum value.
+ *
+ * @since 4.9
+ */
+ REJECTED_OTHER_REASON;
}
/** New value the caller wants this ref to have. */
@@ -637,34 +665,47 @@ private Result updateImpl(final RevWalk walk, final Store store)
RevObject oldObj;
// don't make expensive conflict check if this is an existing Ref
- if (oldValue == null && checkConflicting && getRefDatabase().isNameConflicting(getName()))
+ if (oldValue == null && checkConflicting
+ && getRefDatabase().isNameConflicting(getName())) {
return Result.LOCK_FAILURE;
+ }
try {
// If we're detaching a symbolic reference, we should update the reference
// itself. Otherwise, we will update the leaf reference, which should be
// an ObjectIdRef.
- if (!tryLock(!detachingSymbolicRef))
+ if (!tryLock(!detachingSymbolicRef)) {
return Result.LOCK_FAILURE;
+ }
if (expValue != null) {
final ObjectId o;
o = oldValue != null ? oldValue : ObjectId.zeroId();
- if (!AnyObjectId.equals(expValue, o))
+ if (!AnyObjectId.equals(expValue, o)) {
return Result.LOCK_FAILURE;
+ }
}
- if (oldValue == null)
+ try {
+ newObj = safeParseNew(walk, newValue);
+ } catch (MissingObjectException e) {
+ return Result.REJECTED_MISSING_OBJECT;
+ }
+
+ if (oldValue == null) {
return store.execute(Result.NEW);
+ }
- newObj = safeParse(walk, newValue);
- oldObj = safeParse(walk, oldValue);
- if (newObj == oldObj && !detachingSymbolicRef)
+ oldObj = safeParseOld(walk, oldValue);
+ if (newObj == oldObj && !detachingSymbolicRef) {
return store.execute(Result.NO_CHANGE);
+ }
- if (isForceUpdate())
+ if (isForceUpdate()) {
return store.execute(Result.FORCED);
+ }
if (newObj instanceof RevCommit && oldObj instanceof RevCommit) {
- if (walk.isMergedInto((RevCommit) oldObj, (RevCommit) newObj))
+ if (walk.isMergedInto((RevCommit) oldObj, (RevCommit) newObj)) {
return store.execute(Result.FAST_FORWARD);
+ }
}
return Result.REJECTED;
@@ -684,16 +725,23 @@ public void setCheckConflicting(boolean check) {
checkConflicting = check;
}
- private static RevObject safeParse(final RevWalk rw, final AnyObjectId id)
+ private static RevObject safeParseNew(RevWalk rw, AnyObjectId newId)
+ throws IOException {
+ if (newId == null || ObjectId.zeroId().equals(newId)) {
+ return null;
+ }
+ return rw.parseAny(newId);
+ }
+
+ private static RevObject safeParseOld(RevWalk rw, AnyObjectId oldId)
throws IOException {
try {
- return id != null ? rw.parseAny(id) : null;
+ return oldId != null ? rw.parseAny(oldId) : null;
} catch (MissingObjectException e) {
- // We can expect some objects to be missing, like if we are
- // trying to force a deletion of a branch and the object it
- // points to has been pruned from the database due to freak
- // corruption accidents (it happens with 'git new-work-dir').
- //
+ // We can expect some old objects to be missing, like if we are trying to
+ // force a deletion of a branch and the object it points to has been
+ // pruned from the database due to freak corruption accidents (it happens
+ // with 'git new-work-dir').
return null;
}
}
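
Given the note above that unknown Result values should be treated as failures, callers can switch defensively. An illustrative sketch, not code from this change:

import org.eclipse.jgit.lib.RefUpdate;

class ResultCheck {
	// Per the javadoc: anything not explicitly known to succeed is a failure.
	static boolean succeeded(RefUpdate.Result r) {
		switch (r) {
		case NEW:
		case FORCED:
		case FAST_FORWARD:
		case NO_CHANGE:
		case RENAMED:
			return true;
		default:
			// REJECTED, LOCK_FAILURE, IO_FAILURE, REJECTED_MISSING_OBJECT,
			// REJECTED_OTHER_REASON, and any value added in the future.
			return false;
		}
	}
}
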
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
index 0504646..afa6521 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogEntry.java
@@ -50,6 +50,39 @@
public interface ReflogEntry {
/**
+ * Prefix used in reflog messages when the ref was first created.
+ * <p>
+ * Does not have a corresponding constant in C git, but is untranslated like
+ * the other constants.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_CREATED = "created"; //$NON-NLS-1$
+
+ /**
+ * Prefix used in reflog messages when the ref was updated with a
+ * fast-forward.
+ * <p>
+ * Untranslated, and exactly matches the
+ * <a href="https://git.kernel.org/pub/scm/git/git.git/tree/builtin/fetch.c?id=f3da2b79be9565779e4f76dc5812c68e156afdf0#n680">
+ * untranslated string in C git</a>.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_FAST_FORWARD = "fast-forward"; //$NON-NLS-1$
+
+ /**
+ * Prefix used in reflog messages when the ref was force updated.
+ * <p>
+ * Untranslated, and exactly matches the
+ * <a href="https://git.kernel.org/pub/scm/git/git.git/tree/builtin/fetch.c?id=f3da2b79be9565779e4f76dc5812c68e156afdf0#n695">
+ * untranslated string in C git</a>.
+ *
+ * @since 4.9
+ */
+ public static final String PREFIX_FORCED_UPDATE = "forced-update"; //$NON-NLS-1$
+
+ /**
* @return the commit id before the change
*/
public abstract ObjectId getOldId();
@@ -75,4 +108,4 @@ public interface ReflogEntry {
*/
public abstract CheckoutEntry parseCheckout();
-}
\ No newline at end of file
+}
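
The three PREFIX_* constants above are the literal, untranslated strings that start reflog comments. As a rough, hedged sketch (not part of this change, and assuming the existing ReflogEntry#getComment() accessor), a caller might classify entries like this:

```java
import org.eclipse.jgit.lib.ReflogEntry;

class ReflogKind {
	/** Returns a coarse classification of a reflog entry by its comment prefix. */
	static String kindOf(ReflogEntry entry) {
		String comment = entry.getComment();
		if (comment.startsWith(ReflogEntry.PREFIX_CREATED)) {
			return "created";
		} else if (comment.startsWith(ReflogEntry.PREFIX_FAST_FORWARD)) {
			return "fast-forward";
		} else if (comment.startsWith(ReflogEntry.PREFIX_FORCED_UPDATE)) {
			return "forced-update";
		}
		return "other";
	}
}
```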
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
index fdab883..d3f2536 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/ReflogReader.java
@@ -86,4 +86,4 @@ public interface ReflogReader {
public abstract List<ReflogEntry> getReverseEntries(int max)
throws IOException;
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
index 1f2ab9d..72f79f4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/Repository.java
@@ -106,7 +106,13 @@
* A repository holds all objects and refs used for managing source code (could
* be any type of file, but source code is what SCM's are typically used for).
* <p>
- * This class is thread-safe.
+ * The thread-safety of a {@link Repository} very much depends on the concrete
+ * implementation. Applications working with a generic {@code Repository} type
+ * must not assume the instance is thread-safe.
+ * <ul>
+ * <li>{@code FileRepository} is thread-safe.
+ * <li>{@code DfsRepository} thread-safety is determined by its subclass.
+ * </ul>
*/
public abstract class Repository implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(Repository.class);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
index bd393dd..102a451 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/lib/UserConfig.java
@@ -51,12 +51,7 @@
/** The standard "user" configuration parameters. */
public class UserConfig {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<UserConfig> KEY = new SectionParser<UserConfig>() {
- @Override
- public UserConfig parse(final Config cfg) {
- return new UserConfig(cfg);
- }
- };
+ public static final Config.SectionParser<UserConfig> KEY = UserConfig::new;
private String authorName;
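
The anonymous SectionParser above becomes a constructor reference; callers are unaffected. A minimal usage sketch (assuming the standard getAuthorName()/getAuthorEmail() accessors):

```java
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.UserConfig;

class WhoAmI {
	/** Reads the author identity via the UserConfig section parser. */
	static String authorOf(Repository repo) {
		UserConfig user = repo.getConfig().get(UserConfig.KEY);
		return user.getAuthorName() + " <" + user.getAuthorEmail() + ">";
	}
}
```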
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
index 0345921..060f068 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/merge/MergeFormatterPass.java
@@ -143,4 +143,4 @@ private void writeLine(RawText seq, int i) throws IOException {
if (out.isBeginln())
out.write('\n');
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java b/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
index c85c179..bde69c0 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/nls/TranslationBundle.java
@@ -184,4 +184,4 @@ void load(Locale locale)
}
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
index e230c9b..51dd2ed 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/revwalk/filter/SkipRevFilter.java
@@ -91,4 +91,4 @@ public boolean include(RevWalk walker, RevCommit cmit)
public RevFilter clone() {
return new SkipRevFilter(skip);
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
index e8d1881..61c4c4b 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BasePackFetchConnection.java
@@ -63,7 +63,6 @@
import org.eclipse.jgit.internal.storage.file.PackLock;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId;
import org.eclipse.jgit.lib.NullProgressMonitor;
@@ -250,7 +249,7 @@ public BasePackFetchConnection(final PackTransport packTransport) {
super(packTransport);
if (local != null) {
- final FetchConfig cfg = local.getConfig().get(FetchConfig.KEY);
+ final FetchConfig cfg = local.getConfig().get(FetchConfig::new);
allowOfsDelta = cfg.allowOfsDelta;
} else {
allowOfsDelta = true;
@@ -279,13 +278,6 @@ public BasePackFetchConnection(final PackTransport packTransport) {
}
private static class FetchConfig {
- static final SectionParser<FetchConfig> KEY = new SectionParser<FetchConfig>() {
- @Override
- public FetchConfig parse(final Config cfg) {
- return new FetchConfig(cfg);
- }
- };
-
final boolean allowOfsDelta;
FetchConfig(final Config c) {
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
index 6f94dbb..6420015 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/BaseReceivePack.java
@@ -78,7 +78,6 @@
import org.eclipse.jgit.internal.storage.file.PackLock;
import org.eclipse.jgit.lib.BatchRefUpdate;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.NullProgressMonitor;
import org.eclipse.jgit.lib.ObjectChecker;
@@ -314,7 +313,7 @@ protected BaseReceivePack(final Repository into) {
TransferConfig tc = db.getConfig().get(TransferConfig.KEY);
objectChecker = tc.newReceiveObjectChecker();
- ReceiveConfig rc = db.getConfig().get(ReceiveConfig.KEY);
+ ReceiveConfig rc = db.getConfig().get(ReceiveConfig::new);
allowCreates = rc.allowCreates;
allowAnyDeletes = true;
allowBranchDeletes = rc.allowDeletes;
@@ -332,13 +331,6 @@ protected BaseReceivePack(final Repository into) {
/** Configuration for receive operations. */
protected static class ReceiveConfig {
- static final SectionParser<ReceiveConfig> KEY = new SectionParser<ReceiveConfig>() {
- @Override
- public ReceiveConfig parse(final Config cfg) {
- return new ReceiveConfig(cfg);
- }
- };
-
final boolean allowCreates;
final boolean allowDeletes;
final boolean allowNonFastForwards;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
index 80b2cae..566153a 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/DaemonService.java
@@ -64,12 +64,7 @@ public abstract class DaemonService {
DaemonService(final String cmdName, final String cfgName) {
command = cmdName.startsWith("git-") ? cmdName : "git-" + cmdName; //$NON-NLS-1$ //$NON-NLS-2$
- configKey = new SectionParser<ServiceConfig>() {
- @Override
- public ServiceConfig parse(final Config cfg) {
- return new ServiceConfig(DaemonService.this, cfg, cfgName);
- }
- };
+ configKey = cfg -> new ServiceConfig(DaemonService.this, cfg, cfgName);
overridable = true;
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
index ce14183..242d1c4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschConfigSessionFactory.java
@@ -259,6 +259,9 @@ protected void configureJSch(JSch jsch) {
protected JSch getJSch(final OpenSshConfig.Host hc, FS fs) throws JSchException {
if (defaultJSch == null) {
defaultJSch = createDefaultJSch(fs);
+ if (defaultJSch.getConfigRepository() == null) {
+ defaultJSch.setConfigRepository(config);
+ }
for (Object name : defaultJSch.getIdentityNames())
byIdentityFile.put((String) name, defaultJSch);
}
@@ -272,6 +275,9 @@ protected JSch getJSch(final OpenSshConfig.Host hc, FS fs) throws JSchException
if (jsch == null) {
jsch = new JSch();
configureJSch(jsch);
+ if (jsch.getConfigRepository() == null) {
+ jsch.setConfigRepository(defaultJSch.getConfigRepository());
+ }
jsch.setHostKeyRepository(defaultJSch.getHostKeyRepository());
jsch.addIdentity(identityKey);
byIdentityFile.put(identityKey, jsch);
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
index f445bcb..82d6ed4e 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/JschSession.java
@@ -229,4 +229,4 @@ public int waitFor() throws InterruptedException {
return exitValue();
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
index bab5bf0..5727b03 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/NetRC.java
@@ -317,4 +317,4 @@ private void parse() {
}
}
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
index 8b7b60d..b5b532d 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/OpenSshConfig.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2014, Google Inc.
+ * Copyright (C) 2008, 2017, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -46,32 +46,90 @@
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
+import java.util.Set;
import org.eclipse.jgit.errors.InvalidPatternException;
import org.eclipse.jgit.fnmatch.FileNameMatcher;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.StringUtils;
+import org.eclipse.jgit.util.SystemReader;
+
+import com.jcraft.jsch.ConfigRepository;
/**
- * Simple configuration parser for the OpenSSH ~/.ssh/config file.
+ * Fairly complete configuration parser for the OpenSSH ~/.ssh/config file.
* <p>
- * Since JSch does not (currently) have the ability to parse an OpenSSH
- * configuration file this is a simple parser to read that file and make the
- * critical options available to {@link SshSessionFactory}.
+ * JSch does have its own config file parser
+ * {@link com.jcraft.jsch.OpenSSHConfig} since version 0.1.50, but it has a
+ * number of problems:
+ * <ul>
+ * <li>it splits lines of the format "keyword = value" wrongly: you'd end up
+ * with the value "= value".
+ * <li>its "Host" keyword is not case-insensitive.
+ * <li>it doesn't handle quoted values.
+ * <li>it doesn't monitor the config file for changes.
+ * </ul>
+ * <p>
+ * Therefore we implement our own parser to read an OpenSSH configuration file. It
+ * makes the critical options available to {@link SshSessionFactory} via
+ * {@link Host} objects returned by {@link #lookup(String)}, and implements a
+ * fully conforming {@link ConfigRepository} providing
+ * {@link com.jcraft.jsch.ConfigRepository.Config}s via
+ * {@link #getConfig(String)}.
+ * </p>
+ * <p>
+ * Limitations compared to the full OpenSSH 7.5 parser:
+ * </p>
+ * <ul>
+ * <li>This parser does not handle Match or Include keywords.
+ * <li>This parser does not do host name canonicalization (Jsch ignores it
+ * anyway).
+ * </ul>
+ * <p>
+ * Note that OpenSSH's readconf.c is a validating parser; Jsch's
+ * ConfigRepository, on the other hand, treats all option values as plain
+ * strings, so any validation must happen in Jsch outside of the parser.
+ * not validate option values, except for a few options when constructing a
+ * {@link Host} object.
+ * </p>
+ * <p>
+ * This config does %-substitutions for the following tokens:
+ * </p>
+ * <ul>
+ * <li>%% - single %
+ * <li>%C - short-hand for %l%h%p%r. See %p and %r below; the replacement may be
+ * only partial and may leave %p or %r or both unreplaced.
+ * <li>%d - home directory path
+ * <li>%h - remote host name
+ * <li>%L - local host name without domain
+ * <li>%l - FQDN of the local host
+ * <li>%n - host name as specified in {@link #lookup(String)}
+ * <li>%p - port number; replaced only if set in the config
+ * <li>%r - remote user name; replaced only if set in the config
+ * <li>%u - local user name
+ * </ul>
+ * <p>
+ * If the config doesn't set the port or the remote user name, %p and %r remain
+ * un-substituted. It's the caller's responsibility to replace them with values
+ * obtained from the connection URI. %i is not handled; Java has no concept of a
+ * "user ID".
+ * </p>
*/
-public class OpenSshConfig {
+public class OpenSshConfig implements ConfigRepository {
+
/** IANA assigned port number for SSH. */
static final int SSH_PORT = 22;
@@ -105,16 +163,31 @@ public static OpenSshConfig get(FS fs) {
/** The .ssh/config file we read and monitor for updates. */
private final File configFile;
- /** Modification time of {@link #configFile} when {@link #hosts} loaded. */
+ /** Modification time of {@link #configFile} when it was last loaded. */
private long lastModified;
- /** Cached entries read out of the configuration file. */
- private Map<String, Host> hosts;
+ /**
+ * Encapsulates entries read out of the configuration file, and
+ * {@link Host}s created from that.
+ */
+ private static class State {
+ Map<String, HostEntry> entries = new LinkedHashMap<>();
+ Map<String, Host> hosts = new HashMap<>();
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "State [entries=" + entries + ", hosts=" + hosts + "]";
+ }
+ }
+
+ /** State read from the config file, plus {@link Host}s created from it. */
+ private State state;
OpenSshConfig(final File h, final File cfg) {
home = h;
configFile = cfg;
- hosts = Collections.emptyMap();
+ state = new State();
}
/**
@@ -127,75 +200,81 @@ public static OpenSshConfig get(FS fs) {
* @return configuration for the requested name. Never null.
*/
public Host lookup(final String hostName) {
- final Map<String, Host> cache = refresh();
- Host h = cache.get(hostName);
- if (h == null)
- h = new Host();
- if (h.patternsApplied)
+ final State cache = refresh();
+ Host h = cache.hosts.get(hostName);
+ if (h != null) {
return h;
-
- for (final Map.Entry<String, Host> e : cache.entrySet()) {
- if (!isHostPattern(e.getKey()))
- continue;
- if (!isHostMatch(e.getKey(), hostName))
- continue;
- h.copyFrom(e.getValue());
}
-
- if (h.hostName == null)
- h.hostName = hostName;
- if (h.user == null)
- h.user = OpenSshConfig.userName();
- if (h.port == 0)
- h.port = OpenSshConfig.SSH_PORT;
- if (h.connectionAttempts == 0)
- h.connectionAttempts = 1;
- h.patternsApplied = true;
+ HostEntry fullConfig = new HostEntry();
+ // Initialize with default entries at the top of the file, before the
+ // first Host block.
+ fullConfig.merge(cache.entries.get(HostEntry.DEFAULT_NAME));
+ for (final Map.Entry<String, HostEntry> e : cache.entries.entrySet()) {
+ String key = e.getKey();
+ if (isHostMatch(key, hostName)) {
+ fullConfig.merge(e.getValue());
+ }
+ }
+ fullConfig.substitute(hostName, home);
+ h = new Host(fullConfig, hostName, home);
+ cache.hosts.put(hostName, h);
return h;
}
- private synchronized Map<String, Host> refresh() {
+ private synchronized State refresh() {
final long mtime = configFile.lastModified();
if (mtime != lastModified) {
- try {
- final FileInputStream in = new FileInputStream(configFile);
- try {
- hosts = parse(in);
- } finally {
- in.close();
- }
- } catch (FileNotFoundException none) {
- hosts = Collections.emptyMap();
- } catch (IOException err) {
- hosts = Collections.emptyMap();
+ State newState = new State();
+ try (FileInputStream in = new FileInputStream(configFile)) {
+ newState.entries = parse(in);
+ } catch (IOException none) {
+ // Ignore -- we'll set and return an empty state
}
lastModified = mtime;
+ state = newState;
}
- return hosts;
+ return state;
}
- private Map<String, Host> parse(final InputStream in) throws IOException {
- final Map<String, Host> m = new LinkedHashMap<>();
+ private Map<String, HostEntry> parse(final InputStream in)
+ throws IOException {
+ final Map<String, HostEntry> m = new LinkedHashMap<>();
final BufferedReader br = new BufferedReader(new InputStreamReader(in));
- final List<Host> current = new ArrayList<>(4);
+ final List<HostEntry> current = new ArrayList<>(4);
String line;
+ // The man page doesn't say so, but the OpenSSH parser (readconf.c)
+ // starts out in active mode and thus always applies any lines that
+ // occur before the first host block. We gather those options in a
+ // HostEntry for DEFAULT_NAME.
+ HostEntry defaults = new HostEntry();
+ current.add(defaults);
+ m.put(HostEntry.DEFAULT_NAME, defaults);
+
while ((line = br.readLine()) != null) {
line = line.trim();
- if (line.length() == 0 || line.startsWith("#")) //$NON-NLS-1$
+ if (line.isEmpty() || line.startsWith("#")) { //$NON-NLS-1$
continue;
-
- final String[] parts = line.split("[ \t]*[= \t]", 2); //$NON-NLS-1$
- final String keyword = parts[0].trim();
- final String argValue = parts[1].trim();
+ }
+ String[] parts = line.split("[ \t]*[= \t]", 2); //$NON-NLS-1$
+ // Although the ssh-config man page doesn't say so, the OpenSSH
+ // parser does allow quoted keywords.
+ String keyword = dequote(parts[0].trim());
+ // man 5 ssh-config says lines have the format "keyword arguments",
+ // with no indication that arguments are optional. However, let's
+ // not fail on missing arguments. See bug 444319.
+ String argValue = parts.length > 1 ? parts[1].trim() : ""; //$NON-NLS-1$
if (StringUtils.equalsIgnoreCase("Host", keyword)) { //$NON-NLS-1$
current.clear();
- for (final String pattern : argValue.split("[ \t]")) { //$NON-NLS-1$
- final String name = dequote(pattern);
- Host c = m.get(name);
+ for (String name : HostEntry.parseList(argValue)) {
+ if (name == null || name.isEmpty()) {
+ // null should not occur, but better be safe than sorry.
+ continue;
+ }
+ HostEntry c = m.get(name);
if (c == null) {
- c = new Host();
+ c = new HostEntry();
m.put(name, c);
}
current.add(c);
@@ -206,57 +285,18 @@ private Map<String, Host> parse(final InputStream in) throws IOException {
if (current.isEmpty()) {
// We received an option outside of a Host block. We
// don't know who this should match against, so skip.
- //
continue;
}
- if (StringUtils.equalsIgnoreCase("HostName", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.hostName == null)
- c.hostName = dequote(argValue);
- } else if (StringUtils.equalsIgnoreCase("User", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.user == null)
- c.user = dequote(argValue);
- } else if (StringUtils.equalsIgnoreCase("Port", keyword)) { //$NON-NLS-1$
- try {
- final int port = Integer.parseInt(dequote(argValue));
- for (final Host c : current)
- if (c.port == 0)
- c.port = port;
- } catch (NumberFormatException nfe) {
- // Bad port number. Don't set it.
+ if (HostEntry.isListKey(keyword)) {
+ List<String> args = HostEntry.parseList(argValue);
+ for (HostEntry entry : current) {
+ entry.setValue(keyword, args);
}
- } else if (StringUtils.equalsIgnoreCase("IdentityFile", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.identityFile == null)
- c.identityFile = toFile(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase(
- "PreferredAuthentications", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.preferredAuthentications == null)
- c.preferredAuthentications = nows(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase("BatchMode", keyword)) { //$NON-NLS-1$
- for (final Host c : current)
- if (c.batchMode == null)
- c.batchMode = yesno(dequote(argValue));
- } else if (StringUtils.equalsIgnoreCase(
- "StrictHostKeyChecking", keyword)) { //$NON-NLS-1$
- String value = dequote(argValue);
- for (final Host c : current)
- if (c.strictHostKeyChecking == null)
- c.strictHostKeyChecking = value;
- } else if (StringUtils.equalsIgnoreCase(
- "ConnectionAttempts", keyword)) { //$NON-NLS-1$
- try {
- final int connectionAttempts = Integer.parseInt(dequote(argValue));
- if (connectionAttempts > 0) {
- for (final Host c : current)
- if (c.connectionAttempts == 0)
- c.connectionAttempts = connectionAttempts;
- }
- } catch (NumberFormatException nfe) {
- // ignore bad values
+ } else if (!argValue.isEmpty()) {
+ argValue = dequote(argValue);
+ for (HostEntry entry : current) {
+ entry.setValue(keyword, argValue);
}
}
}
@@ -264,23 +304,35 @@ private Map<String, Host> parse(final InputStream in) throws IOException {
return m;
}
- private static boolean isHostPattern(final String s) {
- return s.indexOf('*') >= 0 || s.indexOf('?') >= 0;
+ private static boolean isHostMatch(final String pattern,
+ final String name) {
+ if (pattern.startsWith("!")) { //$NON-NLS-1$
+ return !patternMatchesHost(pattern.substring(1), name);
+ } else {
+ return patternMatchesHost(pattern, name);
+ }
}
- private static boolean isHostMatch(final String pattern, final String name) {
- final FileNameMatcher fn;
- try {
- fn = new FileNameMatcher(pattern, null);
- } catch (InvalidPatternException e) {
- return false;
+ private static boolean patternMatchesHost(final String pattern,
+ final String name) {
+ if (pattern.indexOf('*') >= 0 || pattern.indexOf('?') >= 0) {
+ final FileNameMatcher fn;
+ try {
+ fn = new FileNameMatcher(pattern, null);
+ } catch (InvalidPatternException e) {
+ return false;
+ }
+ fn.append(name);
+ return fn.isMatch();
+ } else {
+ // Not a pattern but a full host name
+ return pattern.equals(name);
}
- fn.append(name);
- return fn.isMatch();
}
private static String dequote(final String value) {
- if (value.startsWith("\"") && value.endsWith("\"")) //$NON-NLS-1$ //$NON-NLS-2$
+ if (value.startsWith("\"") && value.endsWith("\"") //$NON-NLS-1$ //$NON-NLS-2$
+ && value.length() > 1)
return value.substring(1, value.length() - 1);
return value;
}
@@ -300,24 +352,419 @@ private static Boolean yesno(final String value) {
return Boolean.FALSE;
}
- private File toFile(final String path) {
- if (path.startsWith("~/")) //$NON-NLS-1$
- return new File(home, path.substring(2));
- File ret = new File(path);
- if (ret.isAbsolute())
- return ret;
- return new File(home, path);
+ private static int positive(final String value) {
+ if (value != null) {
+ try {
+ return Integer.parseUnsignedInt(value);
+ } catch (NumberFormatException e) {
+ // Ignore
+ }
+ }
+ return -1;
}
static String userName() {
return AccessController.doPrivileged(new PrivilegedAction<String>() {
@Override
public String run() {
- return System.getProperty("user.name"); //$NON-NLS-1$
+ return SystemReader.getInstance()
+ .getProperty(Constants.OS_USER_NAME_KEY);
}
});
}
+ private static class HostEntry implements ConfigRepository.Config {
+
+ /**
+ * "Host name" of the HostEntry for the default options before the first
+ * host block in a config file.
+ */
+ public static final String DEFAULT_NAME = ""; //$NON-NLS-1$
+
+ // See com.jcraft.jsch.OpenSSHConfig. Translates some command-line keys
+ // to ssh-config keys.
+ private static final Map<String, String> KEY_MAP = new HashMap<>();
+
+ static {
+ KEY_MAP.put("kex", "KexAlgorithms"); //$NON-NLS-1$//$NON-NLS-2$
+ KEY_MAP.put("server_host_key", "HostKeyAlgorithms"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("cipher.c2s", "Ciphers"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("cipher.s2c", "Ciphers"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("mac.c2s", "Macs"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("mac.s2c", "Macs"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression.s2c", "Compression"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression.c2s", "Compression"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("compression_level", "CompressionLevel"); //$NON-NLS-1$ //$NON-NLS-2$
+ KEY_MAP.put("MaxAuthTries", "NumberOfPasswordPrompts"); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
+ /**
+ * Keys that can be specified multiple times, building up a list. (I.e.,
+ * those are the keys that do not follow the general rule of "first
+ * occurrence wins".)
+ */
+ private static final Set<String> MULTI_KEYS = new HashSet<>();
+
+ static {
+ MULTI_KEYS.add("CERTIFICATEFILE"); //$NON-NLS-1$
+ MULTI_KEYS.add("IDENTITYFILE"); //$NON-NLS-1$
+ MULTI_KEYS.add("LOCALFORWARD"); //$NON-NLS-1$
+ MULTI_KEYS.add("REMOTEFORWARD"); //$NON-NLS-1$
+ MULTI_KEYS.add("SENDENV"); //$NON-NLS-1$
+ }
+
+ /**
+ * Keys that take a whitespace-separated list of elements as argument.
+ * Because the dequote-handling is different, we must handle those in
+ * the parser. There are a few other keys that take comma-separated
+ * lists as arguments, but for the parser those are single arguments
+ * that must be quoted if they contain whitespace, and taking them apart
+ * is the responsibility of the user of those keys.
+ */
+ private static final Set<String> LIST_KEYS = new HashSet<>();
+
+ static {
+ LIST_KEYS.add("CANONICALDOMAINS"); //$NON-NLS-1$
+ LIST_KEYS.add("GLOBALKNOWNHOSTSFILE"); //$NON-NLS-1$
+ LIST_KEYS.add("SENDENV"); //$NON-NLS-1$
+ LIST_KEYS.add("USERKNOWNHOSTSFILE"); //$NON-NLS-1$
+ }
+
+ private Map<String, String> options;
+
+ private Map<String, List<String>> multiOptions;
+
+ private Map<String, List<String>> listOptions;
+
+ @Override
+ public String getHostname() {
+ return getValue("HOSTNAME"); //$NON-NLS-1$
+ }
+
+ @Override
+ public String getUser() {
+ return getValue("USER"); //$NON-NLS-1$
+ }
+
+ @Override
+ public int getPort() {
+ return positive(getValue("PORT")); //$NON-NLS-1$
+ }
+
+ private static String mapKey(String key) {
+ String k = KEY_MAP.get(key);
+ if (k == null) {
+ k = key;
+ }
+ return k.toUpperCase(Locale.ROOT);
+ }
+
+ private String findValue(String key) {
+ String k = mapKey(key);
+ String result = options != null ? options.get(k) : null;
+ if (result == null) {
+ // Also check the list and multi options. Modern OpenSSH treats
+ // UserKnownHostsFile and GlobalKnownHostsFile as list-valued,
+ // and so does this parser. Jsch 0.1.54 in general doesn't know
+ // about list-valued options (it _does_ know multi-valued
+ // options, though), and will ask for a single value for such
+ // options.
+ //
+ // Let's be lenient and return at least the first value from
+ // a list-valued or multi-valued key for which Jsch asks for a
+ // single value.
+ List<String> values = listOptions != null ? listOptions.get(k)
+ : null;
+ if (values == null) {
+ values = multiOptions != null ? multiOptions.get(k) : null;
+ }
+ if (values != null && !values.isEmpty()) {
+ result = values.get(0);
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public String getValue(String key) {
+ // See com.jcraft.jsch.OpenSSHConfig.MyConfig.getValue() for this
+ // special case.
+ if (key.equals("compression.s2c") //$NON-NLS-1$
+ || key.equals("compression.c2s")) { //$NON-NLS-1$
+ String foo = findValue(key);
+ if (foo == null || foo.equals("no")) { //$NON-NLS-1$
+ return "none,zlib@openssh.com,zlib"; //$NON-NLS-1$
+ }
+ return "zlib@openssh.com,zlib,none"; //$NON-NLS-1$
+ }
+ return findValue(key);
+ }
+
+ @Override
+ public String[] getValues(String key) {
+ String k = mapKey(key);
+ List<String> values = listOptions != null ? listOptions.get(k)
+ : null;
+ if (values == null) {
+ values = multiOptions != null ? multiOptions.get(k) : null;
+ }
+ if (values == null || values.isEmpty()) {
+ return new String[0];
+ }
+ return values.toArray(new String[values.size()]);
+ }
+
+ public void setValue(String key, String value) {
+ String k = key.toUpperCase(Locale.ROOT);
+ if (MULTI_KEYS.contains(k)) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>();
+ }
+ List<String> values = multiOptions.get(k);
+ if (values == null) {
+ values = new ArrayList<>(4);
+ multiOptions.put(k, values);
+ }
+ values.add(value);
+ } else {
+ if (options == null) {
+ options = new HashMap<>();
+ }
+ if (!options.containsKey(k)) {
+ options.put(k, value);
+ }
+ }
+ }
+
+ public void setValue(String key, List<String> values) {
+ if (values.isEmpty()) {
+ // Can occur only on a missing argument: ignore.
+ return;
+ }
+ String k = key.toUpperCase(Locale.ROOT);
+ // Check multi-valued keys first; because of the replacement
+ // strategy, they must take precedence over list-valued keys
+ // which always follow the "first occurrence wins" strategy.
+ //
+ // Note that SendEnv is a multi-valued list-valued key. (It's
+ // rather immaterial for JGit, though.)
+ if (MULTI_KEYS.contains(k)) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>(2 * MULTI_KEYS.size());
+ }
+ List<String> items = multiOptions.get(k);
+ if (items == null) {
+ items = new ArrayList<>(values);
+ multiOptions.put(k, items);
+ } else {
+ items.addAll(values);
+ }
+ } else {
+ if (listOptions == null) {
+ listOptions = new HashMap<>(2 * LIST_KEYS.size());
+ }
+ if (!listOptions.containsKey(k)) {
+ listOptions.put(k, values);
+ }
+ }
+ }
+
+ public static boolean isListKey(String key) {
+ return LIST_KEYS.contains(key.toUpperCase(Locale.ROOT));
+ }
+
+ /**
+ * Splits the argument into a list of whitespace-separated elements.
+ * Elements containing whitespace must be quoted and will be de-quoted.
+ *
+ * @param argument
+ * argument part of the configuration line as read from the
+ * config file
+ * @return a {@link List} of elements, possibly empty and possibly
+ * containing empty elements
+ */
+ public static List<String> parseList(String argument) {
+ List<String> result = new ArrayList<>(4);
+ int start = 0;
+ int length = argument.length();
+ while (start < length) {
+ // Skip whitespace
+ if (Character.isSpaceChar(argument.charAt(start))) {
+ start++;
+ continue;
+ }
+ if (argument.charAt(start) == '"') {
+ int stop = argument.indexOf('"', ++start);
+ if (stop < start) {
+ // No closing double quote: skip
+ break;
+ }
+ result.add(argument.substring(start, stop));
+ start = stop + 1;
+ } else {
+ int stop = start + 1;
+ while (stop < length
+ && !Character.isSpaceChar(argument.charAt(stop))) {
+ stop++;
+ }
+ result.add(argument.substring(start, stop));
+ start = stop + 1;
+ }
+ }
+ return result;
+ }
+
+ protected void merge(HostEntry entry) {
+ if (entry == null) {
+ // Can occur if we could not read the config file
+ return;
+ }
+ if (entry.options != null) {
+ if (options == null) {
+ options = new HashMap<>();
+ }
+ for (Map.Entry<String, String> item : entry.options
+ .entrySet()) {
+ if (!options.containsKey(item.getKey())) {
+ options.put(item.getKey(), item.getValue());
+ }
+ }
+ }
+ if (entry.listOptions != null) {
+ if (listOptions == null) {
+ listOptions = new HashMap<>(2 * LIST_KEYS.size());
+ }
+ for (Map.Entry<String, List<String>> item : entry.listOptions
+ .entrySet()) {
+ if (!listOptions.containsKey(item.getKey())) {
+ listOptions.put(item.getKey(), item.getValue());
+ }
+ }
+
+ }
+ if (entry.multiOptions != null) {
+ if (multiOptions == null) {
+ multiOptions = new HashMap<>(2 * MULTI_KEYS.size());
+ }
+ for (Map.Entry<String, List<String>> item : entry.multiOptions
+ .entrySet()) {
+ List<String> values = multiOptions.get(item.getKey());
+ if (values == null) {
+ values = new ArrayList<>(item.getValue());
+ multiOptions.put(item.getKey(), values);
+ } else {
+ values.addAll(item.getValue());
+ }
+ }
+ }
+ }
+
+ private class Replacer {
+ private final Map<Character, String> replacements = new HashMap<>();
+
+ public Replacer(String originalHostName, File home) {
+ replacements.put(Character.valueOf('%'), "%"); //$NON-NLS-1$
+ replacements.put(Character.valueOf('d'), home.getPath());
+ // Needs special treatment...
+ String host = getValue("HOSTNAME"); //$NON-NLS-1$
+ replacements.put(Character.valueOf('h'), originalHostName);
+ if (host != null && host.indexOf('%') >= 0) {
+ host = substitute(host, "h"); //$NON-NLS-1$
+ options.put("HOSTNAME", host); //$NON-NLS-1$
+ }
+ if (host != null) {
+ replacements.put(Character.valueOf('h'), host);
+ }
+ String localhost = SystemReader.getInstance().getHostname();
+ replacements.put(Character.valueOf('l'), localhost);
+ int period = localhost.indexOf('.');
+ if (period > 0) {
+ localhost = localhost.substring(0, period);
+ }
+ replacements.put(Character.valueOf('L'), localhost);
+ replacements.put(Character.valueOf('n'), originalHostName);
+ replacements.put(Character.valueOf('p'), getValue("PORT")); //$NON-NLS-1$
+ replacements.put(Character.valueOf('r'), getValue("USER")); //$NON-NLS-1$
+ replacements.put(Character.valueOf('u'), userName());
+ replacements.put(Character.valueOf('C'),
+ substitute("%l%h%p%r", "hlpr")); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+
+ public String substitute(String input, String allowed) {
+ if (input == null || input.length() <= 1
+ || input.indexOf('%') < 0) {
+ return input;
+ }
+ StringBuilder builder = new StringBuilder();
+ int start = 0;
+ int length = input.length();
+ while (start < length) {
+ int percent = input.indexOf('%', start);
+ if (percent < 0 || percent + 1 >= length) {
+ builder.append(input.substring(start));
+ break;
+ }
+ String replacement = null;
+ char ch = input.charAt(percent + 1);
+ if (ch == '%' || allowed.indexOf(ch) >= 0) {
+ replacement = replacements.get(Character.valueOf(ch));
+ }
+ if (replacement == null) {
+ builder.append(input.substring(start, percent + 2));
+ } else {
+ builder.append(input.substring(start, percent))
+ .append(replacement);
+ }
+ start = percent + 2;
+ }
+ return builder.toString();
+ }
+ }
+
+ private List<String> substitute(List<String> values, String allowed,
+ Replacer r) {
+ List<String> result = new ArrayList<>(values.size());
+ for (String value : values) {
+ result.add(r.substitute(value, allowed));
+ }
+ return result;
+ }
+
+ protected void substitute(String originalHostName, File home) {
+ Replacer r = new Replacer(originalHostName, home);
+ if (multiOptions != null) {
+ List<String> values = multiOptions.get("IDENTITYFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = substitute(values, "dhlru", r); //$NON-NLS-1$
+ multiOptions.put("IDENTITYFILE", values); //$NON-NLS-1$
+ }
+ values = multiOptions.get("CERTIFICATEFILE"); //$NON-NLS-1$
+ if (values != null) {
+ values = substitute(values, "dhlru", r); //$NON-NLS-1$
+ multiOptions.put("CERTIFICATEFILE", values); //$NON-NLS-1$
+ }
+ }
+ if (options != null) {
+ // HOSTNAME already done in Replacer constructor
+ String value = options.get("IDENTITYAGENT"); //$NON-NLS-1$
+ if (value != null) {
+ value = r.substitute(value, "dhlru"); //$NON-NLS-1$
+ options.put("IDENTITYAGENT", value); //$NON-NLS-1$
+ }
+ }
+ // Match is not implemented and would need to be done elsewhere
+ // anyway. ControlPath, LocalCommand, ProxyCommand, and
+ // RemoteCommand are not used by Jsch.
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "HostEntry [options=" + options + ", multiOptions="
+ + multiOptions + ", listOptions=" + listOptions + "]";
+ }
+ }
+
/**
* Configuration of one "Host" block in the configuration file.
* <p>
@@ -330,8 +777,6 @@ public String run() {
* already merged into this block.
*/
public static class Host {
- boolean patternsApplied;
-
String hostName;
int port;
@@ -348,23 +793,18 @@ public static class Host {
int connectionAttempts;
- void copyFrom(final Host src) {
- if (hostName == null)
- hostName = src.hostName;
- if (port == 0)
- port = src.port;
- if (identityFile == null)
- identityFile = src.identityFile;
- if (user == null)
- user = src.user;
- if (preferredAuthentications == null)
- preferredAuthentications = src.preferredAuthentications;
- if (batchMode == null)
- batchMode = src.batchMode;
- if (strictHostKeyChecking == null)
- strictHostKeyChecking = src.strictHostKeyChecking;
- if (connectionAttempts == 0)
- connectionAttempts = src.connectionAttempts;
+ private Config config;
+
+ /**
+ * Creates a new uninitialized {@link Host}.
+ */
+ public Host() {
+ // For API backwards compatibility with pre-4.9 JGit
+ }
+
+ Host(Config config, String hostName, File homeDir) {
+ this.config = config;
+ complete(hostName, homeDir);
}
/**
@@ -432,5 +872,89 @@ public boolean isBatchMode() {
public int getConnectionAttempts() {
return connectionAttempts;
}
+
+
+ private void complete(String initialHostName, File homeDir) {
+ // Try to set values from the options.
+ hostName = config.getHostname();
+ user = config.getUser();
+ port = config.getPort();
+ connectionAttempts = positive(
+ config.getValue("ConnectionAttempts")); //$NON-NLS-1$
+ strictHostKeyChecking = config.getValue("StrictHostKeyChecking"); //$NON-NLS-1$
+ String value = config.getValue("BatchMode"); //$NON-NLS-1$
+ if (value != null) {
+ batchMode = yesno(value);
+ }
+ value = config.getValue("PreferredAuthentications"); //$NON-NLS-1$
+ if (value != null) {
+ preferredAuthentications = nows(value);
+ }
+ // Fill in defaults if still not set
+ if (hostName == null) {
+ hostName = initialHostName;
+ }
+ if (user == null) {
+ user = OpenSshConfig.userName();
+ }
+ if (port <= 0) {
+ port = OpenSshConfig.SSH_PORT;
+ }
+ if (connectionAttempts <= 0) {
+ connectionAttempts = 1;
+ }
+ String[] identityFiles = config.getValues("IdentityFile"); //$NON-NLS-1$
+ if (identityFiles != null && identityFiles.length > 0) {
+ identityFile = toFile(identityFiles[0], homeDir);
+ }
+ }
+
+ private File toFile(String path, File home) {
+ if (path.startsWith("~/")) { //$NON-NLS-1$
+ return new File(home, path.substring(2));
+ }
+ File ret = new File(path);
+ if (ret.isAbsolute()) {
+ return ret;
+ }
+ return new File(home, path);
+ }
+
+ Config getConfig() {
+ return config;
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "Host [hostName=" + hostName + ", port=" + port
+ + ", identityFile=" + identityFile + ", user=" + user
+ + ", preferredAuthentications=" + preferredAuthentications
+ + ", batchMode=" + batchMode + ", strictHostKeyChecking="
+ + strictHostKeyChecking + ", connectionAttempts="
+ + connectionAttempts + ", config=" + config + "]";
+ }
+ }
+
+ /**
+ * Retrieves the full {@link com.jcraft.jsch.ConfigRepository.Config Config}
+ * for the given host name.
+ *
+ * @param hostName
+ * to get the config for
+ * @return the configuration for the host
+ * @since 4.9
+ */
+ @Override
+ public Config getConfig(String hostName) {
+ Host host = lookup(hostName);
+ return host.getConfig();
+ }
+
+ @Override
+ @SuppressWarnings("nls")
+ public String toString() {
+ return "OpenSshConfig [home=" + home + ", configFile=" + configFile
+ + ", lastModified=" + lastModified + ", state=" + state + "]";
}
}
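
To make the relationship between the two OpenSshConfig APIs concrete, here is a rough usage sketch (the host block is hypothetical and not part of this change): lookup() returns the validated Host view with defaults filled in, while the new getConfig() exposes the raw JSch ConfigRepository.Config with %-substitutions applied.

```java
import org.eclipse.jgit.transport.OpenSshConfig;
import org.eclipse.jgit.util.FS;

class SshConfigDemo {
	// Assumes ~/.ssh/config contains e.g.:
	//   Host work
	//     HostName %h.example.com
	//     User builder
	static void demo() {
		OpenSshConfig config = OpenSshConfig.get(FS.DETECTED);
		// Validated view used by SshSessionFactory; port falls back to 22.
		OpenSshConfig.Host host = config.lookup("work");
		int port = host.getPort();
		// Raw JSch view; HostName has had %h replaced with "work".
		String hostName = config.getConfig("work").getHostname();
	}
}
```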
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
index 19dfa34..2f6b271 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackParser.java
@@ -82,6 +82,7 @@
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.util.BlockList;
import org.eclipse.jgit.util.IO;
+import org.eclipse.jgit.util.LongMap;
import org.eclipse.jgit.util.NB;
import org.eclipse.jgit.util.sha1.SHA1;
@@ -550,29 +551,7 @@ public PackLock parse(ProgressMonitor receiving, ProgressMonitor resolving)
}
if (deltaCount > 0) {
- if (resolving instanceof BatchingProgressMonitor) {
- ((BatchingProgressMonitor) resolving).setDelayStart(
- 1000,
- TimeUnit.MILLISECONDS);
- }
- resolving.beginTask(JGitText.get().resolvingDeltas, deltaCount);
- resolveDeltas(resolving);
- if (entryCount < expectedObjectCount) {
- if (!isAllowThin()) {
- throw new IOException(MessageFormat.format(
- JGitText.get().packHasUnresolvedDeltas,
- Long.valueOf(expectedObjectCount - entryCount)));
- }
-
- resolveDeltasWithExternalBases(resolving);
-
- if (entryCount < expectedObjectCount) {
- throw new IOException(MessageFormat.format(
- JGitText.get().packHasUnresolvedDeltas,
- Long.valueOf(expectedObjectCount - entryCount)));
- }
- }
- resolving.endTask();
+ processDeltas(resolving);
}
packDigest = null;
@@ -595,6 +574,31 @@ public PackLock parse(ProgressMonitor receiving, ProgressMonitor resolving)
return null; // By default there is no locking.
}
+ private void processDeltas(ProgressMonitor resolving) throws IOException {
+ if (resolving instanceof BatchingProgressMonitor) {
+ ((BatchingProgressMonitor) resolving).setDelayStart(1000,
+ TimeUnit.MILLISECONDS);
+ }
+ resolving.beginTask(JGitText.get().resolvingDeltas, deltaCount);
+ resolveDeltas(resolving);
+ if (entryCount < expectedObjectCount) {
+ if (!isAllowThin()) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().packHasUnresolvedDeltas,
+ Long.valueOf(expectedObjectCount - entryCount)));
+ }
+
+ resolveDeltasWithExternalBases(resolving);
+
+ if (entryCount < expectedObjectCount) {
+ throw new IOException(MessageFormat.format(
+ JGitText.get().packHasUnresolvedDeltas,
+ Long.valueOf(expectedObjectCount - entryCount)));
+ }
+ }
+ resolving.endTask();
+ }
+
private void resolveDeltas(final ProgressMonitor progress)
throws IOException {
final int last = entryCount;
@@ -684,6 +688,7 @@ private void resolveDeltas(DeltaVisit visit, final int type,
PackedObjectInfo oe;
oe = newInfo(tempObjectId, visit.delta, visit.parent.id);
oe.setOffset(visit.delta.position);
+ oe.setType(type);
onInflatedObjectData(oe, type, visit.data);
addObjectAndTrack(oe);
visit.id = oe;
@@ -854,10 +859,9 @@ private void resolveDeltasWithExternalBases(final ProgressMonitor progress)
visit.id = baseId;
final int typeCode = ldr.getType();
final PackedObjectInfo oe = newInfo(baseId, null, null);
-
+ oe.setType(typeCode);
if (onAppendBase(typeCode, visit.data, oe))
entries[entryCount++] = oe;
-
visit.nextChild = firstChildOf(oe);
resolveDeltas(visit.next(), typeCode,
new ObjectTypeAndSize(), progress);
@@ -1059,6 +1063,7 @@ private void whole(final long pos, final int type, final long sz)
PackedObjectInfo obj = newInfo(tempObjectId, null, null);
obj.setOffset(pos);
+ obj.setType(type);
onEndWholeObject(obj);
if (data != null)
onInflatedObjectData(obj, type, data);
@@ -1069,8 +1074,21 @@ private void whole(final long pos, final int type, final long sz)
}
}
- private void verifySafeObject(final AnyObjectId id, final int type,
- final byte[] data) throws IOException {
+ /**
+ * Verify the integrity of the object.
+ *
+ * @param id
+ * identity of the object to be checked.
+ * @param type
+ * the type of the object.
+ * @param data
+ * raw content of the object.
+ * @throws CorruptObjectException
+ * if the object is invalid.
+ * @since 4.9
+ */
+ protected void verifySafeObject(final AnyObjectId id, final int type,
+ final byte[] data) throws CorruptObjectException {
if (objCheck != null) {
try {
objCheck.check(id, type, data);
@@ -1078,11 +1096,11 @@ private void verifySafeObject(final AnyObjectId id, final int type,
if (e.getErrorType() != null) {
throw e;
}
- throw new CorruptObjectException(MessageFormat.format(
- JGitText.get().invalidObject,
- Constants.typeString(type),
- id.name(),
- e.getMessage()), e);
+ throw new CorruptObjectException(
+ MessageFormat.format(JGitText.get().invalidObject,
+ Constants.typeString(type), id.name(),
+ e.getMessage()),
+ e);
}
}
}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
index 6da1c57..381c228 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/PackedObjectInfo.java
@@ -45,6 +45,7 @@
package org.eclipse.jgit.transport;
import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectIdOwnerMap;
/**
@@ -59,6 +60,8 @@ public class PackedObjectInfo extends ObjectIdOwnerMap.Entry {
private int crc;
+ private int type = Constants.OBJ_BAD;
+
PackedObjectInfo(final long headerOffset, final int packedCRC,
final AnyObjectId id) {
super(id);
@@ -112,4 +115,24 @@ public int getCRC() {
public void setCRC(final int crc) {
this.crc = crc;
}
+
+ /**
+ * @return the object type. The default type is OBJ_BAD, which is
+ * considered an unknown or invalid type.
+ * @since 4.9
+ */
+ public int getType() {
+ return type;
+ }
+
+ /**
+ * Record the object type if applicable.
+ *
+ * @param type
+ * the object type.
+ * @since 4.9
+ */
+ public void setType(int type) {
+ this.type = type;
+ }
}
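
A brief, hedged illustration of why the type is now recorded: downstream consumers of PackedObjectInfo (for example a PackParser subclass) can query the type directly instead of re-reading the object header; the helper below is hypothetical.

```java
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.transport.PackedObjectInfo;

class TypeCheck {
	/** True if the parsed entry was recorded as a commit; OBJ_BAD means unset. */
	static boolean isCommit(PackedObjectInfo info) {
		return info.getType() == Constants.OBJ_COMMIT;
	}
}
```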
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
index a3f7501..14b35c9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/ReceiveCommand.java
@@ -52,6 +52,7 @@
import java.util.Collection;
import java.util.List;
+import org.eclipse.jgit.annotations.Nullable;
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
@@ -190,6 +191,20 @@ public static void abort(Iterable<ReceiveCommand> commands) {
}
}
+ /**
+ * Check whether a command failed because its transaction was aborted.
+ *
+ * @param cmd
+ * command.
+ * @return whether the command failed because its transaction was aborted, as
+ * in {@link #abort(Iterable)}.
+ * @since 4.9
+ */
+ public static boolean isTransactionAborted(ReceiveCommand cmd) {
+ return cmd.getResult() == REJECTED_OTHER_REASON
+ && cmd.getMessage().equals(JGitText.get().transactionAborted);
+ }
+
private final ObjectId oldId;
private final ObjectId newId;
@@ -204,6 +219,12 @@ public static void abort(Iterable<ReceiveCommand> commands) {
private String message;
+ private boolean customRefLog;
+
+ private String refLogMessage;
+
+ private boolean refLogIncludeResult;
+
private boolean typeIsCorrect;
/**
@@ -329,6 +350,90 @@ public String getMessage() {
}
/**
+ * Set the message to include in the reflog.
+ * <p>
+ * Overrides the default set by {@code setRefLogMessage} on any containing
+ * {@link org.eclipse.jgit.lib.BatchRefUpdate}.
+ *
+ * @param msg
+ * the message to describe this change. If null and appendStatus is
+ * false, the reflog will not be updated.
+ * @param appendStatus
+ * true if the status of the ref change (fast-forward or
+ * forced-update) should be appended to the user supplied message.
+ * @since 4.9
+ */
+ public void setRefLogMessage(String msg, boolean appendStatus) {
+ customRefLog = true;
+ if (msg == null && !appendStatus) {
+ disableRefLog();
+ } else if (msg == null && appendStatus) {
+ refLogMessage = ""; //$NON-NLS-1$
+ refLogIncludeResult = true;
+ } else {
+ refLogMessage = msg;
+ refLogIncludeResult = appendStatus;
+ }
+ }
+
+ /**
+ * Don't record this update in the ref's associated reflog.
+ * <p>
+ * Equivalent to {@code setRefLogMessage(null, false)}.
+ *
+ * @since 4.9
+ */
+ public void disableRefLog() {
+ customRefLog = true;
+ refLogMessage = null;
+ refLogIncludeResult = false;
+ }
+
+ /**
+ * Check whether this command has a custom reflog setting that should override
+ * defaults in any containing {@link org.eclipse.jgit.lib.BatchRefUpdate}.
+ *
+ * @return whether a custom reflog is set.
+ * @since 4.9
+ */
+ public boolean hasCustomRefLog() {
+ return customRefLog;
+ }
+
+ /**
+ * Check whether the reflog has been disabled by {@link #disableRefLog()}.
+ *
+ * @return true if disabled.
+ * @since 4.9
+ */
+ public boolean isRefLogDisabled() {
+ return refLogMessage == null;
+ }
+
+ /**
+ * Get the message to include in the reflog.
+ *
+ * @return message the caller wants to include in the reflog; null if the
+ * update should not be logged.
+ * @since 4.9
+ */
+ @Nullable
+ public String getRefLogMessage() {
+ return refLogMessage;
+ }
+
+ /**
+ * Check whether the reflog message should include the result of the update,
+ * such as fast-forward or force-update.
+ *
+ * @return true if the message should include the result.
+ * @since 4.9
+ */
+ public boolean isRefLogIncludingResult() {
+ return refLogIncludeResult;
+ }
+
+ /**
* Set the status of this command.
*
* @param s
@@ -394,6 +499,7 @@ public void execute(final BaseReceivePack rp) {
try {
final RefUpdate ru = rp.getRepository().updateRef(getRefName());
ru.setRefLogIdent(rp.getRefLogIdent());
+ ru.setRefLogMessage(refLogMessage, refLogIncludeResult);
switch (getType()) {
case DELETE:
if (!ObjectId.zeroId().equals(getOldId())) {
@@ -467,6 +573,14 @@ public void setResult(RefUpdate.Result r) {
setResult(Result.REJECTED_CURRENT_BRANCH);
break;
+ case REJECTED_MISSING_OBJECT:
+ setResult(Result.REJECTED_MISSING_OBJECT);
+ break;
+
+ case REJECTED_OTHER_REASON:
+ setResult(Result.REJECTED_OTHER_REASON);
+ break;
+
default:
setResult(Result.REJECTED_OTHER_REASON, r.name());
break;
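
As a hedged sketch of the new per-command reflog control (the ref namespace and message below are made up for illustration), a ReceivePack hook could override the batch-level default like this:

```java
import org.eclipse.jgit.transport.ReceiveCommand;

class ReflogPolicy {
	/** Suppress reflogs for throwaway refs, otherwise log with the result appended. */
	static void apply(ReceiveCommand cmd) {
		if (cmd.getRefName().startsWith("refs/tmp/")) {
			cmd.disableRefLog();
		} else {
			cmd.setRefLogMessage("push", true);
		}
	}
}
```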
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
index d91684e..a0d81c0 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteConfig.java
@@ -170,16 +170,28 @@ public RemoteConfig(final Config rc, final String remoteName)
vlst = rc.getStringList(SECTION, name, KEY_URL);
Map<String, String> insteadOf = getReplacements(rc, KEY_INSTEADOF);
uris = new ArrayList<>(vlst.length);
- for (final String s : vlst)
+ for (final String s : vlst) {
uris.add(new URIish(replaceUri(s, insteadOf)));
-
- Map<String, String> pushInsteadOf = getReplacements(rc,
- KEY_PUSHINSTEADOF);
- vlst = rc.getStringList(SECTION, name, KEY_PUSHURL);
- pushURIs = new ArrayList<>(vlst.length);
- for (final String s : vlst)
- pushURIs.add(new URIish(replaceUri(s, pushInsteadOf)));
-
+ }
+ String[] plst = rc.getStringList(SECTION, name, KEY_PUSHURL);
+ pushURIs = new ArrayList<>(plst.length);
+ for (final String s : plst) {
+ pushURIs.add(new URIish(s));
+ }
+ if (pushURIs.isEmpty()) {
+ // Would default to the uris. If we have pushinsteadof, we must
+ // supply rewritten push uris.
+ Map<String, String> pushInsteadOf = getReplacements(rc,
+ KEY_PUSHINSTEADOF);
+ if (!pushInsteadOf.isEmpty()) {
+ for (String s : vlst) {
+ String replaced = replaceUri(s, pushInsteadOf);
+ if (!s.equals(replaced)) {
+ pushURIs.add(new URIish(replaced));
+ }
+ }
+ }
+ }
vlst = rc.getStringList(SECTION, name, KEY_FETCH);
fetch = new ArrayList<>(vlst.length);
for (final String s : vlst)
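
To illustrate the behavioral intent of the pushInsteadOf handling above, a hedged sketch (remote name and URLs are hypothetical): when no pushurl is configured but a matching url.<base>.pushInsteadOf exists, the push URIs are now the rewritten fetch URIs.

```java
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.transport.RemoteConfig;
import org.eclipse.jgit.transport.URIish;

class PushInsteadOfDemo {
	// Assumes cfg contains something like:
	//   [remote "origin"]
	//     url = https://example.com/repo.git
	//   [url "ssh://example.com/"]
	//     pushInsteadOf = https://example.com/
	static URIish firstPushUri(Config cfg) throws Exception {
		RemoteConfig remote = new RemoteConfig(cfg, "origin");
		// Expected to be ssh://example.com/repo.git under the assumed config.
		return remote.getPushURIs().get(0);
	}
}
```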
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
index 5a73cf5..d6a2fe6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/RemoteSession.java
@@ -83,4 +83,4 @@ public interface RemoteSession {
* Disconnect the remote session
*/
public void disconnect();
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
index 83b4aca..1ecbed9 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/SignedPushConfig.java
@@ -54,12 +54,7 @@
public class SignedPushConfig {
/** Key for {@link Config#get(SectionParser)}. */
public static final SectionParser<SignedPushConfig> KEY =
- new SectionParser<SignedPushConfig>() {
- @Override
- public SignedPushConfig parse(Config cfg) {
- return new SignedPushConfig(cfg);
- }
- };
+ SignedPushConfig::new;
private String certNonceSeed;
private int certNonceSlopLimit;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
index d4cd1c3..099629c 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransferConfig.java
@@ -69,12 +69,8 @@ public class TransferConfig {
private static final String FSCK = "fsck"; //$NON-NLS-1$
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<TransferConfig> KEY = new SectionParser<TransferConfig>() {
- @Override
- public TransferConfig parse(final Config cfg) {
- return new TransferConfig(cfg);
- }
- };
+ public static final Config.SectionParser<TransferConfig> KEY =
+ TransferConfig::new;
/**
* A git configuration value for how to handle a fsck failure of a particular kind.
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
index 26a254d..2661212 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/TransportHttp.java
@@ -2,6 +2,7 @@
* Copyright (C) 2008-2010, Google Inc.
* Copyright (C) 2008, Shawn O. Pearce <spearce@spearce.org>
* Copyright (C) 2013, Matthias Sohn <matthias.sohn@sap.com>
+ * Copyright (C) 2017, Thomas Wolf <thomas.wolf@paranor.ch>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -69,6 +70,7 @@
import java.net.MalformedURLException;
import java.net.Proxy;
import java.net.ProxySelector;
+import java.net.URISyntaxException;
import java.net.URL;
import java.text.MessageFormat;
import java.util.ArrayList;
@@ -78,9 +80,11 @@
import java.util.EnumSet;
import java.util.HashSet;
import java.util.LinkedHashSet;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
+import java.util.function.Supplier;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@@ -91,7 +95,6 @@
import org.eclipse.jgit.internal.JGitText;
import org.eclipse.jgit.internal.storage.file.RefDirectory;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Config.SectionParser;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef;
@@ -104,9 +107,12 @@
import org.eclipse.jgit.util.HttpSupport;
import org.eclipse.jgit.util.IO;
import org.eclipse.jgit.util.RawParseUtils;
+import org.eclipse.jgit.util.SystemReader;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.eclipse.jgit.util.io.UnionInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Transport over HTTP and FTP protocols.
@@ -127,10 +133,37 @@
public class TransportHttp extends HttpTransport implements WalkTransport,
PackTransport {
+ private static final Logger LOG = LoggerFactory
+ .getLogger(TransportHttp.class);
+
private static final String SVC_UPLOAD_PACK = "git-upload-pack"; //$NON-NLS-1$
private static final String SVC_RECEIVE_PACK = "git-receive-pack"; //$NON-NLS-1$
+ private static final String MAX_REDIRECT_SYSTEM_PROPERTY = "http.maxRedirects"; //$NON-NLS-1$
+
+ private static final int DEFAULT_MAX_REDIRECTS = 5;
+
+ private static final int MAX_REDIRECTS = (new Supplier<Integer>() {
+
+ @Override
+ public Integer get() {
+ String rawValue = SystemReader.getInstance()
+ .getProperty(MAX_REDIRECT_SYSTEM_PROPERTY);
+ Integer value = Integer.valueOf(DEFAULT_MAX_REDIRECTS);
+ if (rawValue != null) {
+ try {
+ value = Integer.valueOf(Integer.parseUnsignedInt(rawValue));
+ } catch (NumberFormatException e) {
+ LOG.warn(MessageFormat.format(
+ JGitText.get().invalidSystemProperty,
+ MAX_REDIRECT_SYSTEM_PROPERTY, rawValue, value));
+ }
+ }
+ return value;
+ }
+ }).get().intValue();
+
/**
* Accept-Encoding header in the HTTP request
* (https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html).
@@ -231,21 +264,58 @@ public Transport open(URIish uri, Repository local, String remoteName)
}
};
- private static final Config.SectionParser<HttpConfig> HTTP_KEY = new SectionParser<HttpConfig>() {
- @Override
- public HttpConfig parse(final Config cfg) {
- return new HttpConfig(cfg);
+ /**
+ * Config values for http.followRedirects
+ */
+ private static enum HttpRedirectMode implements Config.ConfigEnum {
+
+ /** Always follow redirects (up to the http.maxRedirects limit). */
+ TRUE("true"), //$NON-NLS-1$
+ /**
+ * Only follow redirects on the initial GET request. This is the
+ * default.
+ */
+ INITIAL("initial"), //$NON-NLS-1$
+ /** Never follow redirects. */
+ FALSE("false"); //$NON-NLS-1$
+
+ private final String configValue;
+
+ private HttpRedirectMode(String configValue) {
+ this.configValue = configValue;
}
- };
+
+ @Override
+ public String toConfigValue() {
+ return configValue;
+ }
+
+ @Override
+ public boolean matchConfigValue(String s) {
+ return configValue.equals(s);
+ }
+ }
private static class HttpConfig {
final int postBuffer;
final boolean sslVerify;
+ final HttpRedirectMode followRedirects;
+
+ final int maxRedirects;
+
HttpConfig(final Config rc) {
postBuffer = rc.getInt("http", "postbuffer", 1 * 1024 * 1024); //$NON-NLS-1$ //$NON-NLS-2$
sslVerify = rc.getBoolean("http", "sslVerify", true); //$NON-NLS-1$ //$NON-NLS-2$
+ followRedirects = rc.getEnum(HttpRedirectMode.values(), "http", //$NON-NLS-1$
+ null, "followRedirects", HttpRedirectMode.INITIAL); //$NON-NLS-1$
+ int redirectLimit = rc.getInt("http", "maxRedirects", //$NON-NLS-1$ //$NON-NLS-2$
+ MAX_REDIRECTS);
+ if (redirectLimit < 0) {
+ redirectLimit = MAX_REDIRECTS;
+ }
+ maxRedirects = redirectLimit;
}
HttpConfig() {
@@ -253,9 +323,16 @@ private static class HttpConfig {
}
}
- final URL baseUrl;
+ /**
+ * The current URI we're talking to. The inherited (final) field
+ * {@link #uri} stores the original URI; {@code currentUri} may be different
+ * after redirects.
+ */
+ private URIish currentUri;
- private final URL objectsUrl;
+ private URL baseUrl;
+
+ private URL objectsUrl;
final HttpConfig http;
@@ -270,17 +347,32 @@ private static class HttpConfig {
TransportHttp(final Repository local, final URIish uri)
throws NotSupportedException {
super(local, uri);
+ setURI(uri);
+ http = local.getConfig().get(HttpConfig::new);
+ proxySelector = ProxySelector.getDefault();
+ }
+
+ private URL toURL(URIish urish) throws MalformedURLException {
+ String uriString = urish.toString();
+ if (!uriString.endsWith("/")) { //$NON-NLS-1$
+ uriString += '/';
+ }
+ return new URL(uriString);
+ }
+
+ /**
+ * @param uri
+ * the URI to use as the new base and current URI, e.g. after a
+ * redirect
+ * @throws NotSupportedException
+ * if the URI cannot be converted to a URL
+ * @since 4.9
+ */
+ protected void setURI(final URIish uri) throws NotSupportedException {
try {
- String uriString = uri.toString();
- if (!uriString.endsWith("/")) //$NON-NLS-1$
- uriString += "/"; //$NON-NLS-1$
- baseUrl = new URL(uriString);
+ currentUri = uri;
+ baseUrl = toURL(uri);
objectsUrl = new URL(baseUrl, "objects/"); //$NON-NLS-1$
} catch (MalformedURLException e) {
throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
}
- http = local.getConfig().get(HTTP_KEY);
- proxySelector = ProxySelector.getDefault();
}
/**
@@ -291,15 +383,7 @@ private static class HttpConfig {
*/
TransportHttp(final URIish uri) throws NotSupportedException {
super(uri);
- try {
- String uriString = uri.toString();
- if (!uriString.endsWith("/")) //$NON-NLS-1$
- uriString += "/"; //$NON-NLS-1$
- baseUrl = new URL(uriString);
- objectsUrl = new URL(baseUrl, "objects/"); //$NON-NLS-1$
- } catch (MalformedURLException e) {
- throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
- }
+ setURI(uri);
http = new HttpConfig();
proxySelector = ProxySelector.getDefault();
}
@@ -469,28 +553,9 @@ public void setAdditionalHeaders(Map<String, String> headers) {
private HttpConnection connect(final String service)
throws TransportException, NotSupportedException {
- final URL u;
- try {
- final StringBuilder b = new StringBuilder();
- b.append(baseUrl);
-
- if (b.charAt(b.length() - 1) != '/')
- b.append('/');
- b.append(Constants.INFO_REFS);
-
- if (useSmartHttp) {
- b.append(b.indexOf("?") < 0 ? '?' : '&'); //$NON-NLS-1$
- b.append("service="); //$NON-NLS-1$
- b.append(service);
- }
-
- u = new URL(b.toString());
- } catch (MalformedURLException e) {
- throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
- }
-
-
+ URL u = getServiceURL(service);
int authAttempts = 1;
+ int redirects = 0;
Collection<Type> ignoreTypes = null;
for (;;) {
try {
@@ -527,9 +592,10 @@ private HttpConnection connect(final String service)
throw new TransportException(uri,
JGitText.get().noCredentialsProvider);
if (authAttempts > 1)
- credentialsProvider.reset(uri);
+ credentialsProvider.reset(currentUri);
if (3 < authAttempts
- || !authMethod.authorize(uri, credentialsProvider)) {
+ || !authMethod.authorize(currentUri,
+ credentialsProvider)) {
throw new TransportException(uri,
JGitText.get().notAuthorized);
}
@@ -538,8 +604,28 @@ private HttpConnection connect(final String service)
case HttpConnection.HTTP_FORBIDDEN:
throw new TransportException(uri, MessageFormat.format(
- JGitText.get().serviceNotPermitted, service));
+ JGitText.get().serviceNotPermitted, baseUrl,
+ service));
+ case HttpConnection.HTTP_MOVED_PERM:
+ case HttpConnection.HTTP_MOVED_TEMP:
+ case HttpConnection.HTTP_SEE_OTHER:
+ case HttpConnection.HTTP_11_MOVED_TEMP:
+ // SEE_OTHER should actually never be sent by a git server,
+ // and in general should occur only on POST requests. But it
+ // doesn't hurt to accept it here as a redirect.
+ if (http.followRedirects == HttpRedirectMode.FALSE) {
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().redirectsOff,
+ Integer.valueOf(status)));
+ }
+ URIish newUri = redirect(conn.getHeaderField(HDR_LOCATION),
+ Constants.INFO_REFS, redirects++);
+ setURI(newUri);
+ u = getServiceURL(service);
+ authAttempts = 1;
+ break;
default:
String err = status + " " + conn.getResponseMessage(); //$NON-NLS-1$
throw new TransportException(uri, err);
@@ -568,6 +654,89 @@ private HttpConnection connect(final String service)
}
}
+ private URIish redirect(String location, String checkFor, int redirects)
+ throws TransportException {
+ if (location == null || location.isEmpty()) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectLocationMissing,
+ baseUrl));
+ }
+ if (redirects >= http.maxRedirects) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectLimitExceeded,
+ Integer.valueOf(http.maxRedirects), baseUrl, location));
+ }
+ try {
+ if (!isValidRedirect(baseUrl, location, checkFor)) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().redirectBlocked,
+ baseUrl, location));
+ }
+ location = location.substring(0, location.indexOf(checkFor));
+ URIish result = new URIish(location);
+ if (LOG.isInfoEnabled()) {
+ LOG.info(MessageFormat.format(JGitText.get().redirectHttp,
+ uri.setPass(null),
+ Integer.valueOf(redirects), baseUrl, result));
+ }
+ return result;
+ } catch (URISyntaxException e) {
+ throw new TransportException(uri,
+ MessageFormat.format(JGitText.get().invalidRedirectLocation,
+ baseUrl, location),
+ e);
+ }
+ }
+
+ private boolean isValidRedirect(URL current, String next, String checkFor) {
+ // Protocols must be the same, or current is "http" and next "https". We
+ // do not follow redirects from https back to http.
+ String oldProtocol = current.getProtocol().toLowerCase(Locale.ROOT);
+ int schemeEnd = next.indexOf("://"); //$NON-NLS-1$
+ if (schemeEnd < 0) {
+ return false;
+ }
+ String newProtocol = next.substring(0, schemeEnd)
+ .toLowerCase(Locale.ROOT);
+ if (!oldProtocol.equals(newProtocol)) {
+ if (!"https".equals(newProtocol)) { //$NON-NLS-1$
+ return false;
+ }
+ }
+ // git allows only rewriting the root, i.e., everything before INFO_REFS
+ // or the service name
+ if (next.indexOf(checkFor) < 0) {
+ return false;
+ }
+ // Basically we should test here that whatever follows INFO_REFS is
+ // unchanged. But since we re-construct the query part
+ // anyway, it doesn't matter.
+ return true;
+ }
+
+ private URL getServiceURL(final String service)
+ throws NotSupportedException {
+ try {
+ final StringBuilder b = new StringBuilder();
+ b.append(baseUrl);
+
+ if (b.charAt(b.length() - 1) != '/') {
+ b.append('/');
+ }
+ b.append(Constants.INFO_REFS);
+
+ if (useSmartHttp) {
+ b.append(b.indexOf("?") < 0 ? '?' : '&'); //$NON-NLS-1$
+ b.append("service="); //$NON-NLS-1$
+ b.append(service);
+ }
+
+ return new URL(b.toString());
+ } catch (MalformedURLException e) {
+ throw new NotSupportedException(MessageFormat.format(JGitText.get().invalidURL, uri), e);
+ }
+ }
+
/**
* Open an HTTP connection, setting the accept-encoding request header to gzip.
*
@@ -606,6 +775,10 @@ protected HttpConnection httpOpen(String method, URL u,
HttpSupport.disableSslVerify(conn);
}
+ // We must do our own redirect handling to implement git rules and to
+ // handle http->https redirects
+ conn.setInstanceFollowRedirects(false);
+
conn.setRequestMethod(method);
conn.setUseCaches(false);
if (acceptEncoding == AcceptEncoding.GZIP) {
@@ -914,13 +1087,7 @@ abstract class Service {
}
void openStream() throws IOException {
- openStream(null);
- }
-
- void openStream(final String redirectUrl) throws IOException {
- conn = httpOpen(
- METHOD_POST,
- redirectUrl == null ? new URL(baseUrl, serviceName) : new URL(redirectUrl),
+ conn = httpOpen(METHOD_POST, new URL(baseUrl, serviceName),
AcceptEncoding.GZIP);
conn.setInstanceFollowRedirects(false);
conn.setDoOutput(true);
@@ -929,10 +1096,6 @@ void openStream(final String redirectUrl) throws IOException {
}
void sendRequest() throws IOException {
- sendRequest(null);
- }
-
- void sendRequest(final String redirectUrl) throws IOException {
// Try to compress the content, but only if that is smaller.
TemporaryBuffer buf = new TemporaryBuffer.Heap(http.postBuffer);
try {
@@ -947,21 +1110,131 @@ void sendRequest(final String redirectUrl) throws IOException {
buf = out;
}
- openStream(redirectUrl);
- if (buf != out)
- conn.setRequestProperty(HDR_CONTENT_ENCODING, ENCODING_GZIP);
- conn.setFixedLengthStreamingMode((int) buf.length());
- final OutputStream httpOut = conn.getOutputStream();
- try {
- buf.writeTo(httpOut, null);
- } finally {
- httpOut.close();
- }
+ HttpAuthMethod authenticator = null;
+ Collection<Type> ignoreTypes = EnumSet.noneOf(Type.class);
+ // Counts number of repeated authentication attempts using the same
+ // authentication scheme
+ int authAttempts = 1;
+ int redirects = 0;
+ for (;;) {
+ // The very first time we will try with the authentication
+ // method used on the initial GET request. This is a hint only;
+ // it may fail. If so, we'll then re-try with proper 401
+ // handling, going through the available authentication schemes.
+ openStream();
+ if (buf != out) {
+ conn.setRequestProperty(HDR_CONTENT_ENCODING, ENCODING_GZIP);
+ }
+ conn.setFixedLengthStreamingMode((int) buf.length());
+ try (OutputStream httpOut = conn.getOutputStream()) {
+ buf.writeTo(httpOut, null);
+ }
- final int status = HttpSupport.response(conn);
- if (status == HttpConnection.HTTP_MOVED_PERM) {
- String locationHeader = HttpSupport.responseHeader(conn, HDR_LOCATION);
- sendRequest(locationHeader);
+ final int status = HttpSupport.response(conn);
+ switch (status) {
+ case HttpConnection.HTTP_OK:
+ // We're done.
+ return;
+
+ case HttpConnection.HTTP_NOT_FOUND:
+ throw new NoRemoteRepositoryException(uri, MessageFormat
+ .format(JGitText.get().uriNotFound, conn.getURL()));
+
+ case HttpConnection.HTTP_FORBIDDEN:
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().serviceNotPermitted,
+ baseUrl, serviceName));
+
+ case HttpConnection.HTTP_MOVED_PERM:
+ case HttpConnection.HTTP_MOVED_TEMP:
+ case HttpConnection.HTTP_11_MOVED_TEMP:
+ // SEE_OTHER after a POST doesn't make sense for a git
+ // server, so we don't handle it here and thus we'll
+ // report an error in openResponse() later on.
+ if (http.followRedirects != HttpRedirectMode.TRUE) {
+ // Let openResponse() issue an error
+ return;
+ }
+ currentUri = redirect(
+ conn.getHeaderField(HDR_LOCATION),
+ '/' + serviceName, redirects++);
+ try {
+ baseUrl = toURL(currentUri);
+ } catch (MalformedURLException e) {
+ throw new TransportException(uri, MessageFormat.format(
+ JGitText.get().invalidRedirectLocation,
+ baseUrl, currentUri), e);
+ }
+ continue;
+
+ case HttpConnection.HTTP_UNAUTHORIZED:
+ HttpAuthMethod nextMethod = HttpAuthMethod
+ .scanResponse(conn, ignoreTypes);
+ switch (nextMethod.getType()) {
+ case NONE:
+ throw new TransportException(uri,
+ MessageFormat.format(
+ JGitText.get().authenticationNotSupported,
+ conn.getURL()));
+ case NEGOTIATE:
+ // RFC 4559 states "When using the SPNEGO [...] with
+ // [...] POST, the authentication should be complete
+ // [...] before sending the user data." So in theory
+ // the initial GET should have been authenticated
+ // already. (Unless there was a redirect?)
+ //
+ // We try this only once:
+ ignoreTypes.add(HttpAuthMethod.Type.NEGOTIATE);
+ if (authenticator != null) {
+ ignoreTypes.add(authenticator.getType());
+ }
+ authAttempts = 1;
+ // We only do the Kerberos part of SPNEGO, which
+ // requires only one attempt. We do *not* do the
+ // NTLM part of SPNEGO; it's a multi-round
+ // negotiation and among other problems it would
+ // be unclear when to stop if no HTTP_OK is
+ // forthcoming. In theory a malicious server
+ // could keep sending requests for another NTLM
+ // round, keeping a client stuck here.
+ break;
+ default:
+ // DIGEST or BASIC. Let's be sure we ignore NEGOTIATE;
+ // if it was available, we have tried it before.
+ ignoreTypes.add(HttpAuthMethod.Type.NEGOTIATE);
+ if (authenticator == null || authenticator
+ .getType() != nextMethod.getType()) {
+ if (authenticator != null) {
+ ignoreTypes.add(authenticator.getType());
+ }
+ authAttempts = 1;
+ }
+ break;
+ }
+ authMethod = nextMethod;
+ authenticator = nextMethod;
+ CredentialsProvider credentialsProvider = getCredentialsProvider();
+ if (credentialsProvider == null) {
+ throw new TransportException(uri,
+ JGitText.get().noCredentialsProvider);
+ }
+ if (authAttempts > 1) {
+ credentialsProvider.reset(currentUri);
+ }
+ if (3 < authAttempts || !authMethod.authorize(currentUri,
+ credentialsProvider)) {
+ throw new TransportException(uri,
+ JGitText.get().notAuthorized);
+ }
+ authAttempts++;
+ continue;
+
+ default:
+ // Just return here; openResponse() will report an appropriate
+ // error.
+ return;
+ }
}
}
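
The redirect handling introduced above is driven by the repository's http section, with the http.maxRedirects JVM system property as a process-wide fallback for the limit. A minimal sketch of pinning the behaviour down explicitly, assuming an existing Repository named repo (illustration only of the keys read by HttpConfig):

    // Configure JGit's own HTTP redirect handling for this repository.
    StoredConfig cfg = repo.getConfig();
    cfg.setString("http", null, "followRedirects", "initial"); // "true", "initial" or "false"
    cfg.setInt("http", null, "maxRedirects", 3); // negative values fall back to the default
    cfg.save(); // may throw IOException

The process-wide default limit can also be set once at startup with -Dhttp.maxRedirects=<n>; it is read a single time when TransportHttp is loaded.
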
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
index 17af0b9..c3f50a4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/UploadPack.java
@@ -719,7 +719,7 @@ private Map<String, Ref> getAdvertisedOrDefaultRefs() throws IOException {
}
private void service() throws IOException {
- boolean sendPack;
+ boolean sendPack = false;
// If it's a non-bidi request, we need to read the entire request before
// writing a response. Buffer the response until then.
try {
@@ -752,6 +752,17 @@ else if (requestValidator instanceof AnyRequestValidator)
if (!clientShallowCommits.isEmpty())
walk.assumeShallow(clientShallowCommits);
sendPack = negotiate();
+ if (sendPack && !biDirectionalPipe) {
+ // Ensure the request was fully consumed. Any remaining input must
+ // be a protocol error. If we aren't at EOF the implementation is broken.
+ int eof = rawIn.read();
+ if (0 <= eof) {
+ sendPack = false;
+ throw new CorruptObjectException(MessageFormat.format(
+ JGitText.get().expectedEOFReceived,
+ "\\x" + Integer.toHexString(eof))); //$NON-NLS-1$
+ }
+ }
} catch (ServiceMayNotContinueException err) {
if (!err.isOutput() && err.getMessage() != null) {
try {
@@ -778,6 +789,11 @@ else if (requestValidator instanceof AnyRequestValidator)
}
throw err;
} finally {
+ if (!sendPack && !biDirectionalPipe) {
+ while (0 < rawIn.skip(2048) || 0 <= rawIn.read()) {
+ // Discard until EOF.
+ }
+ }
rawOut.stopBuffering();
}
@@ -1390,17 +1406,6 @@ private boolean wantSatisfied(final RevObject want) throws IOException {
private void sendPack() throws IOException {
final boolean sideband = options.contains(OPTION_SIDE_BAND)
|| options.contains(OPTION_SIDE_BAND_64K);
-
- if (!biDirectionalPipe) {
- // Ensure the request was fully consumed. Any remaining input must
- // be a protocol error. If we aren't at EOF the implementation is broken.
- int eof = rawIn.read();
- if (0 <= eof)
- throw new CorruptObjectException(MessageFormat.format(
- JGitText.get().expectedEOFReceived,
- "\\x" + Integer.toHexString(eof))); //$NON-NLS-1$
- }
-
if (sideband) {
try {
sendPack(true);
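
Two effects of the change above: the end-of-request check now runs right after negotiate(), so trailing garbage is diagnosed before any pack data is written, and when no pack will be sent the finally block drains the rest of the request before the buffered response is flushed. The drain idiom as a standalone sketch (method and stream names assumed):

    // Discard whatever is left of the request until EOF is reached.
    static void drainToEof(InputStream in) throws IOException {
        while (in.skip(2048) > 0 || in.read() >= 0) {
            // keep discarding
        }
    }
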
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
index 58081c1..35a1ee1 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/transport/http/HttpConnection.java
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2013 Christian Halstrick <christian.halstrick@sap.com>
+ * Copyright (C) 2013, 2017 Christian Halstrick <christian.halstrick@sap.com>
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
@@ -79,6 +79,26 @@ public interface HttpConnection {
public static final int HTTP_MOVED_PERM = java.net.HttpURLConnection.HTTP_MOVED_PERM;
/**
+ * @see HttpURLConnection#HTTP_MOVED_TEMP
+ * @since 4.9
+ */
+ public static final int HTTP_MOVED_TEMP = java.net.HttpURLConnection.HTTP_MOVED_TEMP;
+
+ /**
+ * @see HttpURLConnection#HTTP_SEE_OTHER
+ * @since 4.9
+ */
+ public static final int HTTP_SEE_OTHER = java.net.HttpURLConnection.HTTP_SEE_OTHER;
+
+ /**
+ * HTTP 1.1 additional MOVED_TEMP status code; value = 307.
+ *
+ * @see #HTTP_MOVED_TEMP
+ * @since 4.9
+ */
+ public static final int HTTP_11_MOVED_TEMP = 307;
+
+ /**
* @see HttpURLConnection#HTTP_NOT_FOUND
*/
public static final int HTTP_NOT_FOUND = java.net.HttpURLConnection.HTTP_NOT_FOUND;
@@ -253,7 +273,7 @@ public void setRequestMethod(String method)
/**
* Configure the connection so that it can be used for https communication.
- *
+ *
* @param km
* the keymanager managing the key material used to authenticate
* the local SSLSocket to its peer
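
Together with the pre-existing HTTP_MOVED_PERM, these constants cover the redirect statuses TransportHttp now accepts on the initial GET. A small caller-side sketch (helper name is illustrative only):

    // Classify a response status as one of the redirect codes defined above.
    static boolean isRedirect(int status) {
        return status == HttpConnection.HTTP_MOVED_PERM
                || status == HttpConnection.HTTP_MOVED_TEMP
                || status == HttpConnection.HTTP_SEE_OTHER
                || status == HttpConnection.HTTP_11_MOVED_TEMP;
    }
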
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
index 7d2b33f..2b18904 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/WorkingTreeOptions.java
@@ -55,12 +55,8 @@
/** Options used by the {@link WorkingTreeIterator}. */
public class WorkingTreeOptions {
/** Key for {@link Config#get(SectionParser)}. */
- public static final Config.SectionParser<WorkingTreeOptions> KEY = new SectionParser<WorkingTreeOptions>() {
- @Override
- public WorkingTreeOptions parse(final Config cfg) {
- return new WorkingTreeOptions(cfg);
- }
- };
+ public static final Config.SectionParser<WorkingTreeOptions> KEY =
+ WorkingTreeOptions::new;
private final boolean fileMode;
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
index 1719416..2ea8228 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/treewalk/filter/InterIndexDiffFilter.java
@@ -102,4 +102,4 @@ public TreeFilter clone() {
public String toString() {
return "INTERINDEX_DIFF"; //$NON-NLS-1$
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
index 658dd06..0a3c846 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/IntList.java
@@ -71,6 +71,21 @@ public int size() {
}
/**
+ * Check if an entry appears in this collection.
+ *
+ * @param value
+ * the value to search for.
+ * @return true if {@code value} appears in this list.
+ * @since 4.9
+ */
+ public boolean contains(int value) {
+ for (int i = 0; i < count; i++)
+ if (entries[i] == value)
+ return true;
+ return false;
+ }
+
+ /**
* @param i
* index to read, must be in the range [0, {@link #size()}).
* @return the number at the specified index
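
A quick usage sketch of the new linear-scan lookup:

    IntList list = new IntList();
    list.add(4);
    list.add(9);
    list.contains(9); // true
    list.contains(7); // false, after scanning every entry
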
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java
similarity index 82%
rename from org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java
rename to org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java
index 4d60202..7b0b0c7 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/transport/LongMap.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/LongMap.java
@@ -41,15 +41,16 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-package org.eclipse.jgit.transport;
+package org.eclipse.jgit.util;
/**
- * Simple Map<long,Object> helper for {@link PackParser}.
+ * Simple Map<long,Object>.
*
* @param <V>
* type of the value instance.
+ * @since 4.9
*/
-final class LongMap<V> {
+public class LongMap<V> {
private static final float LOAD_FACTOR = 0.75f;
private Node<V>[] table;
@@ -60,16 +61,27 @@ final class LongMap<V> {
/** Next {@link #size} to trigger a {@link #grow()}. */
private int growAt;
- LongMap() {
+ /** Initialize an empty LongMap. */
+ public LongMap() {
table = createArray(64);
growAt = (int) (table.length * LOAD_FACTOR);
}
- boolean containsKey(final long key) {
+ /**
+ * @param key
+ * the key to find.
+ * @return {@code true} if {@code key} is present in the map.
+ */
+ public boolean containsKey(long key) {
return get(key) != null;
}
- V get(final long key) {
+ /**
+ * @param key
+ * the key to find.
+ * @return stored value of the key, or {@code null}.
+ */
+ public V get(long key) {
for (Node<V> n = table[index(key)]; n != null; n = n.next) {
if (n.key == key)
return n.value;
@@ -77,7 +89,12 @@ V get(final long key) {
return null;
}
- V remove(final long key) {
+ /**
+ * @param key
+ * key to remove from the map.
+ * @return old value of the key, or {@code null}.
+ */
+ public V remove(long key) {
Node<V> n = table[index(key)];
Node<V> prior = null;
while (n != null) {
@@ -95,7 +112,14 @@ V remove(final long key) {
return null;
}
- V put(final long key, final V value) {
+ /**
+ * @param key
+ * key to store {@code value} under.
+ * @param value
+ * new value.
+ * @return prior value, or {@code null}.
+ */
+ public V put(long key, V value) {
for (Node<V> n = table[index(key)]; n != null; n = n.next) {
if (n.key == key) {
final V o = n.value;
@@ -145,9 +169,7 @@ private final int index(final long key) {
private static class Node<V> {
final long key;
-
V value;
-
Node<V> next;
Node(final long k, final V v) {
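
With the class now public in org.eclipse.jgit.util, the map can be used outside PackParser; a minimal sketch of the API documented above:

    LongMap<String> map = new LongMap<>();
    map.put(1L << 40, "big");                    // keys are primitive longs, no boxing
    String v = map.get(1L << 40);                // "big"
    boolean present = map.containsKey(1L << 40); // true
    String old = map.remove(1L << 40);           // "big"; the entry is gone afterwards
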
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
index 8536f1d..471a499 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/NB.java
@@ -113,6 +113,24 @@ public static int decodeUInt16(final byte[] intbuf, final int offset) {
}
/**
+ * Convert sequence of 3 bytes (network byte order) into unsigned value.
+ *
+ * @param intbuf
+ * buffer to acquire the 3 bytes of data from.
+ * @param offset
+ * position within the buffer to begin reading from. This
+ * position and the next 2 bytes after it (for a total of 3
+ * bytes) will be read.
+ * @return unsigned integer value that matches the 24 bits read.
+ * @since 4.9
+ */
+ public static int decodeUInt24(byte[] intbuf, int offset) {
+ int r = (intbuf[offset] & 0xff) << 8;
+ r |= intbuf[offset + 1] & 0xff;
+ return (r << 8) | (intbuf[offset + 2] & 0xff);
+ }
+
+ /**
* Convert sequence of 4 bytes (network byte order) into signed value.
*
* @param intbuf
@@ -223,6 +241,29 @@ public static void encodeInt16(final byte[] intbuf, final int offset, int v) {
}
/**
+ * Write a 24 bit integer as a sequence of 3 bytes (network byte order).
+ *
+ * @param intbuf
+ * buffer to write the 3 bytes of data into.
+ * @param offset
+ * position within the buffer to begin writing to. This position
+ * and the next 2 bytes after it (for a total of 3 bytes) will be
+ * replaced.
+ * @param v
+ * the value to write.
+ * @since 4.9
+ */
+ public static void encodeInt24(byte[] intbuf, int offset, int v) {
+ intbuf[offset + 2] = (byte) v;
+ v >>>= 8;
+
+ intbuf[offset + 1] = (byte) v;
+ v >>>= 8;
+
+ intbuf[offset] = (byte) v;
+ }
+
+ /**
* Write a 32 bit integer as a sequence of 4 bytes (network byte order).
*
* @param intbuf
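
encodeInt24 and decodeUInt24 are inverses over the low 24 bits; a round-trip sketch:

    byte[] buf = new byte[3];
    NB.encodeInt24(buf, 0, 0x0A0B0C); // buf = { 0x0A, 0x0B, 0x0C }, network byte order
    int v = NB.decodeUInt24(buf, 0);  // 0x0A0B0C
    // Only the low 24 bits matter: 0x010A0B0C encodes to the same three bytes.
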
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
index 86777b9..ad138bb 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RawParseUtils.java
@@ -618,6 +618,10 @@ public static final int prevLF(final byte[] b, int ptr, final char chrA) {
* <p>
* The last element (index <code>map.size()-1</code>) always contains
* <code>end</code>.
+ * <p>
+ * If the data contains a '\0' anywhere, the whole region is considered binary
+ * and a LineMap corresponding to a single line is returned.
+ * </p>
*
* @param buf
* buffer to scan.
@@ -629,14 +633,29 @@ public static final int prevLF(final byte[] b, int ptr, final char chrA) {
* @return a line map indexing the start position of each line.
*/
public static final IntList lineMap(final byte[] buf, int ptr, int end) {
+ int start = ptr;
+
// Experimentally derived from multiple source repositories
// the average number of bytes/line is 36. Its a rough guess
// to initially size our map close to the target.
- //
- final IntList map = new IntList((end - ptr) / 36);
- map.fillTo(1, Integer.MIN_VALUE);
- for (; ptr < end; ptr = nextLF(buf, ptr))
- map.add(ptr);
+ IntList map = new IntList((end - ptr) / 36);
+ map.add(Integer.MIN_VALUE);
+ boolean foundLF = true;
+ for (; ptr < end; ptr++) {
+ if (foundLF) {
+ map.add(ptr);
+ }
+
+ if (buf[ptr] == '\0') {
+ // binary data.
+ map = new IntList(3);
+ map.add(Integer.MIN_VALUE);
+ map.add(start);
+ break;
+ }
+
+ foundLF = (buf[ptr] == '\n');
+ }
map.add(end);
return map;
}
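
The effect of the NUL check, as a sketch (values derived from the implementation above):

    byte[] text = { 'a', '\n', 'b', '\n' };
    RawParseUtils.lineMap(text, 0, text.length);
    // -> [Integer.MIN_VALUE, 0, 2, 4]: lines start at 0 and 2, end marker is 4

    byte[] binary = { 'a', '\0', 'b' };
    RawParseUtils.lineMap(binary, 0, binary.length);
    // -> [Integer.MIN_VALUE, 0, 3]: the whole region is reported as a single line
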
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
index 3cb3749..21a55a6 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/RelativeDateFormatter.java
@@ -140,4 +140,4 @@ private static long round(long n, long unit) {
long rounded = (n + unit / 2) / unit;
return rounded;
}
-}
\ No newline at end of file
+}
diff --git a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
index c95992f..727c1f4 100644
--- a/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
+++ b/org.eclipse.jgit/src/org/eclipse/jgit/util/io/EolStreamTypeUtil.java
@@ -144,6 +144,11 @@ public static OutputStream wrapOutputStream(OutputStream out,
private static EolStreamType checkInStreamType(WorkingTreeOptions options,
Attributes attrs) {
+ if (attrs.isUnset("text")) {//$NON-NLS-1$
+ // "binary" or "-text" (which is included in the binary expansion)
+ return EolStreamType.DIRECT;
+ }
+
// old git system
if (attrs.isSet("crlf")) {//$NON-NLS-1$
return EolStreamType.TEXT_LF;
@@ -154,9 +159,6 @@ private static EolStreamType checkInStreamType(WorkingTreeOptions options,
}
// new git system
- if (attrs.isUnset("text")) {//$NON-NLS-1$
- return EolStreamType.DIRECT;
- }
String eol = attrs.getValue("eol"); //$NON-NLS-1$
if (eol != null)
// check-in is always normalized to LF
@@ -183,6 +185,11 @@ private static EolStreamType checkInStreamType(WorkingTreeOptions options,
private static EolStreamType checkOutStreamType(WorkingTreeOptions options,
Attributes attrs) {
+ if (attrs.isUnset("text")) {//$NON-NLS-1$
+ // "binary" or "-text" (which is included in the binary expansion)
+ return EolStreamType.DIRECT;
+ }
+
// old git system
if (attrs.isSet("crlf")) {//$NON-NLS-1$
return FORCE_EOL_LF_ON_CHECKOUT ? EolStreamType.TEXT_LF
@@ -194,9 +201,6 @@ private static EolStreamType checkOutStreamType(WorkingTreeOptions options,
}
// new git system
- if (attrs.isUnset("text")) {//$NON-NLS-1$
- return EolStreamType.DIRECT;
- }
String eol = attrs.getValue("eol"); //$NON-NLS-1$
if (eol != null && "crlf".equals(eol)) //$NON-NLS-1$
return EolStreamType.TEXT_CRLF;
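
Moving the unset-"text" check to the front gives a binary declaration precedence over the crlf attribute, core.autocrlf and any eol attribute, so entries like the following now always result in DIRECT (no line-ending conversion) on both check-in and check-out. Illustrative .gitattributes lines:

    *.png binary   # "binary" expands to -diff -merge -text
    *.dat -text    # explicit opt-out from text conversion
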
diff --git a/pom.xml b/pom.xml
index 19d0225..3e6c55b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -212,7 +212,7 @@
<maven-javadoc-plugin-version>2.10.4</maven-javadoc-plugin-version>
<tycho-extras-version>1.0.0</tycho-extras-version>
<gson-version>2.2.4</gson-version>
- <findbugs-maven-plugin-version>3.0.4</findbugs-maven-plugin-version>
+ <spotbugs-maven-plugin-version>3.0.6</spotbugs-maven-plugin-version>
<maven-surefire-report-plugin-version>2.20</maven-surefire-report-plugin-version>
<!-- Properties to enable jacoco code coverage analysis -->
@@ -371,9 +371,9 @@
</plugin>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
- <version>${findbugs-maven-plugin-version}</version>
+ <groupId>com.github.hazendaz.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
+ <version>${spotbugs-maven-plugin-version}</version>
<configuration>
<findbugsXmlOutput>true</findbugsXmlOutput>
<failOnError>false</failOnError>
@@ -579,9 +579,9 @@
<version>2.5</version>
</plugin>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
- <version>${findbugs-maven-plugin-version}</version>
+ <groupId>com.github.hazendaz.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
+ <version>${spotbugs-maven-plugin-version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -751,8 +751,8 @@
<build>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>findbugs-maven-plugin</artifactId>
+ <groupId>com.github.hazendaz.spotbugs</groupId>
+ <artifactId>spotbugs-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>