Merge changes If7f8c086,I42a0032f,Id811f144
* changes:
Bind PluginUser.Factory in PluginModule
Expose the GerritRuntime to ServerPlugin
Add GerritRuntime enum to describe the current running environment
diff --git a/.bazelproject b/.bazelproject
index e3a7a9c..8a726eb 100644
--- a/.bazelproject
+++ b/.bazelproject
@@ -4,6 +4,7 @@
directories:
.
+ -bin
-eclipse-out
-contrib
-gerrit-package-plugins
diff --git a/Documentation/BUILD b/Documentation/BUILD
index 2e6f4bc..4177f51 100644
--- a/Documentation/BUILD
+++ b/Documentation/BUILD
@@ -71,13 +71,13 @@
name = "index",
srcs = SRCS,
outs = ["index.jar"],
- cmd = "$(location //lib/asciidoctor:doc_indexer) " +
+ cmd = "$(location //java/com/google/gerrit/asciidoctor:doc_indexer) " +
"-o $(OUTS) " +
"--prefix \"%s/\" " % DOC_DIR +
"--in-ext \".txt\" " +
"--out-ext \".html\" " +
"$(SRCS)",
- tools = ["//lib/asciidoctor:doc_indexer"],
+ tools = ["//java/com/google/gerrit/asciidoctor:doc_indexer"],
)
# For the same srcs, we can have multiple genasciidoc_zip rules, but only one
diff --git a/Documentation/config-gerrit.txt b/Documentation/config-gerrit.txt
index ce7adc2..7ed0e17 100644
--- a/Documentation/config-gerrit.txt
+++ b/Documentation/config-gerrit.txt
@@ -773,10 +773,11 @@
+
Default is 128 MiB per cache, except:
+
+* `"change_notes"`: disk storage is disabled by default
* `"diff_summary"`: default is `1g` (1 GiB of disk space)
+
-If 0, disk storage for the cache is disabled.
+If 0 or negative, disk storage for the cache is disabled.
==== [[cache_names]]Standard Caches
diff --git a/Documentation/config-labels.txt b/Documentation/config-labels.txt
index 91e20cd..cf78c6d 100644
--- a/Documentation/config-labels.txt
+++ b/Documentation/config-labels.txt
@@ -275,8 +275,8 @@
sticky approvals, reducing turn-around for trivial cleanups prior to
submitting a change. Defaults to false.
-[[label_copyAllScoresOnMergeCommitFirstParentUpdate]]
-=== `label.Label-Name.copyAllScoresOnMergeCommitFirstParentUpdate`
+[[label_copyAllScoresOnMergeFirstParentUpdate]]
+=== `label.Label-Name.copyAllScoresOnMergeFirstParentUpdate`
This policy is useful if you don't want to trigger CI or human
verification again if your target branch moved on but the feature
diff --git a/Documentation/replace_macros.py b/Documentation/replace_macros.py
index c76d133..6f90697 100755
--- a/Documentation/replace_macros.py
+++ b/Documentation/replace_macros.py
@@ -183,7 +183,8 @@
element.insertBefore(a, element.firstChild);
// remove the link icon when the mouse is moved away,
- // but keep it shown if the mouse is over the element, the link or the icon
+ // but keep it shown if the mouse is over the element, the link or
+ // the icon
hide = function(evt) {
if (document.elementFromPoint(evt.clientX, evt.clientY) != element
&& document.elementFromPoint(evt.clientX, evt.clientY) != a
@@ -229,54 +230,54 @@
options, _ = opts.parse_args()
try:
- try:
- out_file = open(options.out, 'w', errors='ignore')
- src_file = open(options.src, 'r', errors='ignore')
- except TypeError:
- out_file = open(options.out, 'w')
- src_file = open(options.src, 'r')
- last_line = ''
- ignore_next_line = False
- last_title = ''
- for line in src_file:
- if PAT_GERRIT.match(last_line):
- # Case of "GERRIT\n------" at the footer
- out_file.write(GERRIT_UPLINK)
- last_line = ''
- elif PAT_SEARCHBOX.match(last_line):
- # Case of 'SEARCHBOX\n---------'
- if options.searchbox:
- out_file.write(SEARCH_BOX)
- last_line = ''
- elif PAT_INCLUDE.match(line):
- # Case of 'include::<filename>'
- match = PAT_INCLUDE.match(line)
- out_file.write(last_line)
- last_line = match.group(1) + options.suffix + match.group(2) + '\n'
- elif PAT_STARS.match(line):
- if PAT_TITLE.match(last_line):
- # Case of the title in '.<title>\n****\nget::<url>\n****'
- match = PAT_TITLE.match(last_line)
- last_title = GET_TITLE % match.group(1)
- else:
- out_file.write(last_line)
- last_title = ''
- elif PAT_GET.match(line):
- # Case of '****\nget::<url>\n****' in rest api
- url = PAT_GET.match(line).group(1)
- out_file.write(GET_MACRO.format(url) % last_title)
- ignore_next_line = True
- elif ignore_next_line:
- # Handle the trailing '****' of the 'get::' case
- last_line = ''
- ignore_next_line = False
- else:
- out_file.write(last_line)
- last_line = line
- out_file.write(last_line)
- out_file.write(LINK_SCRIPT)
- out_file.close()
+ try:
+ out_file = open(options.out, 'w', errors='ignore')
+ src_file = open(options.src, 'r', errors='ignore')
+ except TypeError:
+ out_file = open(options.out, 'w')
+ src_file = open(options.src, 'r')
+ last_line = ''
+ ignore_next_line = False
+ last_title = ''
+ for line in src_file:
+ if PAT_GERRIT.match(last_line):
+ # Case of "GERRIT\n------" at the footer
+ out_file.write(GERRIT_UPLINK)
+ last_line = ''
+ elif PAT_SEARCHBOX.match(last_line):
+ # Case of 'SEARCHBOX\n---------'
+ if options.searchbox:
+ out_file.write(SEARCH_BOX)
+ last_line = ''
+ elif PAT_INCLUDE.match(line):
+ # Case of 'include::<filename>'
+ match = PAT_INCLUDE.match(line)
+ out_file.write(last_line)
+ last_line = match.group(1) + options.suffix + match.group(2) + '\n'
+ elif PAT_STARS.match(line):
+ if PAT_TITLE.match(last_line):
+ # Case of the title in '.<title>\n****\nget::<url>\n****'
+ match = PAT_TITLE.match(last_line)
+ last_title = GET_TITLE % match.group(1)
+ else:
+ out_file.write(last_line)
+ last_title = ''
+ elif PAT_GET.match(line):
+ # Case of '****\nget::<url>\n****' in rest api
+ url = PAT_GET.match(line).group(1)
+ out_file.write(GET_MACRO.format(url) % last_title)
+ ignore_next_line = True
+ elif ignore_next_line:
+ # Handle the trailing '****' of the 'get::' case
+ last_line = ''
+ ignore_next_line = False
+ else:
+ out_file.write(last_line)
+ last_line = line
+ out_file.write(last_line)
+ out_file.write(LINK_SCRIPT)
+ out_file.close()
except IOError as err:
- sys.stderr.write(
- "error while expanding %s to %s: %s" % (options.src, options.out, err))
- exit(1)
+ sys.stderr.write(
+ "error while expanding %s to %s: %s" % (options.src, options.out, err))
+ exit(1)
diff --git a/WORKSPACE b/WORKSPACE
index 15d8651..757d86e 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -181,8 +181,8 @@
maven_jar(
name = "gson",
- artifact = "com.google.code.gson:gson:2.8.2",
- sha1 = "3edcfe49d2c6053a70a2a47e4e1c2f94998a49cf",
+ artifact = "com.google.code.gson:gson:2.8.4",
+ sha1 = "d0de1ca9b69e69d1d497ee3c6009d015f64dad57",
)
maven_jar(
@@ -194,8 +194,8 @@
maven_jar(
name = "protobuf",
- artifact = "com.google.protobuf:protobuf-java:3.4.0",
- sha1 = "b32aba0cbe737a4ca953f71688725972e3ee927c",
+ artifact = "com.google.protobuf:protobuf-java:3.5.1",
+ sha1 = "8c3492f7662fa1cbf8ca76a0f5eb1146f7725acd",
)
load("//lib:guava.bzl", "GUAVA_VERSION", "GUAVA_BIN_SHA1")
@@ -711,6 +711,18 @@
sha1 = "636e49d675bc28e0b3ae0edd077d6acbbb159166",
)
+maven_jar(
+ name = "truth-liteproto-extension",
+ artifact = "com.google.truth.extensions:truth-liteproto-extension:" + TRUTH_VERS,
+ sha1 = "21210ac07e5cfbe83f04733f806224a6c0ae4d2d",
+)
+
+maven_jar(
+ name = "truth-proto-extension",
+ artifact = "com.google.truth.extensions:truth-proto-extension:" + TRUTH_VERS,
+ sha1 = "5a2b504143a5fec2b6be8bce292b3b7577a81789",
+)
+
# When bumping the easymock version number, make sure to also move powermock to a compatible version
maven_jar(
name = "easymock",
diff --git a/contrib/check-valid-commit.py b/contrib/check-valid-commit.py
index d26fa58..763ae3e 100755
--- a/contrib/check-valid-commit.py
+++ b/contrib/check-valid-commit.py
@@ -10,13 +10,16 @@
SSH_USER = 'bot'
SSH_HOST = 'localhost'
SSH_PORT = 29418
-SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER, SSH_HOST, SSH_PORT)
+SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER,
+ SSH_HOST,
+ SSH_PORT)
FAILURE_SCORE = '--code-review=-2'
FAILURE_MESSAGE = 'This commit message does not match the standard.' \
+ ' Please correct the commit message and upload a replacement patch.'
PASS_SCORE = '--code-review=0'
PASS_MESSAGE = ''
+
def main():
change = None
project = None
@@ -25,8 +28,9 @@
patchset = None
try:
- opts, _args = getopt.getopt(sys.argv[1:], '', \
- ['change=', 'project=', 'branch=', 'commit=', 'patchset='])
+ opts, _args = getopt.getopt(sys.argv[1:], '',
+ ['change=', 'project=', 'branch=',
+ 'commit=', 'patchset='])
except getopt.GetoptError as err:
print('Error: %s' % (err))
usage()
@@ -48,8 +52,7 @@
usage()
sys.exit(-1)
- if change == None or project == None or branch == None \
- or commit == None or patchset == None:
+ if any(p is None for p in [change, project, branch, commit, patchset]):
usage()
sys.exit(-1)
@@ -57,16 +60,16 @@
status, output = subprocess.getstatusoutput(command)
if status != 0:
- print('Error running \'%s\'. status: %s, output:\n\n%s' % \
- (command, status, output))
+ print('Error running \'%s\'. status: %s, output:\n\n%s' %
+ (command, status, output))
sys.exit(-1)
commitMessage = output[(output.find('\n\n')+2):]
commitLines = commitMessage.split('\n')
if len(commitLines) > 1 and len(commitLines[1]) != 0:
- fail(commit, 'Invalid commit summary. The summary must be ' \
- + 'one line followed by a blank line.')
+ fail(commit, 'Invalid commit summary. The summary must be '
+ + 'one line followed by a blank line.')
i = 0
for line in commitLines:
@@ -76,23 +79,27 @@
passes(commit)
+
def usage():
print('Usage:\n')
- print(sys.argv[0] + ' --change <change id> --project <project name> ' \
- + '--branch <branch> --commit <sha1> --patchset <patchset id>')
+ print(sys.argv[0] + ' --change <change id> --project <project name> '
+ + '--branch <branch> --commit <sha1> --patchset <patchset id>')
-def fail( commit, message ):
+
+def fail(commit, message):
command = SSH_COMMAND + FAILURE_SCORE + ' -m \\\"' \
- + _shell_escape( FAILURE_MESSAGE + '\n\n' + message) \
+ + _shell_escape(FAILURE_MESSAGE + '\n\n' + message) \
+ '\\\" ' + commit
subprocess.getstatusoutput(command)
sys.exit(1)
-def passes( commit ):
+
+def passes(commit):
command = SSH_COMMAND + PASS_SCORE + ' -m \\\"' \
+ _shell_escape(PASS_MESSAGE) + ' \\\" ' + commit
subprocess.getstatusoutput(command)
+
def _shell_escape(x):
s = ''
for c in x:
@@ -102,6 +109,6 @@
s = s + c
return s
+
if __name__ == '__main__':
main()
-
diff --git a/contrib/populate-fixture-data.py b/contrib/populate-fixture-data.py
index 93ac34f..07a0f01 100755
--- a/contrib/populate-fixture-data.py
+++ b/contrib/populate-fixture-data.py
@@ -47,228 +47,235 @@
# Random names from US Census Data
FIRST_NAMES = [
- "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime", "Elaine",
- "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla", "Katrina",
- "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally", "Emma", "Susan",
- "Amanda", "Alyssa", "Patty", "Angie", "Dominique", "Cynthia", "Jennifer",
- "Theresa", "Desiree", "Kaylee", "Maureen", "Jeanne", "Kellie", "Valerie",
- "Nina", "Judy", "Diamond", "Anita", "Rebekah", "Stefanie", "Kendra", "Erin",
- "Tammie", "Tracey", "Bridget", "Krystal", "Jasmin", "Sonia", "Meghan",
- "Rebecca", "Jeanette", "Meredith", "Beverly", "Natasha", "Chloe", "Selena",
- "Teresa", "Sheena", "Cassandra", "Rhonda", "Tami", "Jodi", "Shelly", "Angela",
- "Kimberly", "Terry", "Joanna", "Isabella", "Lindsey", "Loretta", "Dana",
- "Veronica", "Carolyn", "Laura", "Karen", "Dawn", "Alejandra", "Cassie",
- "Lorraine", "Yolanda", "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri",
- "Doris", "Sandra", "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael",
- "Donna", "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
- "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney", "Jacqueline",
- "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn", "Nancy",
- "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara", "Chelsey", "Cassidy",
- "Jean", "Chelsea", "Jenny", "Diana", "Haley", "Kristine", "Kristina", "Erika",
- "Jenna", "Alison", "Deanna", "Abigail", "Melissa", "Sierra", "Linda",
- "Monica", "Tasha", "Traci", "Yvonne", "Tracy", "Marie", "Maria", "Michaela",
- "Stacie", "April", "Morgan", "Cathy", "Darlene", "Cristina", "Emily"
- "Ian", "Russell", "Phillip", "Jay", "Barry", "Brad", "Frederick", "Fernando",
- "Timothy", "Ricardo", "Bernard", "Daniel", "Ruben", "Alexis", "Kyle", "Malik",
- "Norman", "Kent", "Melvin", "Stephen", "Daryl", "Kurt", "Greg", "Alex",
- "Mario", "Riley", "Marvin", "Dan", "Steven", "Roberto", "Lucas", "Leroy",
- "Preston", "Drew", "Fred", "Casey", "Wesley", "Elijah", "Reginald", "Joel",
- "Christopher", "Jacob", "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott",
- "Terrence", "Jim", "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus",
- "Peter", "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
- "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
- "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
- "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
- "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
- "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
- "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin", "Cristian",
- "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac", "Maurice",
- "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius", "Jerome", "Jason",
- "Harold", "Kerry", "Clarence", "Gregg", "Shane", "Eduardo", "Micheal",
- "Howard", "Vernon", "Rodney", "Anthony", "Levi", "Larry", "Franklin", "Jimmy",
- "Jonathon", "Carl",
+ "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime",
+ "Elaine", "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla",
+ "Katrina", "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally",
+ "Emma", "Susan", "Amanda", "Alyssa", "Patty", "Angie", "Dominique",
+ "Cynthia", "Jennifer", "Theresa", "Desiree", "Kaylee", "Maureen",
+ "Jeanne", "Kellie", "Valerie", "Nina", "Judy", "Diamond", "Anita",
+ "Rebekah", "Stefanie", "Kendra", "Erin", "Tammie", "Tracey", "Bridget",
+ "Krystal", "Jasmin", "Sonia", "Meghan", "Rebecca", "Jeanette", "Meredith",
+ "Beverly", "Natasha", "Chloe", "Selena", "Teresa", "Sheena", "Cassandra",
+ "Rhonda", "Tami", "Jodi", "Shelly", "Angela", "Kimberly", "Terry",
+ "Joanna", "Isabella", "Lindsey", "Loretta", "Dana", "Veronica", "Carolyn",
+ "Laura", "Karen", "Dawn", "Alejandra", "Cassie", "Lorraine", "Yolanda",
+ "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri", "Doris", "Sandra",
+ "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael", "Donna",
+ "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
+ "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney",
+ "Jacqueline", "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn",
+ "Nancy", "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara",
+ "Chelsey", "Cassidy", "Jean", "Chelsea", "Jenny", "Diana", "Haley",
+ "Kristine", "Kristina", "Erika", "Jenna", "Alison", "Deanna", "Abigail",
+ "Melissa", "Sierra", "Linda", "Monica", "Tasha", "Traci", "Yvonne",
+ "Tracy", "Marie", "Maria", "Michaela", "Stacie", "April", "Morgan",
+ "Cathy", "Darlene", "Cristina", "Emily" "Ian", "Russell", "Phillip", "Jay",
+ "Barry", "Brad", "Frederick", "Fernando", "Timothy", "Ricardo", "Bernard",
+ "Daniel", "Ruben", "Alexis", "Kyle", "Malik", "Norman", "Kent", "Melvin",
+ "Stephen", "Daryl", "Kurt", "Greg", "Alex", "Mario", "Riley", "Marvin",
+ "Dan", "Steven", "Roberto", "Lucas", "Leroy", "Preston", "Drew", "Fred",
+ "Casey", "Wesley", "Elijah", "Reginald", "Joel", "Christopher", "Jacob",
+ "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott", "Terrence", "Jim",
+ "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus", "Peter",
+ "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
+ "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
+ "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
+ "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
+ "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
+ "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
+ "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin",
+ "Cristian", "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac",
+ "Maurice", "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius",
+ "Jerome", "Jason", "Harold", "Kerry", "Clarence", "Gregg", "Shane",
+ "Eduardo", "Micheal", "Howard", "Vernon", "Rodney", "Anthony", "Levi",
+ "Larry", "Franklin", "Jimmy", "Jonathon", "Carl",
]
LAST_NAMES = [
- "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey", "Farley",
- "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings", "Braun", "Rangel",
- "Casey", "Dougherty", "Hancock", "Wolf", "Henry", "Thomas", "Bentley",
- "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul", "Hess", "Chase",
- "Mckay", "Bender", "Colins", "Montoya", "Townsend", "Potts", "Ayala", "Avery",
- "Sherman", "Tapia", "Hamilton", "Ferguson", "Huang", "Hooper", "Zamora",
- "Logan", "Lloyd", "Quinn", "Monroe", "Brock", "Ibarra", "Fowler", "Weiss",
- "Montgomery", "Diaz", "Dixon", "Olson", "Robertson", "Arias", "Benjamin",
- "Abbott", "Stein", "Schroeder", "Beck", "Velasquez", "Barber", "Nichols",
- "Ortiz", "Burns", "Moody", "Stokes", "Wilcox", "Rush", "Michael", "Kidd",
- "Rowland", "Mclean", "Saunders", "Chung", "Newton", "Potter", "Hickman",
- "Ray", "Larson", "Figueroa", "Duncan", "Sparks", "Rose", "Hodge", "Huynh",
- "Joseph", "Morales", "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn",
- "Wise", "Knight", "Frederick", "Heath", "Pollard", "Vega", "Mcclain",
- "Buckley", "Conrad", "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns",
- "Mcknight", "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson",
- "Oneill", "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence",
- "Brandt", "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
- "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
- "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
- "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
- "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
- "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy", "Cameron",
- "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish", "Herman", "Hines",
- "Sutton", "Gallegos", "Stephenson", "Lozano", "Franklin", "Howe", "Bauer",
- "Love", "Ali", "Ellison", "Lester", "Guzman", "Jarvis", "Espinoza",
- "Fletcher", "Burton", "Woodard", "Peterson", "Barajas", "Richard", "Bryan",
- "Goodman", "Cline", "Rowe", "Faulkner", "Crawford", "Mueller", "Patterson",
- "Hull", "Walton", "Wu", "Flores", "York", "Dickson", "Barnes", "Fisher",
- "Strong", "Juarez", "Fitzgerald", "Schmitt", "Blevins", "Villa", "Sullivan",
- "Velazquez", "Horton", "Meadows", "Riley", "Barrera", "Neal", "Mendez",
- "Mcdonald", "Floyd", "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston",
- "Davies", "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
- "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
- "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
- "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
- "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
- "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
- "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
- "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
- "Knox", "Hanna", "Fields",
+ "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey",
+ "Farley", "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings",
+ "Braun", "Rangel", "Casey", "Dougherty", "Hancock", "Wolf", "Henry",
+ "Thomas", "Bentley", "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul",
+ "Hess", "Chase", "Mckay", "Bender", "Colins", "Montoya", "Townsend",
+ "Potts", "Ayala", "Avery", "Sherman", "Tapia", "Hamilton", "Ferguson",
+ "Huang", "Hooper", "Zamora", "Logan", "Lloyd", "Quinn", "Monroe", "Brock",
+ "Ibarra", "Fowler", "Weiss", "Montgomery", "Diaz", "Dixon", "Olson",
+ "Robertson", "Arias", "Benjamin", "Abbott", "Stein", "Schroeder", "Beck",
+ "Velasquez", "Barber", "Nichols", "Ortiz", "Burns", "Moody", "Stokes",
+ "Wilcox", "Rush", "Michael", "Kidd", "Rowland", "Mclean", "Saunders",
+ "Chung", "Newton", "Potter", "Hickman", "Ray", "Larson", "Figueroa",
+ "Duncan", "Sparks", "Rose", "Hodge", "Huynh", "Joseph", "Morales",
+ "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn", "Wise", "Knight",
+ "Frederick", "Heath", "Pollard", "Vega", "Mcclain", "Buckley", "Conrad",
+ "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns", "Mcknight",
+ "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson", "Oneill",
+ "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence", "Brandt",
+ "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
+ "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
+ "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
+ "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
+ "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
+ "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy",
+ "Cameron", "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish",
+ "Herman", "Hines", "Sutton", "Gallegos", "Stephenson", "Lozano",
+ "Franklin", "Howe", "Bauer", "Love", "Ali", "Ellison", "Lester", "Guzman",
+ "Jarvis", "Espinoza", "Fletcher", "Burton", "Woodard", "Peterson",
+ "Barajas", "Richard", "Bryan", "Goodman", "Cline", "Rowe", "Faulkner",
+ "Crawford", "Mueller", "Patterson", "Hull", "Walton", "Wu", "Flores",
+ "York", "Dickson", "Barnes", "Fisher", "Strong", "Juarez", "Fitzgerald",
+ "Schmitt", "Blevins", "Villa", "Sullivan", "Velazquez", "Horton",
+ "Meadows", "Riley", "Barrera", "Neal", "Mendez", "Mcdonald", "Floyd",
+ "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston", "Davies",
+ "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
+ "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
+ "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
+ "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
+ "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
+ "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
+ "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
+ "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
+ "Knox", "Hanna", "Fields",
]
def clean(json_string):
- # Strip JSON XSS Tag
- json_string = json_string.strip()
- if json_string.startswith(")]}'"):
- return json_string[5:]
- return json_string
+ # Strip JSON XSS Tag
+ json_string = json_string.strip()
+ if json_string.startswith(")]}'"):
+ return json_string[5:]
+ return json_string
def basic_auth(user):
- return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
+ return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
def fetch_admin_group():
- global GROUP_ADMIN
- # Get admin group
- r = json.loads(clean(requests.get(BASE_URL + "groups/" + "?suggest=ad&p=All-Projects",
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH).text))
- admin_group_name = r.keys()[0]
- GROUP_ADMIN = r[admin_group_name]
- GROUP_ADMIN["name"] = admin_group_name
+ global GROUP_ADMIN
+ # Get admin group
+ r = json.loads(clean(requests.get(
+ BASE_URL + "groups/?suggest=ad&p=All-Projects",
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH).text))
+    admin_group_name = list(r.keys())[0]
+ GROUP_ADMIN = r[admin_group_name]
+ GROUP_ADMIN["name"] = admin_group_name
def generate_random_text():
- return " ".join([random.choice("lorem ipsum "
- "doleret delendam "
- "\n esse".split(" ")) for _ in xrange(1, 100)])
+ return " ".join([random.choice("lorem ipsum "
+ "doleret delendam "
+ "\n esse".split(" ")) for _ in range(1,
+ 100)])
def set_up():
- global TMP_PATH
- TMP_PATH = tempfile.mkdtemp()
- atexit.register(clean_up)
- os.makedirs(TMP_PATH + "/ssh")
- os.makedirs(TMP_PATH + "/repos")
- fetch_admin_group()
+ global TMP_PATH
+ TMP_PATH = tempfile.mkdtemp()
+ atexit.register(clean_up)
+ os.makedirs(TMP_PATH + "/ssh")
+ os.makedirs(TMP_PATH + "/repos")
+ fetch_admin_group()
def get_random_users(num_users):
- users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
- num_users)
- names = []
- for u in users:
- names.append({"firstname": u[0],
- "lastname": u[1],
- "name": u[0] + " " + u[1],
- "username": u[0] + u[1],
- "email": u[0] + "." + u[1] + "@gerritcodereview.com",
- "http_password": "secret",
- "groups": []})
- return names
+ users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
+ num_users)
+ names = []
+ for u in users:
+ names.append({"firstname": u[0],
+ "lastname": u[1],
+ "name": u[0] + " " + u[1],
+ "username": u[0] + u[1],
+ "email": u[0] + "." + u[1] + "@gerritcodereview.com",
+ "http_password": "secret",
+ "groups": []})
+ return names
def generate_ssh_keys(gerrit_users):
- for user in gerrit_users:
- key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
- subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
- with open(key_file + ".pub", "r") as f:
- user["ssh_key"] = f.read()
+ for user in gerrit_users:
+ key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
+ subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
+ with open(key_file + ".pub", "r") as f:
+ user["ssh_key"] = f.read()
def create_gerrit_groups():
- groups = [
- {"name": "iOS-Maintainers", "description": "iOS Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Android-Maintainers", "description": "Android Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Backend-Maintainers", "description": "Backend Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Script-Maintainers", "description": "Script Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Security-Team", "description": "Sec Team",
- "visible_to_all": False, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]}]
- for g in groups:
- requests.put(BASE_URL + "groups/" + g["name"],
- json.dumps(g),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [g["name"] for g in groups]
+ groups = [
+ {"name": "iOS-Maintainers", "description": "iOS Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Android-Maintainers", "description": "Android Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Backend-Maintainers", "description": "Backend Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Script-Maintainers", "description": "Script Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Security-Team", "description": "Sec Team",
+ "visible_to_all": False, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]}]
+ for g in groups:
+ requests.put(BASE_URL + "groups/" + g["name"],
+ json.dumps(g),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [g["name"] for g in groups]
def create_gerrit_projects(owner_groups):
- projects = [
- {"id": "android", "name": "Android", "parent": "All-Projects",
- "branches": ["master"], "description": "Our android app.",
- "owners": [owner_groups[0]], "create_empty_commit": True},
- {"id": "ios", "name": "iOS", "parent": "All-Projects",
- "branches": ["master"], "description": "Our ios app.",
- "owners": [owner_groups[1]], "create_empty_commit": True},
- {"id": "backend", "name": "Backend", "parent": "All-Projects",
- "branches": ["master"], "description": "Our awesome backend.",
- "owners": [owner_groups[2]], "create_empty_commit": True},
- {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
- "branches": ["master"], "description": "some small scripts.",
- "owners": [owner_groups[3]], "create_empty_commit": True}]
- for p in projects:
- requests.put(BASE_URL + "projects/" + p["name"],
- json.dumps(p),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [p["name"] for p in projects]
+ projects = [
+ {"id": "android", "name": "Android", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our android app.",
+ "owners": [owner_groups[0]], "create_empty_commit": True},
+ {"id": "ios", "name": "iOS", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our ios app.",
+ "owners": [owner_groups[1]], "create_empty_commit": True},
+ {"id": "backend", "name": "Backend", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our awesome backend.",
+ "owners": [owner_groups[2]], "create_empty_commit": True},
+ {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
+ "branches": ["master"], "description": "some small scripts.",
+ "owners": [owner_groups[3]], "create_empty_commit": True}]
+ for p in projects:
+ requests.put(BASE_URL + "projects/" + p["name"],
+ json.dumps(p),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [p["name"] for p in projects]
def create_gerrit_users(gerrit_users):
- for user in gerrit_users:
- requests.put(BASE_URL + "accounts/" + user["username"],
- json.dumps(user),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
+ for user in gerrit_users:
+ requests.put(BASE_URL + "accounts/" + user["username"],
+ json.dumps(user),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
def create_change(user, project_name):
- random_commit_message = generate_random_text()
- change = {
- "project": project_name,
- "subject": random_commit_message.split("\n")[0],
- "branch": "master",
- "status": "NEW",
- }
- requests.post(BASE_URL + "changes/",
- json.dumps(change),
- headers=HEADERS,
- auth=basic_auth(user))
+ random_commit_message = generate_random_text()
+ change = {
+ "project": project_name,
+ "subject": random_commit_message.split("\n")[0],
+ "branch": "master",
+ "status": "NEW",
+ }
+ requests.post(BASE_URL + "changes/",
+ json.dumps(change),
+ headers=HEADERS,
+ auth=basic_auth(user))
def clean_up():
- shutil.rmtree(TMP_PATH)
+ shutil.rmtree(TMP_PATH)
def main():
p = optparse.OptionParser()
p.add_option("-u", "--user_count", action="store",
default=100,
@@ -299,7 +306,7 @@
project_names = create_gerrit_projects(group_names)
for idx, u in enumerate(gerrit_users):
- for _ in xrange(random.randint(1, 5)):
+ for _ in range(random.randint(1, 5)):
create_change(u, project_names[4 * idx / len(gerrit_users)])
main()
diff --git a/gerrit-gwtui/BUILD b/gerrit-gwtui/BUILD
index a6c9763..56ac0ea 100644
--- a/gerrit-gwtui/BUILD
+++ b/gerrit-gwtui/BUILD
@@ -34,8 +34,8 @@
"//java/com/google/gerrit/common:client",
"//java/com/google/gerrit/extensions:client",
"//lib:junit",
- "//lib:truth",
"//lib/gwt:dev",
"//lib/gwt:user",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/acceptance/BUILD b/java/com/google/gerrit/acceptance/BUILD
index acd5130a..9587860 100644
--- a/java/com/google/gerrit/acceptance/BUILD
+++ b/java/com/google/gerrit/acceptance/BUILD
@@ -76,9 +76,8 @@
"//java/com/google/gerrit/server/group/testing",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:jimfs",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/httpcomponents:fluent-hc",
@@ -88,6 +87,8 @@
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:impl_log4j",
"//lib/log:log4j",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
"//prolog:gerrit-prolog-common",
],
visibility = ["//visibility:public"],
diff --git a/lib/asciidoctor/java/AsciiDoctor.java b/java/com/google/gerrit/asciidoctor/AsciiDoctor.java
similarity index 98%
rename from lib/asciidoctor/java/AsciiDoctor.java
rename to java/com/google/gerrit/asciidoctor/AsciiDoctor.java
index 596fe66..3768ce7 100644
--- a/lib/asciidoctor/java/AsciiDoctor.java
+++ b/java/com/google/gerrit/asciidoctor/AsciiDoctor.java
@@ -12,6 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package com.google.gerrit.asciidoctor;
+
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.io.ByteStreams;
diff --git a/java/com/google/gerrit/asciidoctor/BUILD b/java/com/google/gerrit/asciidoctor/BUILD
new file mode 100644
index 0000000..b2b0f5d
--- /dev/null
+++ b/java/com/google/gerrit/asciidoctor/BUILD
@@ -0,0 +1,40 @@
+java_binary(
+ name = "asciidoc",
+ main_class = "com.google.gerrit.asciidoctor.AsciiDoctor",
+ visibility = ["//visibility:public"],
+ runtime_deps = [":asciidoc_lib"],
+)
+
+java_library(
+ name = "asciidoc_lib",
+ srcs = ["AsciiDoctor.java"],
+ visibility = ["//tools/eclipse:__pkg__"],
+ deps = [
+ "//lib:args4j",
+ "//lib:guava",
+ "//lib/asciidoctor",
+ "//lib/log:api",
+ "//lib/log:nop",
+ ],
+)
+
+java_binary(
+ name = "doc_indexer",
+ main_class = "com.google.gerrit.asciidoctor.DocIndexer",
+ visibility = ["//visibility:public"],
+ runtime_deps = [":doc_indexer_lib"],
+)
+
+java_library(
+ name = "doc_indexer_lib",
+ srcs = ["DocIndexer.java"],
+ visibility = ["//tools/eclipse:__pkg__"],
+ deps = [
+ ":asciidoc_lib",
+ "//java/com/google/gerrit/server:constants",
+ "//lib:args4j",
+ "//lib:guava",
+ "//lib/lucene:lucene-analyzers-common",
+ "//lib/lucene:lucene-core-and-backward-codecs",
+ ],
+)
diff --git a/lib/asciidoctor/java/DocIndexer.java b/java/com/google/gerrit/asciidoctor/DocIndexer.java
similarity index 98%
rename from lib/asciidoctor/java/DocIndexer.java
rename to java/com/google/gerrit/asciidoctor/DocIndexer.java
index c90c439..5dfde95 100644
--- a/lib/asciidoctor/java/DocIndexer.java
+++ b/java/com/google/gerrit/asciidoctor/DocIndexer.java
@@ -12,6 +12,8 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+package com.google.gerrit.asciidoctor;
+
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.gerrit.server.documentation.Constants;
diff --git a/java/com/google/gerrit/common/data/testing/BUILD b/java/com/google/gerrit/common/data/testing/BUILD
index 83f1c06..3899e39 100644
--- a/java/com/google/gerrit/common/data/testing/BUILD
+++ b/java/com/google/gerrit/common/data/testing/BUILD
@@ -6,6 +6,6 @@
deps = [
"//java/com/google/gerrit/common:server",
"//java/com/google/gerrit/reviewdb:server",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/extensions/common/testing/BUILD b/java/com/google/gerrit/extensions/common/testing/BUILD
index 82dd425..94fecbf 100644
--- a/java/com/google/gerrit/extensions/common/testing/BUILD
+++ b/java/com/google/gerrit/extensions/common/testing/BUILD
@@ -6,7 +6,7 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/truth",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/extensions/restapi/testing/BUILD b/java/com/google/gerrit/extensions/restapi/testing/BUILD
index d035816..434591e 100644
--- a/java/com/google/gerrit/extensions/restapi/testing/BUILD
+++ b/java/com/google/gerrit/extensions/restapi/testing/BUILD
@@ -6,6 +6,6 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/truth",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/git/testing/BUILD b/java/com/google/gerrit/git/testing/BUILD
index 0b83560..4900339 100644
--- a/java/com/google/gerrit/git/testing/BUILD
+++ b/java/com/google/gerrit/git/testing/BUILD
@@ -7,8 +7,8 @@
deps = [
"//java/com/google/gerrit/common:annotations",
"//lib:guava",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/java/com/google/gerrit/gpg/server/PostGpgKeys.java b/java/com/google/gerrit/gpg/server/PostGpgKeys.java
index e487a54..4b92ec3 100644
--- a/java/com/google/gerrit/gpg/server/PostGpgKeys.java
+++ b/java/com/google/gerrit/gpg/server/PostGpgKeys.java
@@ -259,10 +259,10 @@
if (accountStates.size() > 1) {
StringBuilder msg = new StringBuilder();
- msg.append("GPG key ").append(extIdKey.get()).append(" associated with multiple accounts: ");
- Joiner.on(", ")
- .appendTo(msg, Lists.transform(accountStates, AccountState.ACCOUNT_ID_FUNCTION));
- log.error(msg.toString());
+ msg.append("GPG key ")
+ .append(extIdKey.get())
+ .append(" associated with multiple accounts: ")
+ .append(Lists.transform(accountStates, AccountState.ACCOUNT_ID_FUNCTION));
throw new IllegalStateException(msg.toString());
}
diff --git a/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java b/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
index 55bd4d5..6174644 100644
--- a/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
+++ b/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
@@ -167,6 +167,8 @@
rsp.sendError(SC_UNAUTHORIZED);
return false;
} catch (AuthenticationFailedException e) {
+ // This exception is thrown if the user provided wrong credentials, we don't need to log a
+ // stacktrace for it.
log.warn(authenticationFailedMsg(username, req) + ": " + e.getMessage());
rsp.sendError(SC_UNAUTHORIZED);
return false;
diff --git a/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java b/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
index 24ba4ac..4671475 100644
--- a/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
+++ b/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
@@ -30,6 +30,7 @@
import com.google.gerrit.server.account.AccountUserNameException;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.AuthResult;
+import com.google.gerrit.server.account.AuthenticationFailedException;
import com.google.gerrit.server.auth.AuthenticationUnavailableException;
import com.google.gwtexpui.server.CacheHeaders;
import com.google.inject.Inject;
@@ -126,10 +127,16 @@
} catch (AuthenticationUnavailableException e) {
sendForm(req, res, "Authentication unavailable at this time.");
return;
- } catch (AccountException e) {
- log.info(String.format("'%s' failed to sign in: %s", username, e.getMessage()));
+ } catch (AuthenticationFailedException e) {
+ // This exception is thrown if the user provided wrong credentials, we don't need to log a
+ // stacktrace for it.
+ log.warn("'{}' failed to sign in: {}", username, e.getMessage());
sendForm(req, res, "Invalid username or password.");
return;
+ } catch (AccountException e) {
+ log.warn("'{}' failed to sign in", username, e);
+ sendForm(req, res, "Authentication failed.");
+ return;
} catch (RuntimeException e) {
log.error("LDAP authentication failed", e);
sendForm(req, res, "Authentication unavailable at this time.");
diff --git a/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java b/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
index 5b60a36f..cc22d24 100644
--- a/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
+++ b/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
@@ -659,7 +659,7 @@
dst.close();
}
} catch (IOException e) {
- log.debug("Unexpected error copying input to CGI", e);
+ log.error("Unexpected error copying input to CGI", e);
}
},
"Gitweb-InputFeeder")
@@ -669,14 +669,19 @@
private void copyStderrToLog(InputStream in) {
new Thread(
() -> {
+ StringBuilder b = new StringBuilder();
try (BufferedReader br =
new BufferedReader(new InputStreamReader(in, ISO_8859_1.name()))) {
String line;
while ((line = br.readLine()) != null) {
- log.error("CGI: " + line);
+ if (b.length() > 0) {
+ b.append('\n');
+ }
+ b.append("CGI: ").append(line);
}
+ log.error(b.toString());
} catch (IOException e) {
- log.debug("Unexpected error copying stderr from CGI", e);
+ log.error("Unexpected error copying stderr from CGI", e);
}
},
"Gitweb-ErrorLogger")
diff --git a/java/com/google/gerrit/httpd/raw/BazelBuild.java b/java/com/google/gerrit/httpd/raw/BazelBuild.java
index 85453fb..f52792c 100644
--- a/java/com/google/gerrit/httpd/raw/BazelBuild.java
+++ b/java/com/google/gerrit/httpd/raw/BazelBuild.java
@@ -17,6 +17,7 @@
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.nio.charset.StandardCharsets.UTF_8;
+import com.google.common.base.Joiner;
import com.google.common.escape.Escaper;
import com.google.common.html.HtmlEscapers;
import com.google.common.io.ByteStreams;
@@ -62,7 +63,8 @@
try {
status = rebuild.waitFor();
} catch (InterruptedException e) {
- throw new InterruptedIOException("interrupted waiting for " + proc.toString());
+ throw new InterruptedIOException(
+ "interrupted waiting for: " + Joiner.on(' ').join(proc.command()));
}
if (status != 0) {
log.warn("build failed: " + new String(out, UTF_8));
diff --git a/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java b/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
index 7256e8c..bc2846a 100644
--- a/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
+++ b/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
@@ -41,7 +41,7 @@
return new OperatingSystemMXBeanProvider(sys);
}
} catch (ReflectiveOperationException e) {
- log.debug(String.format("No implementation for %s: %s", name, e.getMessage()));
+ log.debug("No implementation for {}", name, e);
}
}
log.warn("No implementation of UnixOperatingSystemMXBean found");
diff --git a/java/com/google/gerrit/pgm/http/jetty/JettyServer.java b/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
index b6eac05..25a28a4 100644
--- a/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
+++ b/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
@@ -69,13 +69,9 @@
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.eclipse.jgit.lib.Config;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@Singleton
public class JettyServer {
- private static final Logger log = LoggerFactory.getLogger(JettyServer.class);
-
static class Lifecycle implements LifecycleListener {
private final JettyServer server;
private final Config cfg;
@@ -425,9 +421,8 @@
"/*",
EnumSet.of(DispatcherType.REQUEST, DispatcherType.ASYNC));
} catch (Throwable e) {
- String errorMessage = "Unable to instantiate front-end HTTP Filter " + filterClassName;
- log.error(errorMessage, e);
- throw new IllegalArgumentException(errorMessage, e);
+ throw new IllegalArgumentException(
+ "Unable to instantiate front-end HTTP Filter " + filterClassName, e);
}
}
diff --git a/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java b/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
index c1112ae..5073200 100644
--- a/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
+++ b/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
@@ -30,7 +30,6 @@
import org.slf4j.LoggerFactory;
public class AllProjectsConfig extends VersionedMetaDataOnInit {
-
private static final Logger log = LoggerFactory.getLogger(AllProjectsConfig.class);
private Config cfg;
@@ -65,7 +64,7 @@
return GroupList.parse(
new Project.NameKey(project),
readUTF8(GroupList.FILE_NAME),
- GroupList.createLoggerSink(GroupList.FILE_NAME, log));
+ error -> log.error("Error parsing file {}: {}", GroupList.FILE_NAME, error.getMessage()));
}
public void save(String pluginName, String message) throws IOException, ConfigInvalidException {
diff --git a/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
index 631e7f5..2958464 100644
--- a/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
+++ b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
@@ -15,6 +15,7 @@
package com.google.gerrit.reviewdb.server;
import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gwtorm.protobuf.CodecFactory;
@@ -27,6 +28,9 @@
public static final ProtobufCodec<Change> CHANGE_CODEC = CodecFactory.encoder(Change.class);
+ public static final ProtobufCodec<ChangeMessage> MESSAGE_CODEC =
+ CodecFactory.encoder(ChangeMessage.class);
+
public static final ProtobufCodec<PatchSet> PATCH_SET_CODEC =
CodecFactory.encoder(PatchSet.class);
diff --git a/java/com/google/gerrit/server/account/AccountsUpdate.java b/java/com/google/gerrit/server/account/AccountsUpdate.java
index 2f36cf2..996e602 100644
--- a/java/com/google/gerrit/server/account/AccountsUpdate.java
+++ b/java/com/google/gerrit/server/account/AccountsUpdate.java
@@ -106,7 +106,8 @@
* <li>binding {@link GitReferenceUpdated#DISABLED} and
* <li>passing an {@link
* com.google.gerrit.server.account.externalids.ExternalIdNotes.FactoryNoReindex} factory as
- * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser, ExternalIdNotesLoader)}
+ * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser,
+ * ExternalIdNotes.ExternalIdNotesLoader)}
* </ul>
*
* <p>If there are concurrent account updates updating the user branch in NoteDb may fail with
diff --git a/java/com/google/gerrit/server/account/VersionedAccountDestinations.java b/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
index a57dc7b..1064546 100644
--- a/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
+++ b/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
@@ -16,8 +16,6 @@
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.RefNames;
-import com.google.gerrit.server.git.ValidationError;
-import com.google.gerrit.server.git.meta.TabFile;
import com.google.gerrit.server.git.meta.VersionedMetaData;
import java.io.IOException;
import org.eclipse.jgit.errors.ConfigInvalidException;
@@ -61,17 +59,15 @@
String path = p.path;
if (path.startsWith(prefix)) {
String label = path.substring(prefix.length());
- ValidationError.Sink errors = TabFile.createLoggerSink(path, log);
- destinations.parseLabel(label, readUTF8(path), errors);
+ destinations.parseLabel(
+ label,
+ readUTF8(path),
+ error -> log.error("Error parsing file {}: {}", path, error.getMessage()));
}
}
}
}
- public ValidationError.Sink createSink(String file) {
- return ValidationError.createLoggerSink(file, log);
- }
-
@Override
protected boolean onSave(CommitBuilder commit) throws IOException, ConfigInvalidException {
throw new UnsupportedOperationException("Cannot yet save destinations");
diff --git a/java/com/google/gerrit/server/account/VersionedAccountQueries.java b/java/com/google/gerrit/server/account/VersionedAccountQueries.java
index b43a65d..b021d24 100644
--- a/java/com/google/gerrit/server/account/VersionedAccountQueries.java
+++ b/java/com/google/gerrit/server/account/VersionedAccountQueries.java
@@ -51,7 +51,9 @@
protected void onLoad() throws IOException, ConfigInvalidException {
queryList =
QueryList.parse(
- readUTF8(QueryList.FILE_NAME), QueryList.createLoggerSink(QueryList.FILE_NAME, log));
+ readUTF8(QueryList.FILE_NAME),
+ error ->
+ log.error("Error parsing file {}: {}", QueryList.FILE_NAME, error.getMessage()));
}
@Override
diff --git a/java/com/google/gerrit/server/auth/ldap/Helper.java b/java/com/google/gerrit/server/auth/ldap/Helper.java
index 5af730f..fe57374 100644
--- a/java/com/google/gerrit/server/auth/ldap/Helper.java
+++ b/java/com/google/gerrit/server/auth/ldap/Helper.java
@@ -24,6 +24,7 @@
import com.google.gerrit.server.auth.NoSuchUserException;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.util.ssl.BlindHostnameVerifier;
import com.google.gerrit.util.ssl.BlindSSLSocketFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
@@ -139,6 +140,7 @@
SSLSocketFactory sslfactory = null;
if (!sslVerify) {
sslfactory = (SSLSocketFactory) BlindSSLSocketFactory.getDefault();
+ tls.setHostnameVerifier(BlindHostnameVerifier.getInstance());
}
tls.negotiate(sslfactory);
ctx.addToEnvironment(STARTTLS_PROPERTY, tls);
@@ -196,7 +198,7 @@
Throwables.throwIfInstanceOf(e.getException(), IOException.class);
Throwables.throwIfInstanceOf(e.getException(), NamingException.class);
Throwables.throwIfInstanceOf(e.getException(), RuntimeException.class);
- LdapRealm.log.warn("Internal error", e.getException());
+ log.warn("Internal error", e.getException());
return null;
} finally {
ctx.logout();
@@ -343,7 +345,7 @@
}
}
} catch (NamingException e) {
- LdapRealm.log.warn("Could not find group " + groupDN, e);
+ log.warn("Could not find group {}", groupDN, e);
}
cachedParentsDNs = dns.build();
parentGroups.put(groupDN, cachedParentsDNs);
@@ -474,10 +476,10 @@
try {
return LdapType.guessType(ctx);
} catch (NamingException e) {
- LdapRealm.log.warn(
- "Cannot discover type of LDAP server at "
- + server
- + ", assuming the server is RFC 2307 compliant.",
+ log.warn(
+ "Cannot discover type of LDAP server at {},"
+ + " assuming the server is RFC 2307 compliant.",
+ server,
e);
return LdapType.RFC_2307;
}
diff --git a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
index 6184674..b83c7b2 100644
--- a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
+++ b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
@@ -61,7 +61,8 @@
@Singleton
class LdapRealm extends AbstractRealm {
- static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
+ private static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
+
static final String LDAP = "com.sun.jndi.ldap.LdapCtxFactory";
static final String USERNAME = "username";
diff --git a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
index f380051..13a09a1 100644
--- a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
+++ b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
@@ -32,7 +32,6 @@
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
-import java.io.IOException;
@Singleton
public class OAuthTokenCache {
@@ -70,12 +69,7 @@
@Override
public OAuthToken deserialize(byte[] in) {
- OAuthTokenProto proto;
- try {
- proto = OAuthTokenProto.parseFrom(in);
- } catch (IOException e) {
- throw new IllegalArgumentException("failed to deserialize OAuthToken");
- }
+ OAuthTokenProto proto = ProtoCacheSerializers.parseUnchecked(OAuthTokenProto.parser(), in);
return new OAuthToken(
proto.getToken(),
proto.getSecret(),
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
index 429f5ab..794d3bb 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
@@ -34,7 +34,12 @@
PersistentCacheBinding<K, V> version(int version);
- /** Set the total on-disk limit of the cache */
+ /**
+ * Set the total on-disk limit of the cache.
+ *
+ * <p>If 0 or negative, persistence for the cache is disabled by default, but may still be
+ * overridden in the config.
+ */
PersistentCacheBinding<K, V> diskLimit(long limit);
PersistentCacheBinding<K, V> keySerializer(CacheSerializer<K> keySerializer);
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
index 405de4f..46a9e61 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
@@ -39,6 +39,7 @@
CacheModule module, String name, TypeLiteral<K> keyType, TypeLiteral<V> valType) {
super(module, name, keyType, valType);
version = -1;
+ diskLimit = 128 << 20;
}
@Inject(optional = true)
@@ -93,10 +94,7 @@
@Override
public long diskLimit() {
- if (diskLimit > 0) {
- return diskLimit;
- }
- return 128 << 20;
+ return diskLimit;
}
@Override
diff --git a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
index 795df72..c6fc0b9 100644
--- a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
+++ b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
@@ -14,17 +14,24 @@
package com.google.gerrit.server.cache;
+import static com.google.common.base.Preconditions.checkArgument;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import com.google.gwtorm.protobuf.ProtobufCodec;
+import com.google.protobuf.ByteString;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.MessageLite;
+import com.google.protobuf.Parser;
import java.io.IOException;
+import org.eclipse.jgit.lib.ObjectId;
/** Static utilities for writing protobuf-based {@link CacheSerializer} implementations. */
public class ProtoCacheSerializers {
/**
* Serializes a proto to a byte array.
*
- * <p>Guarantees deterministic serialization and thus is suitable for use as a persistent cache
- * key. Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
* deterministic.
*
* @param message the proto message to serialize.
@@ -39,7 +46,80 @@
cout.checkNoSpaceLeft();
return bytes;
} catch (IOException e) {
- throw new IllegalStateException("exception writing to byte array");
+ throw new IllegalStateException("exception writing to byte array", e);
+ }
+ }
+
+ /**
+ * Serializes an object to a {@link ByteString} using a protobuf codec.
+ *
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link ProtobufCodec#encodeToByteString(Object)}, which is not
+ * guaranteed deterministic.
+ *
+ * @param object the object to serialize.
+ * @param codec codec for serializing.
+ * @return a {@code ByteString} with the message contents.
+ */
+ public static <T> ByteString toByteString(T object, ProtobufCodec<T> codec) {
+ try (ByteString.Output bout = ByteString.newOutput()) {
+ CodedOutputStream cout = CodedOutputStream.newInstance(bout);
+ cout.useDeterministicSerialization();
+ codec.encode(object, cout);
+ cout.flush();
+ return bout.toByteString();
+ } catch (IOException e) {
+ throw new IllegalStateException("exception writing to ByteString", e);
+ }
+ }
+
+ /**
+ * Parses a byte array to a protobuf message.
+ *
+ * @param parser parser for the proto type.
+ * @param in byte array with the message contents.
+ * @return parsed proto.
+ */
+ public static <M extends MessageLite> M parseUnchecked(Parser<M> parser, byte[] in) {
+ try {
+ return parser.parseFrom(in);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("exception parsing byte array to proto", e);
+ }
+ }
+
+ /**
+ * Helper for serializing {@link ObjectId} instances to/from protobuf fields.
+ *
+ * <p>Reuse a single instance's {@link #toByteString(ObjectId)} and {@link
+ * #fromByteString(ByteString)} within a single {@link CacheSerializer#serialize} or {@link
+ * CacheSerializer#deserialize} method body to minimize allocation of temporary buffers.
+ *
+ * <p><strong>Note:</strong> This class is not threadsafe. Instances must not be stored in {@link
+ * CacheSerializer} fields if the serializer instances will be used from multiple threads.
+ */
+ public static class ObjectIdConverter {
+ public static ObjectIdConverter create() {
+ return new ObjectIdConverter();
+ }
+
+ private final byte[] buf = new byte[OBJECT_ID_LENGTH];
+
+ private ObjectIdConverter() {}
+
+ public ByteString toByteString(ObjectId id) {
+ id.copyRawTo(buf, 0);
+ return ByteString.copyFrom(buf);
+ }
+
+ public ObjectId fromByteString(ByteString in) {
+ checkArgument(
+ in.size() == OBJECT_ID_LENGTH,
+ "expected ByteString of length %s: %s",
+ OBJECT_ID_LENGTH,
+ in);
+ in.copyTo(buf, 0);
+ return ObjectId.fromRaw(buf);
}
}
diff --git a/java/com/google/gerrit/server/cache/testing/BUILD b/java/com/google/gerrit/server/cache/testing/BUILD
index c1293f9..ed412af 100644
--- a/java/com/google/gerrit/server/cache/testing/BUILD
+++ b/java/com/google/gerrit/server/cache/testing/BUILD
@@ -7,7 +7,7 @@
deps = [
"//lib:guava",
"//lib:protobuf",
- "//lib:truth",
"//lib/commons:lang3",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
index 78900cb..19c5b67 100644
--- a/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
+++ b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
@@ -22,8 +22,10 @@
import com.google.common.truth.FailureMetadata;
import com.google.common.truth.Subject;
import java.lang.reflect.Field;
+import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
+import java.util.Arrays;
import java.util.Map;
import org.apache.commons.lang3.reflect.FieldUtils;
@@ -62,6 +64,13 @@
super(metadata, actual);
}
+ public void isAbstract() {
+ isNotNull();
+ assertWithMessage("expected class %s to be abstract", actual().getName())
+ .that(Modifier.isAbstract(actual().getModifiers()))
+ .isTrue();
+ }
+
public void isConcrete() {
isNotNull();
assertWithMessage("expected class %s to be concrete", actual().getName())
@@ -78,4 +87,17 @@
.collect(toImmutableMap(Field::getName, Field::getGenericType)))
.containsExactlyEntriesIn(expectedFields);
}
+
+ public void hasAutoValueMethods(Map<String, Type> expectedMethods) {
+ // Would be nice if we could check clazz is an @AutoValue, but the retention is not RUNTIME.
+ isAbstract();
+ assertThat(
+ Arrays.stream(actual().getDeclaredMethods())
+ .filter(m -> !Modifier.isStatic(m.getModifiers()))
+ .filter(m -> Modifier.isAbstract(m.getModifiers()))
+ .filter(m -> m.getParameters().length == 0)
+ .collect(toImmutableMap(Method::getName, Method::getGenericReturnType)))
+ .named("no-argument abstract methods on %s", actual().getName())
+ .isEqualTo(expectedMethods);
+ }
}
diff --git a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
index a4eb90f..f3ab847 100644
--- a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
+++ b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
@@ -15,8 +15,8 @@
package com.google.gerrit.server.change;
import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
+import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.Cache;
import com.google.common.cache.Weigher;
@@ -31,6 +31,7 @@
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.EnumCacheSerializer;
import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
import com.google.gerrit.server.cache.proto.Cache.ChangeKindKeyProto;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.GitRepositoryManager;
@@ -41,8 +42,6 @@
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.name.Named;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
@@ -51,8 +50,8 @@
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.eclipse.jgit.errors.LargeObjectException;
+import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.Repository;
@@ -106,7 +105,7 @@
ObjectId prior,
ObjectId next) {
try {
- Key key = new Key(prior, next, useRecursiveMerge);
+ Key key = Key.create(prior, next, useRecursiveMerge);
return new Loader(key, repoManager, project, rw, repoConfig).call();
} catch (IOException e) {
log.warn(
@@ -127,78 +126,44 @@
}
}
- public static class Key {
- private transient ObjectId prior;
- private transient ObjectId next;
- private transient String strategyName;
-
- private Key(ObjectId prior, ObjectId next, boolean useRecursiveMerge) {
- checkNotNull(next, "next");
- String strategyName = MergeUtil.mergeStrategyName(true, useRecursiveMerge);
- this.prior = prior.copy();
- this.next = next.copy();
- this.strategyName = strategyName;
+ @AutoValue
+ public abstract static class Key {
+ public static Key create(AnyObjectId prior, AnyObjectId next, String strategyName) {
+ return new AutoValue_ChangeKindCacheImpl_Key(prior.copy(), next.copy(), strategyName);
}
- public Key(ObjectId prior, ObjectId next, String strategyName) {
- this.prior = prior;
- this.next = next;
- this.strategyName = strategyName;
+ private static Key create(AnyObjectId prior, AnyObjectId next, boolean useRecursiveMerge) {
+ return create(prior, next, MergeUtil.mergeStrategyName(true, useRecursiveMerge));
}
- public ObjectId getPrior() {
- return prior;
- }
+ public abstract ObjectId prior();
- public ObjectId getNext() {
- return next;
- }
+ public abstract ObjectId next();
- public String getStrategyName() {
- return strategyName;
- }
-
- @Override
- public boolean equals(Object o) {
- if (o instanceof Key) {
- Key k = (Key) o;
- return Objects.equals(prior, k.prior)
- && Objects.equals(next, k.next)
- && Objects.equals(strategyName, k.strategyName);
- }
- return false;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(prior, next, strategyName);
- }
+ public abstract String strategyName();
@VisibleForTesting
static class Serializer implements CacheSerializer<Key> {
@Override
public byte[] serialize(Key object) {
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- ChangeKindKeyProto.Builder b = ChangeKindKeyProto.newBuilder();
- object.getPrior().copyRawTo(buf, 0);
- b.setPrior(ByteString.copyFrom(buf));
- object.getNext().copyRawTo(buf, 0);
- b.setNext(ByteString.copyFrom(buf));
- b.setStrategyName(object.getStrategyName());
- return ProtoCacheSerializers.toByteArray(b.build());
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ ChangeKindKeyProto.newBuilder()
+ .setPrior(idConverter.toByteString(object.prior()))
+ .setNext(idConverter.toByteString(object.next()))
+ .setStrategyName(object.strategyName())
+ .build());
}
@Override
public Key deserialize(byte[] in) {
- try {
- ChangeKindKeyProto proto = ChangeKindKeyProto.parseFrom(in);
- return new Key(
- ObjectId.fromRaw(proto.getPrior().toByteArray()),
- ObjectId.fromRaw(proto.getNext().toByteArray()),
- proto.getStrategyName());
- } catch (InvalidProtocolBufferException e) {
- throw new IllegalArgumentException("Failed to deserialize object", e);
- }
+ ChangeKindKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeKindKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return create(
+ idConverter.fromByteString(proto.getPrior()),
+ idConverter.fromByteString(proto.getNext()),
+ proto.getStrategyName());
}
}
}
@@ -231,7 +196,7 @@
@SuppressWarnings("resource") // Resources are manually managed.
@Override
public ChangeKind call() throws IOException {
- if (Objects.equals(key.prior, key.next)) {
+ if (Objects.equals(key.prior(), key.next())) {
return ChangeKind.NO_CODE_CHANGE;
}
@@ -244,9 +209,9 @@
config = repo.getConfig();
}
try {
- RevCommit prior = rw.parseCommit(key.prior);
+ RevCommit prior = rw.parseCommit(key.prior());
rw.parseBody(prior);
- RevCommit next = rw.parseCommit(key.next);
+ RevCommit next = rw.parseCommit(key.next());
rw.parseBody(next);
if (!next.getFullMessage().equals(prior.getFullMessage())) {
@@ -277,7 +242,7 @@
// having the same tree as would exist when the prior commit is
// cherry-picked onto the next commit's new first parent.
try (ObjectInserter ins = new InMemoryInserter(rw.getObjectReader())) {
- ThreeWayMerger merger = MergeUtil.newThreeWayMerger(ins, config, key.strategyName);
+ ThreeWayMerger merger = MergeUtil.newThreeWayMerger(ins, config, key.strategyName());
merger.setBase(prior.getParent(0));
if (merger.merge(next.getParent(0), prior)
&& merger.getResultTreeId().equals(next.getTree())) {
@@ -321,7 +286,7 @@
}
private static boolean isSameDeltaAndTree(RevCommit prior, RevCommit next) {
- if (next.getTree() != prior.getTree()) {
+ if (!Objects.equals(next.getTree(), prior.getTree())) {
return false;
}
@@ -334,7 +299,7 @@
// Make sure that the prior/next delta is the same - not just the tree.
// This is done by making sure that the parent trees are equal.
for (int i = 0; i < prior.getParentCount(); i++) {
- if (next.getParent(i).getTree() != prior.getParent(i).getTree()) {
+ if (!Objects.equals(next.getParent(i).getTree(), prior.getParent(i).getTree())) {
return false;
}
}
@@ -347,7 +312,7 @@
public int weigh(Key key, ChangeKind changeKind) {
return 16
+ 2 * 36
- + 2 * key.strategyName.length() // Size of Key, 64 bit JVM
+ + 2 * key.strategyName().length() // Size of Key, 64 bit JVM
+ 2 * changeKind.name().length(); // Size of ChangeKind, 64 bit JVM
}
}
@@ -377,7 +342,7 @@
ObjectId prior,
ObjectId next) {
try {
- Key key = new Key(prior, next, useRecursiveMerge);
+ Key key = Key.create(prior, next, useRecursiveMerge);
return cache.get(key, new Loader(key, repoManager, project, rw, repoConfig));
} catch (ExecutionException e) {
log.warn("Cannot check trivial rebase of new patch set " + next.name() + " in " + project, e);
diff --git a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
index a192228..b57be15 100644
--- a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
+++ b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
@@ -29,6 +29,7 @@
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
import com.google.gerrit.server.cache.proto.Cache.MergeabilityKeyProto;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
@@ -37,13 +38,10 @@
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
-import com.google.protobuf.ByteString;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
-import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
@@ -145,33 +143,24 @@
@Override
public byte[] serialize(EntryKey object) {
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- MergeabilityKeyProto.Builder b = MergeabilityKeyProto.newBuilder();
- object.getCommit().copyRawTo(buf, 0);
- b.setCommit(ByteString.copyFrom(buf));
- object.getInto().copyRawTo(buf, 0);
- b.setInto(ByteString.copyFrom(buf));
- b.setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()));
- b.setMergeStrategy(object.getMergeStrategy());
- return ProtoCacheSerializers.toByteArray(b.build());
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ MergeabilityKeyProto.newBuilder()
+ .setCommit(idConverter.toByteString(object.getCommit()))
+ .setInto(idConverter.toByteString(object.getInto()))
+ .setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()))
+ .setMergeStrategy(object.getMergeStrategy())
+ .build());
}
@Override
public EntryKey deserialize(byte[] in) {
- MergeabilityKeyProto proto;
- try {
- proto = MergeabilityKeyProto.parseFrom(in);
- } catch (IOException e) {
- throw new IllegalArgumentException("Failed to deserialize mergeability cache key");
- }
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- proto.getCommit().copyTo(buf, 0);
- ObjectId commit = ObjectId.fromRaw(buf);
- proto.getInto().copyTo(buf, 0);
- ObjectId into = ObjectId.fromRaw(buf);
+ MergeabilityKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(MergeabilityKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
return new EntryKey(
- commit,
- into,
+ idConverter.fromByteString(proto.getCommit()),
+ idConverter.fromByteString(proto.getInto()),
SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
proto.getMergeStrategy());
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollection.java b/java/com/google/gerrit/server/git/GarbageCollection.java
index 3bf89c7..997907e 100644
--- a/java/com/google/gerrit/server/git/GarbageCollection.java
+++ b/java/com/google/gerrit/server/git/GarbageCollection.java
@@ -41,9 +41,6 @@
public class GarbageCollection {
private static final Logger log = LoggerFactory.getLogger(GarbageCollection.class);
- public static final String LOG_NAME = "gc_log";
- private static final Logger gcLog = LoggerFactory.getLogger(LOG_NAME);
-
private final GitRepositoryManager repoManager;
private final GarbageCollectionQueue gcQueue;
private final GcConfig gcConfig;
@@ -142,7 +139,7 @@
}
b.append(s);
}
- gcLog.info(b.toString());
+ log.info(b.toString());
}
private static void logGcConfiguration(
@@ -182,7 +179,6 @@
print(writer, "failed.\n\n");
StringBuilder b = new StringBuilder();
b.append("[").append(projectName.get()).append("]");
- gcLog.error(b.toString(), e);
log.error(b.toString(), e);
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
index e03ef67..8796fdf 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
@@ -26,6 +26,8 @@
import org.eclipse.jgit.lib.Config;
public class GarbageCollectionLogFile implements LifecycleListener {
+ private static final String LOG_NAME = "gc_log";
+
@Inject
public GarbageCollectionLogFile(SitePaths sitePaths, @GerritServerConfig Config config) {
if (SystemLog.shouldConfigure()) {
@@ -38,15 +40,20 @@
@Override
public void stop() {
- LogManager.getLogger(GarbageCollection.LOG_NAME).removeAllAppenders();
+ LogManager.getLogger(GarbageCollection.class).removeAllAppenders();
+ LogManager.getLogger(GarbageCollectionRunner.class).removeAllAppenders();
}
private static void initLogSystem(Path logdir, boolean rotate) {
- Logger gcLogger = LogManager.getLogger(GarbageCollection.LOG_NAME);
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollection.class));
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollectionRunner.class));
+ }
+
+ private static void initGcLogger(Path logdir, boolean rotate, Logger gcLogger) {
gcLogger.removeAllAppenders();
gcLogger.addAppender(
SystemLog.createAppender(
- logdir, GarbageCollection.LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
+ logdir, LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
gcLogger.setAdditivity(false);
}
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
index e4316c5..054e56a 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
@@ -24,7 +24,7 @@
/** Runnable to enable scheduling gc to run periodically */
public class GarbageCollectionRunner implements Runnable {
- private static final Logger gcLog = LoggerFactory.getLogger(GarbageCollection.LOG_NAME);
+ private static final Logger log = LoggerFactory.getLogger(GarbageCollectionRunner.class);
static class Lifecycle implements LifecycleListener {
private final WorkQueue queue;
@@ -61,7 +61,7 @@
@Override
public void run() {
- gcLog.info("Triggering gc on all repositories");
+ log.info("Triggering gc on all repositories");
garbageCollectionFactory.create().run(Lists.newArrayList(projectCache.all()));
}
diff --git a/java/com/google/gerrit/server/git/ValidationError.java b/java/com/google/gerrit/server/git/ValidationError.java
index 2fd65d2..28d5171 100644
--- a/java/com/google/gerrit/server/git/ValidationError.java
+++ b/java/com/google/gerrit/server/git/ValidationError.java
@@ -15,7 +15,6 @@
package com.google.gerrit.server.git;
import java.util.Objects;
-import org.slf4j.Logger;
/** Indicates a problem with Git based data. */
public class ValidationError {
@@ -46,10 +45,6 @@
void error(ValidationError error);
}
- public static Sink createLoggerSink(String message, Logger log) {
- return error -> log.error(message + error.getMessage());
- }
-
@Override
public boolean equals(Object o) {
if (o == this) {
diff --git a/java/com/google/gerrit/server/git/meta/TabFile.java b/java/com/google/gerrit/server/git/meta/TabFile.java
index 68950602..ef25cd8 100644
--- a/java/com/google/gerrit/server/git/meta/TabFile.java
+++ b/java/com/google/gerrit/server/git/meta/TabFile.java
@@ -24,7 +24,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.slf4j.Logger;
public class TabFile {
@FunctionalInterface
@@ -141,8 +140,4 @@
}
return r.toString();
}
-
- public static ValidationError.Sink createLoggerSink(String file, Logger log) {
- return ValidationError.createLoggerSink("Error parsing file " + file + ": ", log);
- }
}
diff --git a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
index 5ce3c1c..bff2952 100644
--- a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
+++ b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
@@ -142,8 +142,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setMemberModification(MemberModification)} in order to combine multiple member additions,
- * deletions, or other modifications into one update.
+ * #setMemberModification(InternalGroupUpdate.MemberModification)} in order to combine multiple
+ * member additions, deletions, or other modifications into one update.
*/
public abstract MemberModification getMemberModification();
@@ -155,8 +155,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setSubgroupModification(SubgroupModification)} in order to combine multiple subgroup
- * additions, deletions, or other modifications into one update.
+ * #setSubgroupModification(InternalGroupUpdate.SubgroupModification)} in order to combine
+ * multiple subgroup additions, deletions, or other modifications into one update.
*/
public abstract SubgroupModification getSubgroupModification();
diff --git a/java/com/google/gerrit/server/group/testing/BUILD b/java/com/google/gerrit/server/group/testing/BUILD
index 134de78..8b8cd00 100644
--- a/java/com/google/gerrit/server/group/testing/BUILD
+++ b/java/com/google/gerrit/server/group/testing/BUILD
@@ -8,7 +8,8 @@
"//java/com/google/gerrit/common:server",
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
- "//lib:truth",
+ "//lib:guava",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/server/index/change/ChangeField.java b/java/com/google/gerrit/server/index/change/ChangeField.java
index 5db347e..82253f2 100644
--- a/java/com/google/gerrit/server/index/change/ChangeField.java
+++ b/java/com/google/gerrit/server/index/change/ChangeField.java
@@ -643,7 +643,7 @@
* <p>Stored fields need to use a stable format over a long period; this type insulates the index
* from implementation changes in SubmitRecord itself.
*/
- static class StoredSubmitRecord {
+ public static class StoredSubmitRecord {
static class StoredLabel {
String label;
SubmitRecord.Label.Status status;
@@ -661,7 +661,7 @@
List<StoredRequirement> requirements;
String errorMessage;
- StoredSubmitRecord(SubmitRecord rec) {
+ public StoredSubmitRecord(SubmitRecord rec) {
this.status = rec.status;
this.errorMessage = rec.errorMessage;
if (rec.labels != null) {
@@ -686,7 +686,7 @@
}
}
- private SubmitRecord toSubmitRecord() {
+ public SubmitRecord toSubmitRecord() {
SubmitRecord rec = new SubmitRecord();
rec.status = status;
rec.errorMessage = errorMessage;
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
index 5658569..06d940e 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
@@ -25,6 +25,10 @@
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.cache.CacheModule;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
import com.google.gerrit.server.notedb.AbstractChangeNotes.Args;
import com.google.gerrit.server.notedb.ChangeNotesCommit.ChangeNotesRevWalk;
import com.google.inject.Inject;
@@ -49,20 +53,53 @@
@Override
protected void configure() {
bind(ChangeNotesCache.class);
- cache(CACHE_NAME, Key.class, ChangeNotesState.class)
+ persist(CACHE_NAME, Key.class, ChangeNotesState.class)
.weigher(Weigher.class)
- .maximumWeight(10 << 20);
+ .maximumWeight(10 << 20)
+ .diskLimit(-1)
+ .version(1)
+ .keySerializer(Key.Serializer.INSTANCE)
+ .valueSerializer(ChangeNotesState.Serializer.INSTANCE);
}
};
}
@AutoValue
public abstract static class Key {
+ static Key create(Project.NameKey project, Change.Id changeId, ObjectId id) {
+ return new AutoValue_ChangeNotesCache_Key(project, changeId, id.copy());
+ }
+
abstract Project.NameKey project();
abstract Change.Id changeId();
abstract ObjectId id();
+
+ @VisibleForTesting
+ static enum Serializer implements CacheSerializer<Key> {
+ INSTANCE;
+
+ @Override
+ public byte[] serialize(Key object) {
+ return ProtoCacheSerializers.toByteArray(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject(object.project().get())
+ .setChangeId(object.changeId().get())
+ .setId(ObjectIdConverter.create().toByteString(object.id()))
+ .build());
+ }
+
+ @Override
+ public Key deserialize(byte[] in) {
+ ChangeNotesKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesKeyProto.parser(), in);
+ return Key.create(
+ new Project.NameKey(proto.getProject()),
+ new Change.Id(proto.getChangeId()),
+ ObjectIdConverter.create().fromByteString(proto.getId()));
+ }
+ }
}
public static class Weigher implements com.google.common.cache.Weigher<Key, ChangeNotesState> {
@@ -134,7 +171,7 @@
+ T // readOnlyUntil
+ 1 // isPrivate
+ 1 // workInProgress
- + 1; // hasReviewStarted
+ + 1; // reviewStarted
}
private static int ptr(Object o, int size) {
@@ -330,7 +367,7 @@
Value get(Project.NameKey project, Change.Id changeId, ObjectId metaId, ChangeNotesRevWalk rw)
throws IOException {
try {
- Key key = new AutoValue_ChangeNotesCache_Key(project, changeId, metaId.copy());
+ Key key = Key.create(project, changeId, metaId);
Loader loader = new Loader(key, rw);
ChangeNotesState s = cache.get(key, loader);
return new AutoValue_ChangeNotesCache_Value(s, loader.revisionNoteMap);
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesState.java b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
index 78734f9..3eb06b2 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesState.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
@@ -14,15 +14,29 @@
package com.google.gerrit.server.notedb;
+import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.ImmutableList.toImmutableList;
+import static com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap;
+import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.ProtoCacheSerializers.toByteString;
import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Table;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.reviewdb.client.Account;
@@ -34,10 +48,22 @@
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.OutputFormat;
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord;
+import com.google.gerrit.server.mail.Address;
import com.google.gerrit.server.notedb.NoteDbChangeState.PrimaryStorage;
+import com.google.gson.Gson;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.List;
@@ -95,7 +121,7 @@
@Nullable Timestamp readOnlyUntil,
boolean isPrivate,
boolean workInProgress,
- boolean hasReviewStarted,
+ boolean reviewStarted,
@Nullable Change.Id revertOf) {
checkNotNull(
metaId,
@@ -106,22 +132,22 @@
.metaId(metaId)
.changeId(changeId)
.columns(
- new AutoValue_ChangeNotesState_ChangeColumns.Builder()
+ ChangeColumns.builder()
.changeKey(changeKey)
.createdOn(createdOn)
.lastUpdatedOn(lastUpdatedOn)
.owner(owner)
.branch(branch)
+ .status(status)
.currentPatchSetId(currentPatchSetId)
.subject(subject)
.topic(topic)
.originalSubject(originalSubject)
.submissionId(submissionId)
.assignee(assignee)
- .status(status)
.isPrivate(isPrivate)
- .isWorkInProgress(workInProgress)
- .hasReviewStarted(hasReviewStarted)
+ .workInProgress(workInProgress)
+ .reviewStarted(reviewStarted)
.revertOf(revertOf)
.build())
.pastAssignees(pastAssignees)
@@ -147,10 +173,14 @@
* <p>Notable exceptions include rowVersion and noteDbState, which only make sense when read
* from NoteDb, so they cannot be cached.
*
- * <p>Fields are in listed column order.
+ * <p>Fields should match the column names in {@link Change}, and are in listed column order.
*/
@AutoValue
abstract static class ChangeColumns {
+ static Builder builder() {
+ return new AutoValue_ChangeNotesState_ChangeColumns.Builder();
+ }
+
abstract Change.Key changeKey();
abstract Timestamp createdOn();
@@ -162,6 +192,10 @@
// Project not included, as it's not stored anywhere in the meta ref.
abstract String branch();
+ // TODO(dborowitz): Use a sensible default other than null
+ @Nullable
+ abstract Change.Status status();
+
@Nullable
abstract PatchSet.Id currentPatchSetId();
@@ -178,19 +212,18 @@
@Nullable
abstract Account.Id assignee();
- // TODO(dborowitz): Use a sensible default other than null
- @Nullable
- abstract Change.Status status();
abstract boolean isPrivate();
- abstract boolean isWorkInProgress();
+ abstract boolean workInProgress();
- abstract boolean hasReviewStarted();
+ abstract boolean reviewStarted();
@Nullable
abstract Change.Id revertOf();
+ abstract Builder toBuilder();
+
@AutoValue.Builder
abstract static class Builder {
abstract Builder changeKey(Change.Key changeKey);
@@ -219,9 +252,9 @@
abstract Builder isPrivate(boolean isPrivate);
- abstract Builder isWorkInProgress(boolean isWorkInProgress);
+ abstract Builder workInProgress(boolean workInProgress);
- abstract Builder hasReviewStarted(boolean hasReviewStarted);
+ abstract Builder reviewStarted(boolean reviewStarted);
abstract Builder revertOf(@Nullable Change.Id revertOf);
@@ -327,8 +360,8 @@
change.setSubmissionId(c.submissionId());
change.setAssignee(c.assignee());
change.setPrivate(c.isPrivate());
- change.setWorkInProgress(c.isWorkInProgress());
- change.setReviewStarted(c.hasReviewStarted());
+ change.setWorkInProgress(c.workInProgress());
+ change.setReviewStarted(c.reviewStarted());
change.setRevertOf(c.revertOf());
if (!patchSets().isEmpty()) {
@@ -368,7 +401,7 @@
abstract Builder pastAssignees(Set<Account.Id> pastAssignees);
- abstract Builder hashtags(Set<String> hashtags);
+ abstract Builder hashtags(Iterable<String> hashtags);
abstract Builder patchSets(Iterable<Map.Entry<PatchSet.Id, PatchSet>> patchSets);
@@ -396,4 +429,267 @@
abstract ChangeNotesState build();
}
+
+ static enum Serializer implements CacheSerializer<ChangeNotesState> {
+ INSTANCE;
+
+ @VisibleForTesting static final Gson GSON = OutputFormat.JSON_COMPACT.newGson();
+
+ private static final Converter<String, Change.Status> STATUS_CONVERTER =
+ Enums.stringConverter(Change.Status.class);
+ private static final Converter<String, ReviewerStateInternal> REVIEWER_STATE_CONVERTER =
+ Enums.stringConverter(ReviewerStateInternal.class);
+
+ @Override
+ public byte[] serialize(ChangeNotesState object) {
+ checkArgument(object.metaId() != null, "meta ID is required in: %s", object);
+ checkArgument(object.columns() != null, "ChangeColumns is required in: %s", object);
+ ChangeNotesStateProto.Builder b = ChangeNotesStateProto.newBuilder();
+
+ b.setMetaId(ObjectIdConverter.create().toByteString(object.metaId()))
+ .setChangeId(object.changeId().get())
+ .setColumns(toChangeColumnsProto(object.columns()));
+
+ object.pastAssignees().forEach(a -> b.addPastAssignee(a.get()));
+ object.hashtags().forEach(b::addHashtag);
+ object.patchSets().forEach(e -> b.addPatchSet(toByteString(e.getValue(), PATCH_SET_CODEC)));
+ object.approvals().forEach(e -> b.addApproval(toByteString(e.getValue(), APPROVAL_CODEC)));
+
+ object.reviewers().asTable().cellSet().forEach(c -> b.addReviewer(toReviewerSetEntry(c)));
+ object
+ .reviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addReviewerByEmail(toReviewerByEmailSetEntry(c)));
+ object
+ .pendingReviewers()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewer(toReviewerSetEntry(c)));
+ object
+ .pendingReviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewerByEmail(toReviewerByEmailSetEntry(c)));
+
+ object.allPastReviewers().forEach(a -> b.addPastReviewer(a.get()));
+ object.reviewerUpdates().forEach(u -> b.addReviewerUpdate(toReviewerStatusUpdateProto(u)));
+ object
+ .submitRecords()
+ .forEach(r -> b.addSubmitRecord(GSON.toJson(new StoredSubmitRecord(r))));
+ object.changeMessages().forEach(m -> b.addChangeMessage(toByteString(m, MESSAGE_CODEC)));
+ object.publishedComments().values().forEach(c -> b.addPublishedComment(GSON.toJson(c)));
+
+ if (object.readOnlyUntil() != null) {
+ b.setReadOnlyUntil(object.readOnlyUntil().getTime()).setHasReadOnlyUntil(true);
+ }
+
+ return ProtoCacheSerializers.toByteArray(b.build());
+ }
+
+ private static ChangeColumnsProto toChangeColumnsProto(ChangeColumns cols) {
+ ChangeColumnsProto.Builder b =
+ ChangeColumnsProto.newBuilder()
+ .setChangeKey(cols.changeKey().get())
+ .setCreatedOn(cols.createdOn().getTime())
+ .setLastUpdatedOn(cols.lastUpdatedOn().getTime())
+ .setOwner(cols.owner().get())
+ .setBranch(cols.branch());
+ if (cols.currentPatchSetId() != null) {
+ b.setCurrentPatchSetId(cols.currentPatchSetId().get()).setHasCurrentPatchSetId(true);
+ }
+ b.setSubject(cols.subject());
+ if (cols.topic() != null) {
+ b.setTopic(cols.topic()).setHasTopic(true);
+ }
+ if (cols.originalSubject() != null) {
+ b.setOriginalSubject(cols.originalSubject()).setHasOriginalSubject(true);
+ }
+ if (cols.submissionId() != null) {
+ b.setSubmissionId(cols.submissionId()).setHasSubmissionId(true);
+ }
+ if (cols.assignee() != null) {
+ b.setAssignee(cols.assignee().get()).setHasAssignee(true);
+ }
+ if (cols.status() != null) {
+ b.setStatus(STATUS_CONVERTER.reverse().convert(cols.status())).setHasStatus(true);
+ }
+ b.setIsPrivate(cols.isPrivate())
+ .setWorkInProgress(cols.workInProgress())
+ .setReviewStarted(cols.reviewStarted());
+ if (cols.revertOf() != null) {
+ b.setRevertOf(cols.revertOf().get()).setHasRevertOf(true);
+ }
+ return b.build();
+ }
+
+ private static ReviewerSetEntryProto toReviewerSetEntry(
+ Table.Cell<ReviewerStateInternal, Account.Id, Timestamp> c) {
+ return ReviewerSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAccountId(c.getColumnKey().get())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerByEmailSetEntryProto toReviewerByEmailSetEntry(
+ Table.Cell<ReviewerStateInternal, Address, Timestamp> c) {
+ return ReviewerByEmailSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAddress(c.getColumnKey().toHeaderString())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerStatusUpdateProto toReviewerStatusUpdateProto(ReviewerStatusUpdate u) {
+ return ReviewerStatusUpdateProto.newBuilder()
+ .setDate(u.date().getTime())
+ .setUpdatedBy(u.updatedBy().get())
+ .setReviewer(u.reviewer().get())
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(u.state()))
+ .build();
+ }
+
+ @Override
+ public ChangeNotesState deserialize(byte[] in) {
+ ChangeNotesStateProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), in);
+ Change.Id changeId = new Change.Id(proto.getChangeId());
+
+ ChangeNotesState.Builder b =
+ builder()
+ .metaId(ObjectIdConverter.create().fromByteString(proto.getMetaId()))
+ .changeId(changeId)
+ .columns(toChangeColumns(changeId, proto.getColumns()))
+ .pastAssignees(
+ proto
+ .getPastAssigneeList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableSet()))
+ .hashtags(proto.getHashtagList())
+ .patchSets(
+ proto
+ .getPatchSetList()
+ .stream()
+ .map(PATCH_SET_CODEC::decode)
+ .map(ps -> Maps.immutableEntry(ps.getId(), ps))
+ .collect(toImmutableList()))
+ .approvals(
+ proto
+ .getApprovalList()
+ .stream()
+ .map(APPROVAL_CODEC::decode)
+ .map(a -> Maps.immutableEntry(a.getPatchSetId(), a))
+ .collect(toImmutableList()))
+ .reviewers(toReviewerSet(proto.getReviewerList()))
+ .reviewersByEmail(toReviewerByEmailSet(proto.getReviewerByEmailList()))
+ .pendingReviewers(toReviewerSet(proto.getPendingReviewerList()))
+ .pendingReviewersByEmail(toReviewerByEmailSet(proto.getPendingReviewerByEmailList()))
+ .allPastReviewers(
+ proto
+ .getPastReviewerList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableList()))
+ .reviewerUpdates(toReviewerStatusUpdateList(proto.getReviewerUpdateList()))
+ .submitRecords(
+ proto
+ .getSubmitRecordList()
+ .stream()
+ .map(r -> GSON.fromJson(r, StoredSubmitRecord.class).toSubmitRecord())
+ .collect(toImmutableList()))
+ .changeMessages(
+ proto
+ .getChangeMessageList()
+ .stream()
+ .map(MESSAGE_CODEC::decode)
+ .collect(toImmutableList()))
+ .publishedComments(
+ proto
+ .getPublishedCommentList()
+ .stream()
+ .map(r -> GSON.fromJson(r, Comment.class))
+ .collect(toImmutableListMultimap(c -> new RevId(c.revId), c -> c)));
+ if (proto.getHasReadOnlyUntil()) {
+ b.readOnlyUntil(new Timestamp(proto.getReadOnlyUntil()));
+ }
+ return b.build();
+ }
+
+ private static ChangeColumns toChangeColumns(Change.Id changeId, ChangeColumnsProto proto) {
+ ChangeColumns.Builder b =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(proto.getChangeKey()))
+ .createdOn(new Timestamp(proto.getCreatedOn()))
+ .lastUpdatedOn(new Timestamp(proto.getLastUpdatedOn()))
+ .owner(new Account.Id(proto.getOwner()))
+ .branch(proto.getBranch());
+ if (proto.getHasCurrentPatchSetId()) {
+ b.currentPatchSetId(new PatchSet.Id(changeId, proto.getCurrentPatchSetId()));
+ }
+ b.subject(proto.getSubject());
+ if (proto.getHasTopic()) {
+ b.topic(proto.getTopic());
+ }
+ if (proto.getHasOriginalSubject()) {
+ b.originalSubject(proto.getOriginalSubject());
+ }
+ if (proto.getHasSubmissionId()) {
+ b.submissionId(proto.getSubmissionId());
+ }
+ if (proto.getHasAssignee()) {
+ b.assignee(new Account.Id(proto.getAssignee()));
+ }
+ if (proto.getHasStatus()) {
+ b.status(STATUS_CONVERTER.convert(proto.getStatus()));
+ }
+ b.isPrivate(proto.getIsPrivate())
+ .workInProgress(proto.getWorkInProgress())
+ .reviewStarted(proto.getReviewStarted());
+ if (proto.getHasRevertOf()) {
+ b.revertOf(new Change.Id(proto.getRevertOf()));
+ }
+ return b.build();
+ }
+
+ private static ReviewerSet toReviewerSet(List<ReviewerSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Account.Id, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ new Account.Id(e.getAccountId()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerSet.fromTable(b.build());
+ }
+
+ private static ReviewerByEmailSet toReviewerByEmailSet(
+ List<ReviewerByEmailSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Address, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerByEmailSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ Address.parse(e.getAddress()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerByEmailSet.fromTable(b.build());
+ }
+
+ private static ImmutableList<ReviewerStatusUpdate> toReviewerStatusUpdateList(
+ List<ReviewerStatusUpdateProto> protos) {
+ ImmutableList.Builder<ReviewerStatusUpdate> b = ImmutableList.builder();
+ for (ReviewerStatusUpdateProto proto : protos) {
+ b.add(
+ ReviewerStatusUpdate.create(
+ new Timestamp(proto.getDate()),
+ new Account.Id(proto.getUpdatedBy()),
+ new Account.Id(proto.getReviewer()),
+ REVIEWER_STATE_CONVERTER.convert(proto.getState())));
+ }
+ return b.build();
+ }
+ }
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictKey.java b/java/com/google/gerrit/server/query/change/ConflictKey.java
index 0101ffe..9daf886 100644
--- a/java/com/google/gerrit/server/query/change/ConflictKey.java
+++ b/java/com/google/gerrit/server/query/change/ConflictKey.java
@@ -14,62 +14,80 @@
package com.google.gerrit.server.query.change;
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
+import com.google.common.collect.Ordering;
import com.google.gerrit.extensions.client.SubmitType;
-import java.io.Serializable;
-import java.util.Objects;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ConflictKeyProto;
+import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
-public class ConflictKey implements Serializable {
- private static final long serialVersionUID = 2L;
-
- private final ObjectId commit;
- private final ObjectId otherCommit;
- private final SubmitType submitType;
- private final boolean contentMerge;
-
- public ConflictKey(
- ObjectId commit, ObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
- if (SubmitType.FAST_FORWARD_ONLY.equals(submitType) || commit.compareTo(otherCommit) < 0) {
- this.commit = commit;
- this.otherCommit = otherCommit;
- } else {
- this.commit = otherCommit;
- this.otherCommit = commit;
+@AutoValue
+public abstract class ConflictKey {
+ public static ConflictKey create(
+ AnyObjectId commit, AnyObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
+ ObjectId commitCopy = commit.copy();
+ ObjectId otherCommitCopy = otherCommit.copy();
+ if (submitType == SubmitType.FAST_FORWARD_ONLY) {
+ // The conflict check for FF-only is non-symmetrical, and we need to treat (X, Y) differently
+ // from (Y, X). Store the commits in the input order.
+ return new AutoValue_ConflictKey(commitCopy, otherCommitCopy, submitType, contentMerge);
}
- this.submitType = submitType;
- this.contentMerge = contentMerge;
+ // Otherwise, the check is symmetrical; sort commit/otherCommit before storing, so the actual
+ // key is independent of the order in which they are passed to this method.
+ return new AutoValue_ConflictKey(
+ Ordering.natural().min(commitCopy, otherCommitCopy),
+ Ordering.natural().max(commitCopy, otherCommitCopy),
+ submitType,
+ contentMerge);
}
- public ObjectId getCommit() {
- return commit;
+ @VisibleForTesting
+ static ConflictKey createWithoutNormalization(
+ AnyObjectId commit, AnyObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
+ return new AutoValue_ConflictKey(commit.copy(), otherCommit.copy(), submitType, contentMerge);
}
- public ObjectId getOtherCommit() {
- return otherCommit;
- }
+ public abstract ObjectId commit();
- public SubmitType getSubmitType() {
- return submitType;
- }
+ public abstract ObjectId otherCommit();
- public boolean isContentMerge() {
- return contentMerge;
- }
+ public abstract SubmitType submitType();
- @Override
- public boolean equals(Object o) {
- if (!(o instanceof ConflictKey)) {
- return false;
+ public abstract boolean contentMerge();
+
+ public static enum Serializer implements CacheSerializer<ConflictKey> {
+ INSTANCE;
+
+ private static final Converter<String, SubmitType> SUBMIT_TYPE_CONVERTER =
+ Enums.stringConverter(SubmitType.class);
+
+ @Override
+ public byte[] serialize(ConflictKey object) {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ ConflictKeyProto.newBuilder()
+ .setCommit(idConverter.toByteString(object.commit()))
+ .setOtherCommit(idConverter.toByteString(object.otherCommit()))
+ .setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.submitType()))
+ .setContentMerge(object.contentMerge())
+ .build());
}
- ConflictKey other = (ConflictKey) o;
- return commit.equals(other.commit)
- && otherCommit.equals(other.otherCommit)
- && submitType.equals(other.submitType)
- && contentMerge == other.contentMerge;
- }
- @Override
- public int hashCode() {
- return Objects.hash(commit, otherCommit, submitType, contentMerge);
+ @Override
+ public ConflictKey deserialize(byte[] in) {
+ ConflictKeyProto proto = ProtoCacheSerializers.parseUnchecked(ConflictKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return create(
+ idConverter.fromByteString(proto.getCommit()),
+ idConverter.fromByteString(proto.getOtherCommit()),
+ SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
+ proto.getContentMerge());
+ }
}
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictsCache.java b/java/com/google/gerrit/server/query/change/ConflictsCache.java
index e8b2fef..c7ee79b 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsCache.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsCache.java
@@ -18,7 +18,7 @@
public interface ConflictsCache {
- void put(ConflictKey key, Boolean value);
+ void put(ConflictKey key, boolean value);
@Nullable
Boolean getIfPresent(ConflictKey key);
diff --git a/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java b/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
index 1185677..0b8c5ee 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.query.change;
import com.google.common.cache.Cache;
+import com.google.gerrit.server.cache.BooleanCacheSerializer;
import com.google.gerrit.server.cache.CacheModule;
import com.google.inject.Inject;
import com.google.inject.Module;
@@ -29,7 +30,11 @@
return new CacheModule() {
@Override
protected void configure() {
- persist(NAME, ConflictKey.class, Boolean.class).maximumWeight(37400);
+ persist(NAME, ConflictKey.class, Boolean.class)
+ .version(1)
+ .keySerializer(ConflictKey.Serializer.INSTANCE)
+ .valueSerializer(BooleanCacheSerializer.INSTANCE)
+ .maximumWeight(37400);
bind(ConflictsCache.class).to(ConflictsCacheImpl.class);
}
};
@@ -43,7 +48,7 @@
}
@Override
- public void put(ConflictKey key, Boolean value) {
+ public void put(ConflictKey key, boolean value) {
conflictsCache.put(key, value);
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictsPredicate.java b/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
index f870951..7dc7a0b 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
@@ -115,19 +115,19 @@
ObjectId other = ObjectId.fromString(object.currentPatchSet().getRevision().get());
ConflictKey conflictsKey =
- new ConflictKey(
+ ConflictKey.create(
changeDataCache.getTestAgainst(),
other,
str.type,
projectState.is(BooleanProjectConfig.USE_CONTENT_MERGE));
- Boolean conflicts = args.conflictsCache.getIfPresent(conflictsKey);
- if (conflicts != null) {
- return conflicts;
+ Boolean maybeConflicts = args.conflictsCache.getIfPresent(conflictsKey);
+ if (maybeConflicts != null) {
+ return maybeConflicts;
}
try (Repository repo = args.repoManager.openRepository(otherChange.getProject());
CodeReviewRevWalk rw = CodeReviewCommit.newRevWalk(repo)) {
- conflicts =
+ boolean conflicts =
!args.submitDryRun.run(
str.type,
repo,
diff --git a/java/com/google/gerrit/server/restapi/change/PostReviewers.java b/java/com/google/gerrit/server/restapi/change/PostReviewers.java
index c344513..65c7db7 100644
--- a/java/com/google/gerrit/server/restapi/change/PostReviewers.java
+++ b/java/com/google/gerrit/server/restapi/change/PostReviewers.java
@@ -98,7 +98,6 @@
private final AccountLoader.Factory accountLoaderFactory;
private final Provider<ReviewDb> dbProvider;
private final ChangeData.Factory changeDataFactory;
- private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final Config cfg;
private final ReviewerJson json;
private final NotesMigration migration;
@@ -118,7 +117,6 @@
Provider<ReviewDb> db,
ChangeData.Factory changeDataFactory,
RetryHelper retryHelper,
- IdentifiedUser.GenericFactory identifiedUserFactory,
@GerritServerConfig Config cfg,
ReviewerJson json,
NotesMigration migration,
@@ -135,7 +133,6 @@
this.accountLoaderFactory = accountLoaderFactory;
this.dbProvider = db;
this.changeDataFactory = changeDataFactory;
- this.identifiedUserFactory = identifiedUserFactory;
this.cfg = cfg;
this.json = json;
this.migration = migration;
diff --git a/java/com/google/gerrit/server/restapi/change/Submit.java b/java/com/google/gerrit/server/restapi/change/Submit.java
index 54ecd18..51a4090 100644
--- a/java/com/google/gerrit/server/restapi/change/Submit.java
+++ b/java/com/google/gerrit/server/restapi/change/Submit.java
@@ -95,14 +95,10 @@
"Submit all ${topicSize} changes of the same topic "
+ "(${submitSize} changes including ancestors and other "
+ "changes related by topic)";
- private static final String BLOCKED_SUBMIT_TOOLTIP =
- "This change depends on other changes which are not ready";
private static final String BLOCKED_HIDDEN_SUBMIT_TOOLTIP =
"This change depends on other hidden changes which are not ready";
- private static final String BLOCKED_WORK_IN_PROGRESS = "This change is marked work in progress";
private static final String CLICK_FAILURE_TOOLTIP = "Clicking the button would fail";
private static final String CHANGE_UNMERGEABLE = "Problems with integrating this change";
- private static final String CHANGES_NOT_MERGEABLE = "Problems with change(s): ";
public static class Output {
transient Change change;
@@ -240,6 +236,11 @@
}
/**
+ * Returns a message describing what prevents the current change from being submitted - or null.
+ * This method only considers parent changes, and changes in the same topic. The caller is
+ * responsible for making sure the current change to be submitted can indeed be submitted
+ * (permissions, submit rules, is not a WIP...)
+ *
* @param cd the change the user is currently looking at
* @param cs set of changes to be submitted at once
* @param user the user who is checking to submit
@@ -251,6 +252,11 @@
return BLOCKED_HIDDEN_SUBMIT_TOOLTIP;
}
for (ChangeData c : cs.changes()) {
+ if (cd.getId().equals(c.getId())) {
+ // We ignore the change about to be submitted, as these checks are already done in the
+ // #apply and #getDescription methods.
+ continue;
+ }
Set<ChangePermission> can =
permissionBackend
.user(user)
@@ -261,12 +267,16 @@
return BLOCKED_HIDDEN_SUBMIT_TOOLTIP;
}
if (!can.contains(ChangePermission.SUBMIT)) {
- return BLOCKED_SUBMIT_TOOLTIP;
+ return "You don't have permission to submit change " + c.getId();
}
if (c.change().isWorkInProgress()) {
- return BLOCKED_WORK_IN_PROGRESS;
+ return "Change " + c.getId() + " is marked work in progress";
}
- MergeOp.checkSubmitRule(c, false);
+ try {
+ MergeOp.checkSubmitRule(c, false);
+ } catch (ResourceConflictException e) {
+ return "Change " + c.getId() + " is not ready: " + e.getMessage();
+ }
}
Collection<ChangeData> unmergeable = unmergeableChanges(cs);
@@ -278,11 +288,10 @@
return CHANGE_UNMERGEABLE;
}
}
- return CHANGES_NOT_MERGEABLE
+
+ return "Problems with change(s): "
+ unmergeable.stream().map(c -> c.getId().toString()).collect(joining(", "));
}
- } catch (ResourceConflictException e) {
- return BLOCKED_SUBMIT_TOOLTIP;
} catch (PermissionBackendException | OrmException | IOException e) {
log.error("Error checking if change is submittable", e);
throw new OrmRuntimeException("Could not determine problems for the change", e);
@@ -294,6 +303,7 @@
public UiAction.Description getDescription(RevisionResource resource) {
Change change = resource.getChange();
if (!change.getStatus().isOpen()
+ || change.isWorkInProgress()
|| !resource.isCurrent()
|| !resource.permissions().testOrFalse(ChangePermission.SUBMIT)) {
return null; // submit not visible
diff --git a/java/com/google/gerrit/testing/BUILD b/java/com/google/gerrit/testing/BUILD
index f2fe4c2..875d636 100644
--- a/java/com/google/gerrit/testing/BUILD
+++ b/java/com/google/gerrit/testing/BUILD
@@ -29,9 +29,10 @@
"//java/com/google/gerrit/server/cache/mem",
"//java/com/google/gerrit/server/restapi",
"//java/com/google/gerrit/server/schema",
+ "//lib:guava",
"//lib:gwtorm",
"//lib:h2",
- "//lib:truth",
+ "//lib:junit",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/guice",
@@ -39,5 +40,6 @@
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:api",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/truth/BUILD b/java/com/google/gerrit/truth/BUILD
index a0e2ee9..719ddce 100644
--- a/java/com/google/gerrit/truth/BUILD
+++ b/java/com/google/gerrit/truth/BUILD
@@ -4,6 +4,7 @@
srcs = glob(["**/*.java"]),
visibility = ["//visibility:public"],
deps = [
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/util/ssl/BlindHostnameVerifier.java b/java/com/google/gerrit/util/ssl/BlindHostnameVerifier.java
new file mode 100644
index 0000000..ac758690
--- /dev/null
+++ b/java/com/google/gerrit/util/ssl/BlindHostnameVerifier.java
@@ -0,0 +1,33 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.util.ssl;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLSession;
+
+/** HostnameVerifier that ignores host name. */
+public class BlindHostnameVerifier implements HostnameVerifier {
+
+ private static final HostnameVerifier INSTANCE = new BlindHostnameVerifier();
+
+ public static HostnameVerifier getInstance() {
+ return INSTANCE;
+ }
+
+ @Override
+ public boolean verify(String hostname, SSLSession session) {
+ return true;
+ }
+}
diff --git a/javatests/com/google/gerrit/acceptance/BUILD b/javatests/com/google/gerrit/acceptance/BUILD
index 234e4be..9246abb 100644
--- a/javatests/com/google/gerrit/acceptance/BUILD
+++ b/javatests/com/google/gerrit/acceptance/BUILD
@@ -6,7 +6,7 @@
deps = [
"//java/com/google/gerrit/acceptance:lib",
"//lib:guava",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/acceptance/api/group/BUILD b/javatests/com/google/gerrit/acceptance/api/group/BUILD
index 21294f5..a0b70cc 100644
--- a/javatests/com/google/gerrit/acceptance/api/group/BUILD
+++ b/javatests/com/google/gerrit/acceptance/api/group/BUILD
@@ -21,6 +21,6 @@
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/acceptance/pgm/ReindexIT.java b/javatests/com/google/gerrit/acceptance/pgm/ReindexIT.java
index 4b6f8b2..41640a9 100644
--- a/javatests/com/google/gerrit/acceptance/pgm/ReindexIT.java
+++ b/javatests/com/google/gerrit/acceptance/pgm/ReindexIT.java
@@ -80,6 +80,9 @@
.flatMap(g -> g.members.stream())
.map(a -> a._accountId))
.containsExactly(adminId.get());
+ // Query project index
+ assertThat(gApi.projects().query(project.get()).get().stream().map(p -> p.name))
+ .containsExactly(project.get());
}
}
@@ -220,7 +223,7 @@
}
private void setUpChange() throws Exception {
- project = new Project.NameKey("project");
+ project = new Project.NameKey("reindex-project-test");
try (ServerContext ctx = startServer()) {
GerritApi gApi = ctx.getInjector().getInstance(GerritApi.class);
gApi.projects().create(project.get());
diff --git a/javatests/com/google/gerrit/acceptance/rest/change/ActionsIT.java b/javatests/com/google/gerrit/acceptance/rest/change/ActionsIT.java
index f89f2a1..171babd 100644
--- a/javatests/com/google/gerrit/acceptance/rest/change/ActionsIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/change/ActionsIT.java
@@ -105,7 +105,9 @@
public void revisionActionsTwoChangesInTopic() throws Exception {
String changeId = createChangeWithTopic().getChangeId();
approve(changeId);
- String changeId2 = createChangeWithTopic().getChangeId();
+ PushOneCommit.Result change2 = createChangeWithTopic();
+ int legacyId2 = change2.getChange().getId().get();
+ String changeId2 = change2.getChangeId();
Map<String, ActionInfo> actions = getActions(changeId);
commonActionsAssertions(actions);
if (isSubmitWholeTopicEnabled()) {
@@ -113,7 +115,7 @@
assertThat(info.enabled).isNull();
assertThat(info.label).isEqualTo("Submit whole topic");
assertThat(info.method).isEqualTo("POST");
- assertThat(info.title).isEqualTo("This change depends on other changes which are not ready");
+ assertThat(info.title).matches("Change " + legacyId2 + " is not ready: needs Code-Review");
} else {
noSubmitWholeTopicAssertions(actions, 1);
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/BUILD b/javatests/com/google/gerrit/acceptance/rest/project/BUILD
index 0720fb3..dad3ca9 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/BUILD
+++ b/javatests/com/google/gerrit/acceptance/rest/project/BUILD
@@ -18,7 +18,8 @@
],
deps = [
"//java/com/google/gerrit/extensions:api",
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
@@ -31,8 +32,9 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/common/BUILD b/javatests/com/google/gerrit/common/BUILD
index ff19646..ba9a5bc 100644
--- a/javatests/com/google/gerrit/common/BUILD
+++ b/javatests/com/google/gerrit/common/BUILD
@@ -15,7 +15,7 @@
"//java/com/google/gerrit/common:client",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
+ "//lib/truth",
],
)
@@ -28,8 +28,8 @@
"//java/com/google/gerrit/common:version",
"//java/com/google/gerrit/launcher",
"//lib:guava",
- "//lib:truth",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/elasticsearch/BUILD b/javatests/com/google/gerrit/elasticsearch/BUILD
index 70d7089..a2f5229 100644
--- a/javatests/com/google/gerrit/elasticsearch/BUILD
+++ b/javatests/com/google/gerrit/elasticsearch/BUILD
@@ -14,10 +14,10 @@
"//lib:gson",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/elasticsearch",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/extensions/BUILD b/javatests/com/google/gerrit/extensions/BUILD
index 2557750..069c915 100644
--- a/javatests/com/google/gerrit/extensions/BUILD
+++ b/javatests/com/google/gerrit/extensions/BUILD
@@ -7,7 +7,7 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/extensions/common/testing:common-test-util",
- "//lib:truth",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/extensions/conditions/BUILD b/javatests/com/google/gerrit/extensions/conditions/BUILD
index aebe347..e2d5951 100644
--- a/javatests/com/google/gerrit/extensions/conditions/BUILD
+++ b/javatests/com/google/gerrit/extensions/conditions/BUILD
@@ -5,6 +5,6 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/extensions:lib",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/git/testing/BUILD b/javatests/com/google/gerrit/git/testing/BUILD
index 13eb5bf..56e9ec2 100644
--- a/javatests/com/google/gerrit/git/testing/BUILD
+++ b/javatests/com/google/gerrit/git/testing/BUILD
@@ -5,6 +5,6 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/git/testing",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/gpg/BUILD b/javatests/com/google/gerrit/gpg/BUILD
index 5cc9ae8..ab66f9a 100644
--- a/javatests/com/google/gerrit/gpg/BUILD
+++ b/javatests/com/google/gerrit/gpg/BUILD
@@ -20,7 +20,6 @@
"//java/com/google/gerrit/testing:gerrit-test-util",
"//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/bouncycastle:bcpg",
"//lib/bouncycastle:bcpg-neverlink",
"//lib/bouncycastle:bcprov",
@@ -30,5 +29,6 @@
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:api",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/httpd/BUILD b/javatests/com/google/gerrit/httpd/BUILD
index e2f2a45..ec2df15 100644
--- a/javatests/com/google/gerrit/httpd/BUILD
+++ b/javatests/com/google/gerrit/httpd/BUILD
@@ -19,11 +19,11 @@
"//lib:junit",
"//lib:servlet-api-3_1-without-neverlink",
"//lib:soy",
- "//lib:truth",
"//lib/easymock",
"//lib/guice",
"//lib/guice:guice-servlet",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/index/BUILD b/javatests/com/google/gerrit/index/BUILD
index bd79860..d905188 100644
--- a/javatests/com/google/gerrit/index/BUILD
+++ b/javatests/com/google/gerrit/index/BUILD
@@ -9,9 +9,10 @@
"//java/com/google/gerrit/index",
"//java/com/google/gerrit/index:query_exception",
"//java/com/google/gerrit/index:query_parser",
+ "//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/antlr:java_runtime",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/metrics/proc/BUILD b/javatests/com/google/gerrit/metrics/proc/BUILD
index 8e50cf6..91e5cf6 100644
--- a/javatests/com/google/gerrit/metrics/proc/BUILD
+++ b/javatests/com/google/gerrit/metrics/proc/BUILD
@@ -9,8 +9,8 @@
"//java/com/google/gerrit/lifecycle",
"//java/com/google/gerrit/metrics",
"//java/com/google/gerrit/metrics/dropwizard",
- "//lib:truth",
"//lib/dropwizard:dropwizard-core",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/pgm/BUILD b/javatests/com/google/gerrit/pgm/BUILD
index af0bea6..e4afae2 100644
--- a/javatests/com/google/gerrit/pgm/BUILD
+++ b/javatests/com/google/gerrit/pgm/BUILD
@@ -13,11 +13,11 @@
"//java/com/google/gerrit/server",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/easymock",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/reviewdb/BUILD b/javatests/com/google/gerrit/reviewdb/BUILD
index a7b9b51..0fd140e 100644
--- a/javatests/com/google/gerrit/reviewdb/BUILD
+++ b/javatests/com/google/gerrit/reviewdb/BUILD
@@ -7,7 +7,8 @@
"//java/com/google/gerrit/reviewdb:client",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/BUILD b/javatests/com/google/gerrit/server/BUILD
index 1ab1124..3113a8a 100644
--- a/javatests/com/google/gerrit/server/BUILD
+++ b/javatests/com/google/gerrit/server/BUILD
@@ -12,7 +12,8 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/truth",
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
@@ -52,16 +53,19 @@
"//java/org/eclipse/jgit:server",
"//lib:grappa",
"//lib:gson",
+ "//lib:guava",
"//lib:guava-retrying",
"//lib:gwtorm",
"//lib:protobuf",
- "//lib:truth-java8-extension",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/commons:codec",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
+ "//lib/truth:truth-proto-extension",
"//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
index 586c065..5e93a09 100644
--- a/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
+++ b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
@@ -1,6 +1,7 @@
package com.google.gerrit.server.auth.oauth;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
import com.google.common.collect.ImmutableMap;
diff --git a/javatests/com/google/gerrit/server/cache/BUILD b/javatests/com/google/gerrit/server/cache/BUILD
index b173957..ab88169 100644
--- a/javatests/com/google/gerrit/server/cache/BUILD
+++ b/javatests/com/google/gerrit/server/cache/BUILD
@@ -5,12 +5,16 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/server",
+ "//java/com/google/gerrit/server/cache/testing",
"//lib:guava",
"//lib:gwtorm",
"//lib:junit",
"//lib:protobuf",
- "//lib:truth",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
+ "//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-proto-extension",
+ "//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java b/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java
new file mode 100644
index 0000000..8bf9762
--- /dev/null
+++ b/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java
@@ -0,0 +1,116 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.protobuf.ByteString;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public class ProtoCacheSerializersTest {
+ @Test
+ public void objectIdFromByteString() {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ assertThat(
+ idConverter.fromByteString(
+ bytes(
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa)))
+ .isEqualTo(ObjectId.fromString("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ assertThat(
+ idConverter.fromByteString(
+ bytes(
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb)))
+ .isEqualTo(ObjectId.fromString("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ }
+
+ @Test
+ public void objectIdFromByteStringWrongSize() {
+ try {
+ ObjectIdConverter.create().fromByteString(ByteString.copyFromUtf8("foo"));
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void objectIdToByteString() {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ assertThat(
+ idConverter.toByteString(
+ ObjectId.fromString("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")))
+ .isEqualTo(
+ bytes(
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa));
+ assertThat(
+ idConverter.toByteString(
+ ObjectId.fromString("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb")))
+ .isEqualTo(
+ bytes(
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb));
+ }
+
+ @Test
+ public void parseUncheckedWrongProtoType() {
+ ChangeNotesKeyProto proto =
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(ByteString.copyFromUtf8("foo"))
+ .build();
+ byte[] bytes = ProtoCacheSerializers.toByteArray(proto);
+ try {
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), bytes);
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void parseUncheckedInvalidData() {
+ byte[] bytes = new byte[] {0x00};
+ try {
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), bytes);
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void parseUnchecked() {
+ ChangeNotesKeyProto proto =
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(ByteString.copyFromUtf8("foo"))
+ .build();
+ byte[] bytes = ProtoCacheSerializers.toByteArray(proto);
+ assertThat(ProtoCacheSerializers.parseUnchecked(ChangeNotesKeyProto.parser(), bytes))
+ .isEqualTo(proto);
+ }
+}
diff --git a/javatests/com/google/gerrit/server/cache/h2/BUILD b/javatests/com/google/gerrit/server/cache/h2/BUILD
index e2b9257..63ae94b 100644
--- a/javatests/com/google/gerrit/server/cache/h2/BUILD
+++ b/javatests/com/google/gerrit/server/cache/h2/BUILD
@@ -9,7 +9,7 @@
"//lib:guava",
"//lib:h2",
"//lib:junit",
- "//lib:truth",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
index 5b77094..03e0d4e 100644
--- a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
+++ b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
@@ -15,12 +15,14 @@
package com.google.gerrit.server.change;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
import com.google.common.collect.ImmutableMap;
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.proto.Cache.ChangeKindKeyProto;
+import com.google.gerrit.server.change.ChangeKindCacheImpl.Key;
import org.eclipse.jgit.lib.ObjectId;
import org.junit.Test;
@@ -28,7 +30,7 @@
@Test
public void keySerializer() throws Exception {
ChangeKindCacheImpl.Key key =
- new ChangeKindCacheImpl.Key(
+ Key.create(
ObjectId.zeroId(),
ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"),
"aStrategy");
@@ -54,7 +56,7 @@
@Test
public void keyFields() throws Exception {
assertThatSerializedClass(ChangeKindCacheImpl.Key.class)
- .hasFields(
+ .hasAutoValueMethods(
ImmutableMap.of(
"prior", ObjectId.class, "next", ObjectId.class, "strategyName", String.class));
}
diff --git a/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
index 69fc531..c8e6f2b 100644
--- a/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
+++ b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.change;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
diff --git a/javatests/com/google/gerrit/server/group/db/BUILD b/javatests/com/google/gerrit/server/group/db/BUILD
index 48e8d303..eee5529 100644
--- a/javatests/com/google/gerrit/server/group/db/BUILD
+++ b/javatests/com/google/gerrit/server/group/db/BUILD
@@ -16,9 +16,10 @@
"//java/com/google/gerrit/server/group/testing",
"//java/com/google/gerrit/testing:gerrit-test-util",
"//java/com/google/gerrit/truth",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
new file mode 100644
index 0000000..5a7d812
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
@@ -0,0 +1,60 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.Project;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public final class ChangeNotesCacheTest {
+ @Test
+ public void keySerializer() throws Exception {
+ ChangeNotesCache.Key key =
+ ChangeNotesCache.Key.create(
+ new Project.NameKey("project"),
+ new Change.Id(1234),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"));
+ byte[] serialized = ChangeNotesCache.Key.Serializer.INSTANCE.serialize(key);
+ assertThat(ChangeNotesKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .build());
+ assertThat(ChangeNotesCache.Key.Serializer.INSTANCE.deserialize(serialized)).isEqualTo(key);
+ }
+
+ @Test
+ public void keyMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesCache.Key.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "project", Project.NameKey.class,
+ "changeId", Change.Id.class,
+ "id", ObjectId.class));
+ }
+}
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
index d974877..b8f544a 100644
--- a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
@@ -442,17 +442,17 @@
// Change created in WIP remains in WIP.
RevCommit commit = writeCommit("Update WIP change\n" + "\n" + "Patch-set: 1\n", true);
ChangeNotesState state = newParser(commit).parseAll();
- assertThat(state.columns().hasReviewStarted()).isFalse();
+ assertThat(state.columns().reviewStarted()).isFalse();
// Moving change out of WIP starts review.
commit =
writeCommit("New ready change\n" + "\n" + "Patch-set: 1\n" + "Work-in-progress: false\n");
state = newParser(commit).parseAll();
- assertThat(state.columns().hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
// Change created not in WIP has always been in review started state.
state = assertParseSucceeds("New change that doesn't declare WIP\n" + "\n" + "Patch-set: 1\n");
- assertThat(state.columns().hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
}
@Test
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
new file mode 100644
index 0000000..3d65eae
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
@@ -0,0 +1,946 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.ProtoCacheSerializers.toByteString;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableListMultimap;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
+import com.google.gerrit.common.data.SubmitRecord;
+import com.google.gerrit.common.data.SubmitRequirement;
+import com.google.gerrit.reviewdb.client.Account;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
+import com.google.gerrit.reviewdb.client.Comment;
+import com.google.gerrit.reviewdb.client.LabelId;
+import com.google.gerrit.reviewdb.client.PatchSet;
+import com.google.gerrit.reviewdb.client.PatchSetApproval;
+import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.ReviewerByEmailSet;
+import com.google.gerrit.server.ReviewerSet;
+import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.mail.Address;
+import com.google.gerrit.server.notedb.ChangeNotesState.ChangeColumns;
+import com.google.gerrit.server.notedb.ChangeNotesState.Serializer;
+import com.google.gwtorm.client.KeyUtil;
+import com.google.gwtorm.server.StandardKeyEncoder;
+import com.google.inject.TypeLiteral;
+import com.google.protobuf.ByteString;
+import java.lang.reflect.Type;
+import java.sql.Timestamp;
+import java.util.List;
+import java.util.Map;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ChangeNotesStateTest {
+ static {
+ KeyUtil.setEncoderImpl(new StandardKeyEncoder());
+ }
+
+ private static final Change.Id ID = new Change.Id(123);
+ private static final ObjectId SHA =
+ ObjectId.fromString("1234567812345678123456781234567812345678");
+ private static final ByteString SHA_BYTES = ObjectIdConverter.create().toByteString(SHA);
+ private static final String CHANGE_KEY = "Iabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd";
+
+ private ChangeColumns cols;
+ private ChangeColumnsProto colsProto;
+
+ @Before
+ public void setUp() throws Exception {
+ cols =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(CHANGE_KEY))
+ .createdOn(new Timestamp(123456L))
+ .lastUpdatedOn(new Timestamp(234567L))
+ .owner(new Account.Id(1000))
+ .branch("refs/heads/master")
+ .subject("Test change")
+ .isPrivate(false)
+ .workInProgress(false)
+ .reviewStarted(true)
+ .build();
+ colsProto = toProto(newBuilder().build()).getColumns();
+ }
+
+ private ChangeNotesState.Builder newBuilder() {
+ return ChangeNotesState.Builder.empty(ID).metaId(SHA).columns(cols);
+ }
+
+ @Test
+ public void serializeChangeKey() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(
+ cols.toBuilder()
+ .changeKey(new Change.Key("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto.toBuilder().setChangeKey("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build());
+ }
+
+ @Test
+ public void serializeCreatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().createdOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCreatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeLastUpdatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().lastUpdatedOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setLastUpdatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeOwner() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().owner(new Account.Id(7777)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setOwner(7777))
+ .build());
+ }
+
+ @Test
+ public void serializeBranch() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().branch("refs/heads/bar").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setBranch("refs/heads/bar"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().subject("A different test change").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubject("A different test change"))
+ .build());
+ }
+
+ @Test
+ public void serializeCurrentPatchSetId() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().currentPatchSetId(new PatchSet.Id(ID, 2)).build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCurrentPatchSetId(2).setHasCurrentPatchSetId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNullTopic() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().topic(null).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .build());
+ }
+
+ @Test
+ public void serializeEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNonEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("topic").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("topic").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeOriginalSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().originalSubject("The first patch set").build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto
+ .toBuilder()
+ .setOriginalSubject("The first patch set")
+ .setHasOriginalSubject(true))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmissionId() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().submissionId("xyz").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubmissionId("xyz").setHasSubmissionId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeAssignee() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().assignee(new Account.Id(2000)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setAssignee(2000).setHasAssignee(true))
+ .build());
+ }
+
+ @Test
+ public void serializeStatus() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().status(Change.Status.MERGED).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setStatus("MERGED").setHasStatus(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsPrivate() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().isPrivate(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setIsPrivate(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsWorkInProgress() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().workInProgress(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setWorkInProgress(true))
+ .build());
+ }
+
+ @Test
+ public void serializeHasReviewStarted() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().reviewStarted(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setReviewStarted(true))
+ .build());
+ }
+
+ @Test
+ public void serializeRevertOf() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().revertOf(new Change.Id(999)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setRevertOf(999).setHasRevertOf(true))
+ .build());
+ }
+
+ @Test
+ public void serializePastAssignees() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pastAssignees(ImmutableSet.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastAssignee(2002)
+ .addPastAssignee(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeHashtags() throws Exception {
+ assertRoundTrip(
+ newBuilder().hashtags(ImmutableSet.of("tag2", "tag1")).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addHashtag("tag2")
+ .addHashtag("tag1")
+ .build());
+ }
+
+ @Test
+ public void serializePatchSets() throws Exception {
+ PatchSet ps1 = new PatchSet(new PatchSet.Id(ID, 1));
+ ps1.setUploader(new Account.Id(2000));
+ ps1.setRevision(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ ps1.setCreatedOn(cols.createdOn());
+ ByteString ps1Bytes = toByteString(ps1, PATCH_SET_CODEC);
+ assertThat(ps1Bytes.size()).isEqualTo(66);
+
+ PatchSet ps2 = new PatchSet(new PatchSet.Id(ID, 2));
+ ps2.setUploader(new Account.Id(3000));
+ ps2.setRevision(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ ps2.setCreatedOn(cols.lastUpdatedOn());
+ ByteString ps2Bytes = toByteString(ps2, PATCH_SET_CODEC);
+ assertThat(ps2Bytes.size()).isEqualTo(66);
+ assertThat(ps2Bytes).isNotEqualTo(ps1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .patchSets(ImmutableMap.of(ps2.getId(), ps2, ps1.getId(), ps1).entrySet())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPatchSet(ps2Bytes)
+ .addPatchSet(ps1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeApprovals() throws Exception {
+ PatchSetApproval a1 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2001), new LabelId("Code-Review")),
+ (short) 1,
+ new Timestamp(1212L));
+ ByteString a1Bytes = toByteString(a1, APPROVAL_CODEC);
+ assertThat(a1Bytes.size()).isEqualTo(43);
+
+ PatchSetApproval a2 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2002), new LabelId("Verified")),
+ (short) -1,
+ new Timestamp(3434L));
+ ByteString a2Bytes = toByteString(a2, APPROVAL_CODEC);
+ assertThat(a2Bytes.size()).isEqualTo(49);
+ assertThat(a2Bytes).isNotEqualTo(a1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .approvals(
+ ImmutableListMultimap.of(a2.getPatchSetId(), a2, a1.getPatchSetId(), a1).entries())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addApproval(a2Bytes)
+ .addApproval(a1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmailWithNullName() throws Exception {
+ ChangeNotesState actual =
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.of(
+ ReviewerStateInternal.CC,
+ new Address("emailonly@example.com"),
+ new Timestamp(1212L))))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("emailonly@example.com")
+ .setTimestamp(1212L))
+ .build());
+
+ // Address doesn't consider the name field in equals, so we have to check it manually.
+ // TODO(dborowitz): Fix Address#equals.
+ ImmutableSet<Address> ccs = actual.reviewersByEmail().byState(ReviewerStateInternal.CC);
+ assertThat(ccs).hasSize(1);
+ Address address = Iterables.getOnlyElement(ccs);
+ assertThat(address.getName()).isNull();
+ assertThat(address.getEmail()).isEqualTo("emailonly@example.com");
+ }
+
+ @Test
+ public void serializePendingReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializePendingReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeAllPastReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .allPastReviewers(ImmutableList.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastReviewer(2002)
+ .addPastReviewer(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewerUpdates() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewerUpdates(
+ ImmutableList.of(
+ ReviewerStatusUpdate.create(
+ new Timestamp(1212L),
+ new Account.Id(1000),
+ new Account.Id(2002),
+ ReviewerStateInternal.CC),
+ ReviewerStatusUpdate.create(
+ new Timestamp(3434L),
+ new Account.Id(1000),
+ new Account.Id(2001),
+ ReviewerStateInternal.REVIEWER)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(1212L)
+ .setUpdatedBy(1000)
+ .setReviewer(2002)
+ .setState("CC"))
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(3434L)
+ .setUpdatedBy(1000)
+ .setReviewer(2001)
+ .setState("REVIEWER"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmitRecords() throws Exception {
+ SubmitRecord sr1 = new SubmitRecord();
+ sr1.status = SubmitRecord.Status.OK;
+
+ SubmitRecord sr2 = new SubmitRecord();
+ sr2.status = SubmitRecord.Status.FORCED;
+
+ assertRoundTrip(
+ newBuilder().submitRecords(ImmutableList.of(sr2, sr1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addSubmitRecord("{\"status\":\"FORCED\"}")
+ .addSubmitRecord("{\"status\":\"OK\"}")
+ .build());
+ }
+
+ @Test
+ public void serializeChangeMessages() throws Exception {
+ ChangeMessage m1 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid1"),
+ new Account.Id(1000),
+ new Timestamp(1212L),
+ new PatchSet.Id(ID, 1));
+ ByteString m1Bytes = toByteString(m1, MESSAGE_CODEC);
+ assertThat(m1Bytes.size()).isEqualTo(35);
+
+ ChangeMessage m2 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid2"),
+ new Account.Id(2000),
+ new Timestamp(3434L),
+ new PatchSet.Id(ID, 2));
+ ByteString m2Bytes = toByteString(m2, MESSAGE_CODEC);
+ assertThat(m2Bytes.size()).isEqualTo(35);
+ assertThat(m2Bytes).isNotEqualTo(m1Bytes);
+
+ assertRoundTrip(
+ newBuilder().changeMessages(ImmutableList.of(m2, m1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addChangeMessage(m2Bytes)
+ .addChangeMessage(m1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializePublishedComments() throws Exception {
+ Comment c1 =
+ new Comment(
+ new Comment.Key("uuid1", "file1", 1),
+ new Account.Id(1001),
+ new Timestamp(1212L),
+ (short) 1,
+ "message 1",
+ "serverId",
+ false);
+ c1.setRevId(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ String c1Json = Serializer.GSON.toJson(c1);
+
+ Comment c2 =
+ new Comment(
+ new Comment.Key("uuid2", "file2", 2),
+ new Account.Id(1002),
+ new Timestamp(3434L),
+ (short) 2,
+ "message 2",
+ "serverId",
+ true);
+ c2.setRevId(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ String c2Json = Serializer.GSON.toJson(c2);
+
+ assertRoundTrip(
+ newBuilder()
+ .publishedComments(
+ ImmutableListMultimap.of(new RevId(c2.revId), c2, new RevId(c1.revId), c1))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPublishedComment(c2Json)
+ .addPublishedComment(c1Json)
+ .build());
+ }
+
+ @Test
+ public void serializeReadOnlyUntil() throws Exception {
+ assertRoundTrip(
+ newBuilder().readOnlyUntil(new Timestamp(1212L)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .setReadOnlyUntil(1212L)
+ .setHasReadOnlyUntil(true)
+ .build());
+ }
+
+ @Test
+ public void changeNotesStateMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesState.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("metaId", ObjectId.class)
+ .put("changeId", Change.Id.class)
+ .put("columns", ChangeColumns.class)
+ .put("pastAssignees", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType())
+ .put("hashtags", new TypeLiteral<ImmutableSet<String>>() {}.getType())
+ .put(
+ "patchSets",
+ new TypeLiteral<ImmutableList<Map.Entry<PatchSet.Id, PatchSet>>>() {}.getType())
+ .put(
+ "approvals",
+ new TypeLiteral<
+ ImmutableList<Map.Entry<PatchSet.Id, PatchSetApproval>>>() {}.getType())
+ .put("reviewers", ReviewerSet.class)
+ .put("reviewersByEmail", ReviewerByEmailSet.class)
+ .put("pendingReviewers", ReviewerSet.class)
+ .put("pendingReviewersByEmail", ReviewerByEmailSet.class)
+ .put("allPastReviewers", new TypeLiteral<ImmutableList<Account.Id>>() {}.getType())
+ .put(
+ "reviewerUpdates",
+ new TypeLiteral<ImmutableList<ReviewerStatusUpdate>>() {}.getType())
+ .put("submitRecords", new TypeLiteral<ImmutableList<SubmitRecord>>() {}.getType())
+ .put("changeMessages", new TypeLiteral<ImmutableList<ChangeMessage>>() {}.getType())
+ .put(
+ "publishedComments",
+ new TypeLiteral<ImmutableListMultimap<RevId, Comment>>() {}.getType())
+ .put("readOnlyUntil", Timestamp.class)
+ .build());
+ }
+
+ @Test
+ public void changeColumnsMethods() throws Exception {
+ assertThatSerializedClass(ChangeColumns.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("changeKey", Change.Key.class)
+ .put("createdOn", Timestamp.class)
+ .put("lastUpdatedOn", Timestamp.class)
+ .put("owner", Account.Id.class)
+ .put("branch", String.class)
+ .put("currentPatchSetId", PatchSet.Id.class)
+ .put("subject", String.class)
+ .put("topic", String.class)
+ .put("originalSubject", String.class)
+ .put("submissionId", String.class)
+ .put("assignee", Account.Id.class)
+ .put("status", Change.Status.class)
+ .put("isPrivate", boolean.class)
+ .put("workInProgress", boolean.class)
+ .put("reviewStarted", boolean.class)
+ .put("revertOf", Change.Id.class)
+ .put("toBuilder", ChangeNotesState.ChangeColumns.Builder.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetFields() throws Exception {
+ assertThatSerializedClass(PatchSet.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("id", PatchSet.Id.class)
+ .put("revision", RevId.class)
+ .put("uploader", Account.Id.class)
+ .put("createdOn", Timestamp.class)
+ .put("groups", String.class)
+ .put("pushCertificate", String.class)
+ .put("description", String.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetApprovalFields() throws Exception {
+ assertThatSerializedClass(PatchSetApproval.Key.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("patchSetId", PatchSet.Id.class)
+ .put("accountId", Account.Id.class)
+ .put("categoryId", LabelId.class)
+ .build());
+ assertThatSerializedClass(PatchSetApproval.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", PatchSetApproval.Key.class)
+ .put("value", short.class)
+ .put("granted", Timestamp.class)
+ .put("tag", String.class)
+ .put("realAccountId", Account.Id.class)
+ .put("postSubmit", boolean.class)
+ .build());
+ }
+
+ @Test
+ public void reviewerSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<
+ ReviewerStateInternal, Account.Id, Timestamp>>() {}.getType(),
+ "accounts", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerByEmailSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerByEmailSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<ReviewerStateInternal, Address, Timestamp>>() {}.getType(),
+ "users", new TypeLiteral<ImmutableSet<Address>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerStatusUpdateMethods() throws Exception {
+ assertThatSerializedClass(ReviewerStatusUpdate.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "date", Timestamp.class,
+ "updatedBy", Account.Id.class,
+ "reviewer", Account.Id.class,
+ "state", ReviewerStateInternal.class));
+ }
+
+ @Test
+ public void submitRecordFields() throws Exception {
+ assertThatSerializedClass(SubmitRecord.class)
+ .hasFields(
+ ImmutableMap.of(
+ "status",
+ SubmitRecord.Status.class,
+ "labels",
+ new TypeLiteral<List<SubmitRecord.Label>>() {}.getType(),
+ "requirements",
+ new TypeLiteral<List<SubmitRequirement>>() {}.getType(),
+ "errorMessage",
+ String.class));
+ assertThatSerializedClass(SubmitRecord.Label.class)
+ .hasFields(
+ ImmutableMap.of(
+ "label", String.class,
+ "status", SubmitRecord.Label.Status.class,
+ "appliedBy", Account.Id.class));
+ assertThatSerializedClass(SubmitRequirement.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "fallbackText", String.class,
+ "type", String.class,
+ "data", new TypeLiteral<ImmutableMap<String, String>>() {}.getType()));
+ }
+
+ @Test
+ public void changeMessageFields() throws Exception {
+ assertThatSerializedClass(ChangeMessage.Key.class)
+ .hasFields(ImmutableMap.of("changeId", Change.Id.class, "uuid", String.class));
+ assertThatSerializedClass(ChangeMessage.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", ChangeMessage.Key.class)
+ .put("author", Account.Id.class)
+ .put("writtenOn", Timestamp.class)
+ .put("message", String.class)
+ .put("patchset", PatchSet.Id.class)
+ .put("tag", String.class)
+ .put("realAuthor", Account.Id.class)
+ .build());
+ }
+
+ @Test
+ public void commentFields() throws Exception {
+ assertThatSerializedClass(Comment.Key.class)
+ .hasFields(
+ ImmutableMap.of(
+ "uuid", String.class, "filename", String.class, "patchSetId", int.class));
+ assertThatSerializedClass(Comment.Identity.class).hasFields(ImmutableMap.of("id", int.class));
+ assertThatSerializedClass(Comment.Range.class)
+ .hasFields(
+ ImmutableMap.of(
+ "startLine", int.class,
+ "startChar", int.class,
+ "endLine", int.class,
+ "endChar", int.class));
+ assertThatSerializedClass(Comment.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", Comment.Key.class)
+ .put("lineNbr", int.class)
+ .put("author", Comment.Identity.class)
+ .put("realAuthor", Comment.Identity.class)
+ .put("writtenOn", Timestamp.class)
+ .put("side", short.class)
+ .put("message", String.class)
+ .put("parentUuid", String.class)
+ .put("range", Comment.Range.class)
+ .put("tag", String.class)
+ .put("revId", String.class)
+ .put("serverId", String.class)
+ .put("unresolved", boolean.class)
+ .put("legacyFormat", boolean.class)
+ .build());
+ }
+
+ private static ChangeNotesStateProto toProto(ChangeNotesState state) throws Exception {
+ return ChangeNotesStateProto.parseFrom(Serializer.INSTANCE.serialize(state));
+ }
+
+ private static ChangeNotesState assertRoundTrip(
+ ChangeNotesState state, ChangeNotesStateProto expectedProto) throws Exception {
+ ChangeNotesStateProto actualProto = toProto(state);
+ assertThat(actualProto).isEqualTo(expectedProto);
+ ChangeNotesState actual = Serializer.INSTANCE.deserialize(Serializer.INSTANCE.serialize(state));
+ assertThat(actual).isEqualTo(state);
+ // It's possible that ChangeNotesState contains objects which implement equals without taking
+ // into account all fields. Return the actual deserialized instance so that callers can perform
+ // additional assertions if necessary.
+ return actual;
+ }
+}
diff --git a/javatests/com/google/gerrit/server/query/account/BUILD b/javatests/com/google/gerrit/server/query/account/BUILD
index c352f43..e6c631b 100644
--- a/javatests/com/google/gerrit/server/query/account/BUILD
+++ b/javatests/com/google/gerrit/server/query/account/BUILD
@@ -15,10 +15,11 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
- "//lib:truth-java8-extension",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
"//prolog:gerrit-prolog-common",
],
)
diff --git a/javatests/com/google/gerrit/server/query/change/BUILD b/javatests/com/google/gerrit/server/query/change/BUILD
index 66c825c..09e3243 100644
--- a/javatests/com/google/gerrit/server/query/change/BUILD
+++ b/javatests/com/google/gerrit/server/query/change/BUILD
@@ -19,21 +19,21 @@
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
+LUCENE_QUERY_TEST = ["LuceneQueryChangesTest.java"]
+
junit_tests(
name = "lucene_query_test",
size = "large",
- srcs = glob(
- ["*.java"],
- exclude = ABSTRACT_QUERY_TEST,
- ),
+ srcs = LUCENE_QUERY_TEST,
visibility = ["//visibility:public"],
deps = [
":abstract_query_tests",
@@ -41,10 +41,34 @@
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
+ ],
+)
+
+junit_tests(
+ name = "small_tests",
+ size = "small",
+ srcs = glob(
+ ["*.java"],
+ exclude = ABSTRACT_QUERY_TEST + LUCENE_QUERY_TEST,
+ ),
+ visibility = ["//visibility:public"],
+ deps = [
+ "//java/com/google/gerrit/extensions:api",
+ "//java/com/google/gerrit/reviewdb:server",
+ "//java/com/google/gerrit/server",
+ "//java/com/google/gerrit/server/cache/testing",
+ "//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
+ "//lib:gwtorm",
+ "//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-proto-extension",
+ "//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java b/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java
new file mode 100644
index 0000000..b87bbf7
--- /dev/null
+++ b/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java
@@ -0,0 +1,98 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.query.change;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.extensions.client.SubmitType.FAST_FORWARD_ONLY;
+import static com.google.gerrit.extensions.client.SubmitType.MERGE_IF_NECESSARY;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.extensions.client.SubmitType;
+import com.google.gerrit.server.cache.proto.Cache.ConflictKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public class ConflictKeyTest {
+ @Test
+ public void ffOnlyPreservesInputOrder() {
+ ObjectId id1 = ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee");
+ ObjectId id2 = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ ConflictKey id1First = ConflictKey.create(id1, id2, FAST_FORWARD_ONLY, true);
+ ConflictKey id2First = ConflictKey.create(id2, id1, FAST_FORWARD_ONLY, true);
+
+ assertThat(id1First)
+ .isEqualTo(ConflictKey.createWithoutNormalization(id1, id2, FAST_FORWARD_ONLY, true));
+ assertThat(id2First)
+ .isEqualTo(ConflictKey.createWithoutNormalization(id2, id1, FAST_FORWARD_ONLY, true));
+ assertThat(id1First).isNotEqualTo(id2First);
+ }
+
+ @Test
+ public void nonFfOnlyNormalizesInputOrder() {
+ ObjectId id1 = ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee");
+ ObjectId id2 = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ ConflictKey id1First = ConflictKey.create(id1, id2, MERGE_IF_NECESSARY, true);
+ ConflictKey id2First = ConflictKey.create(id2, id1, MERGE_IF_NECESSARY, true);
+ ConflictKey expected =
+ ConflictKey.createWithoutNormalization(id1, id2, MERGE_IF_NECESSARY, true);
+
+ assertThat(id1First).isEqualTo(expected);
+ assertThat(id2First).isEqualTo(expected);
+ }
+
+ @Test
+ public void serializer() throws Exception {
+ ConflictKey key =
+ ConflictKey.create(
+ ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee"),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"),
+ SubmitType.MERGE_IF_NECESSARY,
+ false);
+ byte[] serialized = ConflictKey.Serializer.INSTANCE.serialize(key);
+ assertThat(ConflictKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ ConflictKeyProto.newBuilder()
+ .setCommit(
+ bytes(
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee,
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee))
+ .setOtherCommit(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .setSubmitType("MERGE_IF_NECESSARY")
+ .setContentMerge(false)
+ .build());
+ assertThat(ConflictKey.Serializer.INSTANCE.deserialize(serialized)).isEqualTo(key);
+ }
+
+ /**
+ * See {@link com.google.gerrit.server.cache.testing.SerializedClassSubject} for background and
+ * what to do if this test fails.
+ */
+ @Test
+ public void methods() throws Exception {
+ assertThatSerializedClass(ConflictKey.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "commit", ObjectId.class,
+ "otherCommit", ObjectId.class,
+ "submitType", SubmitType.class,
+ "contentMerge", boolean.class));
+ }
+}
diff --git a/javatests/com/google/gerrit/server/query/group/BUILD b/javatests/com/google/gerrit/server/query/group/BUILD
index 01a54a3..0dd16cd 100644
--- a/javatests/com/google/gerrit/server/query/group/BUILD
+++ b/javatests/com/google/gerrit/server/query/group/BUILD
@@ -15,10 +15,11 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
- "//lib:truth-java8-extension",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/javatests/com/google/gerrit/server/query/project/BUILD b/javatests/com/google/gerrit/server/query/project/BUILD
index ac2692b..eaa3df3 100644
--- a/javatests/com/google/gerrit/server/query/project/BUILD
+++ b/javatests/com/google/gerrit/server/query/project/BUILD
@@ -14,9 +14,10 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/rules/BUILD b/javatests/com/google/gerrit/server/rules/BUILD
index 04a6485..42452df 100644
--- a/javatests/com/google/gerrit/server/rules/BUILD
+++ b/javatests/com/google/gerrit/server/rules/BUILD
@@ -10,10 +10,10 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/prolog:runtime",
+ "//lib/truth",
"//prolog:gerrit-prolog-common",
],
)
diff --git a/javatests/com/google/gerrit/server/update/BUILD b/javatests/com/google/gerrit/server/update/BUILD
index 81e8b31..46820c7 100644
--- a/javatests/com/google/gerrit/server/update/BUILD
+++ b/javatests/com/google/gerrit/server/update/BUILD
@@ -12,9 +12,9 @@
"//java/com/google/gerrit/server",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
@@ -34,10 +34,10 @@
"//java/com/google/gerrit/testing:gerrit-test-util",
"//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/javatests/com/google/gerrit/sshd/BUILD b/javatests/com/google/gerrit/sshd/BUILD
index c0eaedf..ad7d8a9 100644
--- a/javatests/com/google/gerrit/sshd/BUILD
+++ b/javatests/com/google/gerrit/sshd/BUILD
@@ -7,7 +7,7 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/sshd",
- "//lib:truth",
"//lib/mina:sshd",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/testing/BUILD b/javatests/com/google/gerrit/testing/BUILD
index 191e98f..5774707 100644
--- a/javatests/com/google/gerrit/testing/BUILD
+++ b/javatests/com/google/gerrit/testing/BUILD
@@ -7,6 +7,6 @@
deps = [
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/util/http/BUILD b/javatests/com/google/gerrit/util/http/BUILD
index 5755ca8..48b4339 100644
--- a/javatests/com/google/gerrit/util/http/BUILD
+++ b/javatests/com/google/gerrit/util/http/BUILD
@@ -8,7 +8,7 @@
"//javatests/com/google/gerrit/util/http/testutil",
"//lib:junit",
"//lib:servlet-api-3_1-without-neverlink",
- "//lib:truth",
"//lib/easymock",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gwtexpui/safehtml/BUILD b/javatests/com/google/gwtexpui/safehtml/BUILD
index 4f75bdb..694f422 100644
--- a/javatests/com/google/gwtexpui/safehtml/BUILD
+++ b/javatests/com/google/gwtexpui/safehtml/BUILD
@@ -5,8 +5,9 @@
srcs = glob(["client/**/*.java"]),
deps = [
"//java/com/google/gwtexpui/safehtml",
- "//lib:truth",
+ "//lib:guava",
"//lib/gwt:dev",
"//lib/gwt:user",
+ "//lib/truth",
],
)
diff --git a/lib/BUILD b/lib/BUILD
index 5e391e9..c698afb 100644
--- a/lib/BUILD
+++ b/lib/BUILD
@@ -217,28 +217,6 @@
)
java_library(
- name = "truth",
- data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
- visibility = ["//visibility:public"],
- exports = [
- ":guava",
- ":junit",
- "@truth//jar",
- ],
-)
-
-java_library(
- name = "truth-java8-extension",
- data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
- visibility = ["//visibility:public"],
- exports = [
- ":guava",
- ":truth",
- "@truth-java8-extension//jar",
- ],
-)
-
-java_library(
name = "javassist",
data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
visibility = ["//visibility:public"],
diff --git a/lib/asciidoctor/BUILD b/lib/asciidoctor/BUILD
index da05dd1..62b1114 100644
--- a/lib/asciidoctor/BUILD
+++ b/lib/asciidoctor/BUILD
@@ -1,48 +1,7 @@
-java_binary(
- name = "asciidoc",
- main_class = "AsciiDoctor",
- visibility = ["//visibility:public"],
- runtime_deps = [":asciidoc_lib"],
-)
-
-java_library(
- name = "asciidoc_lib",
- srcs = ["java/AsciiDoctor.java"],
- visibility = ["//visibility:public"],
- deps = [
- ":asciidoctor",
- "//lib:args4j",
- "//lib:guava",
- "//lib/log:api",
- "//lib/log:nop",
- ],
-)
-
-java_binary(
- name = "doc_indexer",
- main_class = "DocIndexer",
- visibility = ["//visibility:public"],
- runtime_deps = [":doc_indexer_lib"],
-)
-
-java_library(
- name = "doc_indexer_lib",
- srcs = ["java/DocIndexer.java"],
- visibility = ["//visibility:public"],
- deps = [
- ":asciidoc_lib",
- "//java/com/google/gerrit/server:constants",
- "//lib:args4j",
- "//lib:guava",
- "//lib/lucene:lucene-analyzers-common",
- "//lib/lucene:lucene-core-and-backward-codecs",
- ],
-)
-
java_library(
name = "asciidoctor",
data = ["//lib:LICENSE-asciidoctor"],
- visibility = ["//visibility:public"],
+ visibility = ["//java/com/google/gerrit/asciidoctor:__pkg__"],
exports = ["@asciidoctor//jar"],
runtime_deps = [":jruby"],
)
diff --git a/lib/guava.bzl b/lib/guava.bzl
index db85900..069149b 100644
--- a/lib/guava.bzl
+++ b/lib/guava.bzl
@@ -1,5 +1,5 @@
-GUAVA_VERSION = "24.1-jre"
+GUAVA_VERSION = "25.1-jre"
-GUAVA_BIN_SHA1 = "96c528475465aeb22cce60605d230a7e67cebd7b"
+GUAVA_BIN_SHA1 = "6c57e4b22b44e89e548b5c9f70f0c45fe10fb0b4"
GUAVA_DOC_URL = "https://google.github.io/guava/releases/" + GUAVA_VERSION + "/api/docs/"
diff --git a/lib/js/bower_archives.bzl b/lib/js/bower_archives.bzl
index 5ee3535..6b4e003 100644
--- a/lib/js/bower_archives.bzl
+++ b/lib/js/bower_archives.bzl
@@ -65,8 +65,8 @@
bower_archive(
name = "iron-menu-behavior",
package = "PolymerElements/iron-menu-behavior",
- version = "2.0.1",
- sha1 = "139528ee1e8d86257e2aa445de7761b8ec70ae91")
+ version = "2.1.1",
+ sha1 = "1504997f6eb9aec490b855dadee473cac064f38c")
bower_archive(
name = "iron-meta",
package = "PolymerElements/iron-meta",
@@ -105,8 +105,8 @@
bower_archive(
name = "paper-icon-button",
package = "PolymerElements/paper-icon-button",
- version = "2.1.0",
- sha1 = "caead6a276877888d128ace809376980c3f3fe42")
+ version = "2.2.0",
+ sha1 = "9525e76ef433428bb9d6ec4fa65c4ef83156a803")
bower_archive(
name = "paper-ripple",
package = "PolymerElements/paper-ripple",
diff --git a/lib/truth/BUILD b/lib/truth/BUILD
new file mode 100644
index 0000000..82cd98a
--- /dev/null
+++ b/lib/truth/BUILD
@@ -0,0 +1,49 @@
+java_library(
+ name = "truth",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = ["@truth//jar"],
+ runtime_deps = [
+ "//lib:guava",
+ "//lib:junit",
+ ],
+)
+
+java_library(
+ name = "truth-java8-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = ["@truth-java8-extension//jar"],
+ runtime_deps = [
+ ":truth",
+ "//lib:guava",
+ ],
+)
+
+java_library(
+ name = "truth-liteproto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:private"],
+ exports = ["@truth-liteproto-extension//jar"],
+ runtime_deps = [
+ ":truth",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
+
+java_library(
+ name = "truth-proto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = [
+ ":truth-liteproto-extension",
+ "@truth-proto-extension//jar",
+ ],
+ runtime_deps = [
+ ":truth",
+ ":truth-liteproto-extension",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
diff --git a/plugins/codemirror-editor b/plugins/codemirror-editor
index c97e280..53dccff 160000
--- a/plugins/codemirror-editor
+++ b/plugins/codemirror-editor
@@ -1 +1 @@
-Subproject commit c97e2806532cff00fea6424cde0d440f9ea5016d
+Subproject commit 53dccff17c029459999ff70ac886b80626af634b
diff --git a/plugins/download-commands b/plugins/download-commands
index 37219fe..39b9d56 160000
--- a/plugins/download-commands
+++ b/plugins/download-commands
@@ -1 +1 @@
-Subproject commit 37219fe3fd59727af1ac7a3b0ee00a6924ff8e00
+Subproject commit 39b9d56312d505308f54b810e59ff481b9a380aa
diff --git a/plugins/hooks b/plugins/hooks
index da73b23..d497bed 160000
--- a/plugins/hooks
+++ b/plugins/hooks
@@ -1 +1 @@
-Subproject commit da73b23cfb065fc28c9e7653860ccd34bd68f0f0
+Subproject commit d497bed6963134388e7f500364a4ae59b94bafe7
diff --git a/plugins/reviewnotes b/plugins/reviewnotes
index 4672856..8ddb7e2 160000
--- a/plugins/reviewnotes
+++ b/plugins/reviewnotes
@@ -1 +1 @@
-Subproject commit 467285664ebf8eb6f1e03ff13ebc706eee6d8662
+Subproject commit 8ddb7e2ebda8cb7813ae0f3f0b8602e14915e300
diff --git a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
index 4d53631..04d8b6e 100644
--- a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
+++ b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
@@ -51,7 +51,6 @@
detached() {
this._handleHideTooltip();
- this.unlisten(window, 'scroll', '_handleWindowScroll');
},
_setupTooltipListeners() {
@@ -59,9 +58,6 @@
this._hasSetupTooltipListeners = true;
this.addEventListener('mouseenter', this._handleShowTooltip.bind(this));
- this.addEventListener('mouseleave', this._handleHideTooltip.bind(this));
- this.addEventListener('tap', this._handleHideTooltip.bind(this));
- this.listen(window, 'scroll', '_handleWindowScroll');
},
_handleShowTooltip(e) {
@@ -91,6 +87,9 @@
tooltip.style.visibility = null;
this._tooltip = tooltip;
+ this.listen(window, 'scroll', '_handleWindowScroll');
+ this.listen(this, 'mouseleave', '_handleHideTooltip');
+ this.listen(this, 'tap', '_handleHideTooltip');
},
_handleHideTooltip(e) {
@@ -100,6 +99,9 @@
return;
}
+ this.unlisten(window, 'scroll', '_handleWindowScroll');
+ this.unlisten(this, 'mouseleave', '_handleHideTooltip');
+ this.unlisten(this, 'tap', '_handleHideTooltip');
this.setAttribute('title', this._titleText);
if (this._tooltip && this._tooltip.parentNode) {
this._tooltip.parentNode.removeChild(this._tooltip);
diff --git a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
index feaadc7..8512a5d 100644
--- a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
+++ b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
@@ -209,8 +209,7 @@
_handleDeleteItemConfirm() {
this.$.overlay.close();
if (this.detailType === DETAIL_TYPES.BRANCHES) {
- return this.$.restAPI.deleteRepoBranches(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoBranches(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
@@ -219,8 +218,7 @@
}
});
} else if (this.detailType === DETAIL_TYPES.TAGS) {
- return this.$.restAPI.deleteRepoTags(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoTags(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
diff --git a/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html b/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
index ac12d71..704974d 100644
--- a/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
+++ b/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
@@ -30,7 +30,7 @@
<dom-module id="gr-repo">
<template>
- <style="shared-styles"></style>
+ <style include="shared-styles"></style>
<style include="gr-subpage-styles">
h2.edited:after {
color: var(--deemphasized-text-color);
diff --git a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
index 3f967c8..cfdf88c 100644
--- a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
+++ b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
@@ -1188,7 +1188,7 @@
}
const patchNum = revisionAction ? this.latestPatchNum : null;
return this.$.restAPI.getChangeURLAndSend(this.changeNum, method,
- patchNum, actionEndpoint, payload, handleError, this)
+ patchNum, actionEndpoint, payload, handleError)
.then(response => {
cleanupFn.call(this);
return response;
diff --git a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
index 29ffec8..cb9f4c5 100644
--- a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
+++ b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
@@ -57,6 +57,8 @@
UNIFIED: 'UNIFIED_DIFF',
};
+ const CHANGE_DATA_TIMING_LABEL = 'ChangeDataLoaded';
+
Polymer({
is: 'gr-change-view',
@@ -624,6 +626,8 @@
this.$.fileList.collapseAllDiffs();
this._patchRange = patchRange;
+ // If the change has already been loaded and the parameter change is only
+ // in the patch range, then don't do a full reload.
if (this._initialLoadComplete && patchChanged) {
if (patchRange.patchNum == null) {
patchRange.patchNum = this.computeLatestPatchNum(this._allPatchSets);
@@ -637,7 +641,7 @@
this._changeNum = value.changeNum;
this.$.relatedChanges.clear();
- this._reload().then(() => {
+ this._reload(true).then(() => {
this._performPostLoadTasks();
});
},
@@ -651,7 +655,6 @@
},
_performPostLoadTasks() {
- this.$.relatedChanges.reload();
this._maybeShowReplyDialog();
this._maybeShowRevertDialog();
@@ -1199,43 +1202,102 @@
});
},
- _reload() {
+ /**
+ * Reload the change.
+ * @param {boolean=} opt_reloadRelatedChanges Reloads the related changes
+ * when true.
+ * @return {Promise} A promise that resolves when the core data has loaded.
+ * Some non-core data loading may still be in-flight when the core data
+ * promise resolves.
+ */
+ _reload(opt_reloadRelatedChanges) {
this._loading = true;
this._relatedChangesCollapsed = true;
- const detailCompletes = this._getChangeDetail().then(() => {
- this._loading = false;
- this._getProjectConfig();
- });
+ // Array to house all promises related to data requests.
+ const allDataPromises = [];
- this._reloadComments();
+ // Resolves when the change detail and the edit patch set (if available)
+ // are loaded.
+ const detailCompletes = this._getChangeDetail();
+ allDataPromises.push(detailCompletes);
- let reloadPromise;
+ // Resolves when the loading flag is set to false, meaning that some
+ // change content may start appearing.
+ const loadingFlagSet = detailCompletes
+ .then(() => { this._loading = false; });
+ // Resolves when the project config has loaded.
+ const projectConfigLoaded = detailCompletes
+ .then(() => this._getProjectConfig());
+ allDataPromises.push(projectConfigLoaded);
+
+ // Resolves when change comments have loaded (comments, drafts and robot
+ // comments).
+ const commentsLoaded = this._reloadComments();
+ allDataPromises.push(commentsLoaded);
+
+ let coreDataPromise;
+
+ // If the patch number is specified
if (this._patchRange.patchNum) {
- reloadPromise = Promise.all([
- this._reloadPatchNumDependentResources(),
- detailCompletes,
- ]).then(() => {
- return Promise.all([
- this._getMergeability(),
- this.$.actions.reload(),
- ]);
- });
+ // Because a specific patchset is specified, reload the resources that
+ // are keyed by patch number or patch range.
+ const patchResourcesLoaded = this._reloadPatchNumDependentResources();
+ allDataPromises.push(patchResourcesLoaded);
+
+ // Promise resolves when the change detail and patch dependent resources
+ // have loaded.
+ const detailAndPatchResourcesLoaded =
+ Promise.all([patchResourcesLoaded, loadingFlagSet]);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = detailAndPatchResourcesLoaded
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+ // Promise resolves when the change actions have loaded.
+ const actionsLoaded = detailAndPatchResourcesLoaded
+ .then(() => this.$.actions.reload());
+ allDataPromises.push(actionsLoaded);
+
+ // The core data is loaded when both mergeability and actions are known.
+ coreDataPromise = Promise.all([mergeabilityLoaded, actionsLoaded]);
} else {
- // The patch number is reliant on the change detail request.
- reloadPromise = detailCompletes.then(() => {
- this.$.fileList.reload();
- if (!this._latestCommitMessage) {
- this._getLatestCommitMessage();
- }
- return this._getMergeability();
+ // Resolves when the file list has loaded.
+ const fileListReload = loadingFlagSet
+ .then(() => this.$.fileList.reload());
+ allDataPromises.push(fileListReload);
+
+ const latestCommitMessageLoaded = loadingFlagSet.then(() => {
+ // If the latest commit message is known, there is nothing to do.
+ if (this._latestCommitMessage) { return Promise.resolve(); }
+ return this._getLatestCommitMessage();
});
+ allDataPromises.push(latestCommitMessageLoaded);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = loadingFlagSet
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+ // Core data is loaded when mergeability has been loaded.
+ coreDataPromise = mergeabilityLoaded;
}
- return reloadPromise.then(() => {
- this.$.reporting.changeDisplayed();
+ if (opt_reloadRelatedChanges) {
+ const relatedChangesLoaded = coreDataPromise
+ .then(() => this.$.relatedChanges.reload());
+ allDataPromises.push(relatedChangesLoaded);
+ }
+
+ this.$.reporting.time(CHANGE_DATA_TIMING_LABEL);
+ Promise.all(allDataPromises).then(() => {
+ this.$.reporting.timeEnd(CHANGE_DATA_TIMING_LABEL);
});
+
+ return coreDataPromise
+ .then(() => { this.$.reporting.changeDisplayed(); });
},
/**
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
index b59742e..3f26628 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
@@ -283,6 +283,7 @@
as="file"
initial-count="[[fileListIncrement]]"
target-framerate="1">
+ [[_reportRenderedRow(index)]]
<div class="stickyArea">
<div class$="file-row row [[_computePathClass(file.__path, _expandedFilePaths.*)]]"
data-path$="[[file.__path]]" tabindex="-1">
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
index 0fa037c..1d3a020 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
@@ -26,6 +26,8 @@
const SIZE_BAR_GAP_WIDTH = 1;
const SIZE_BAR_MIN_WIDTH = 1.5;
+ const RENDER_TIME = 'FileListRenderTime';
+
const FileStatus = {
A: 'Added',
C: 'Copied',
@@ -101,8 +103,6 @@
_filesByPath: Object,
_files: {
type: Array,
- computed: '_computeFiles(_filesByPath, changeComments, patchRange, ' +
- '_reviewed)',
observer: '_filesChanged',
value() { return []; },
},
@@ -181,6 +181,8 @@
observers: [
'_expandedPathsChanged(_expandedFilePaths.splices)',
+ '_computeFiles(_filesByPath, changeComments, patchRange, _reviewed, ' +
+ '_loading)',
],
keyBindings: {
@@ -429,17 +431,21 @@
return GrCountStringFormatter.computeShortString(commentCount, 'c');
},
- _reviewFile(path) {
+ /**
+ * @param {string} path
+ * @param {boolean=} opt_reviewed
+ */
+ _reviewFile(path, opt_reviewed) {
if (this.editMode) { return; }
const index = this._files.findIndex(file => file.__path === path);
- const reviewed = this._files[index].isReviewed;
+ const reviewed = opt_reviewed || !this._files[index].isReviewed;
- this.set(['_files', index, 'isReviewed'], !reviewed);
+ this.set(['_files', index, 'isReviewed'], reviewed);
if (index < this._shownFiles.length) {
- this.set(['_shownFiles', index, 'isReviewed'], !reviewed);
+ this.set(['_shownFiles', index, 'isReviewed'], reviewed);
}
- this._saveReviewedState(path, !reviewed);
+ this._saveReviewedState(path, reviewed);
},
_saveReviewedState(path, reviewed) {
@@ -776,13 +782,14 @@
'gr-icons:expand-less' : 'gr-icons:expand-more';
},
- _computeFiles(filesByPath, changeComments, patchRange, reviewed) {
+ _computeFiles(filesByPath, changeComments, patchRange, reviewed, loading) {
+ // Await all promises resolving from reload. @See Issue 9057
+ if (loading) { return; }
+
const commentedPaths = changeComments.getPaths(patchRange);
const files = Object.assign({}, filesByPath);
Object.keys(commentedPaths).forEach(commentedPath => {
- if (files.hasOwnProperty(commentedPath)) {
- return;
- }
+ if (files.hasOwnProperty(commentedPath)) { return; }
files[commentedPath] = {status: 'U'};
});
const reviewedSet = new Set(reviewed || []);
@@ -791,12 +798,18 @@
files[filePath].isReviewed = reviewedSet.has(filePath);
}
- return this._normalizeChangeFilesResponse(files);
+ this._files = this._normalizeChangeFilesResponse(files);
},
_computeFilesShown(numFilesShown, files) {
const filesShown = files.base.slice(0, numFilesShown);
this.fire('files-shown-changed', {length: filesShown.length});
+
+ // Start the timer for the rendering work here because this is where the
+ // _shownFiles property is being set, and _shownFiles is used in the
+ // dom-repeat binding.
+ this.$.reporting.time(RENDER_TIME);
+
return filesShown;
},
@@ -953,7 +966,7 @@
path, this.patchRange, this.projectConfig);
const promises = [diffElem.reload()];
if (this._loggedIn && !this.diffPrefs.manual_review) {
- promises.push(this._reviewFile(path));
+ promises.push(this._reviewFile(path, true));
}
return Promise.all(promises);
}).then(() => {
@@ -1175,5 +1188,21 @@
_noDiffsExpanded() {
return this.filesExpanded === GrFileListConstants.FilesExpandedState.NONE;
},
+
+ /**
+ * Method to call via binding when each file list row is rendered. This
+ * allows approximate detection of when the dom-repeat has completed
+ * rendering.
+ * @param {number} index The index of the row being rendered.
+ * @return {string} an empty string.
+ */
+ _reportRenderedRow(index) {
+ if (index === this._shownFiles.length - 1) {
+ this.async(() => {
+ this.$.reporting.timeEnd(RENDER_TIME);
+ }, 1);
+ }
+ return '';
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
index 8541edf..0289449 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
@@ -60,10 +60,11 @@
stub('gr-rest-api-interface', {
getLoggedIn() { return Promise.resolve(true); },
getPreferences() { return Promise.resolve({}); },
- fetchJSON() { return Promise.resolve({}); },
+ getDiffPreferences() { return Promise.resolve({}); },
getDiffComments() { return Promise.resolve({}); },
getDiffRobotComments() { return Promise.resolve({}); },
getDiffDrafts() { return Promise.resolve({}); },
+ getAccountCapabilities() { return Promise.resolve({}); },
});
stub('gr-date-formatter', {
_loadTimeFormat() { return Promise.resolve(''); },
@@ -86,6 +87,7 @@
.returns({meta: {}, left: [], right: []});
done();
});
+ element._loading = false;
element.diffPrefs = {};
element.numFilesShown = 200;
element.patchRange = {
@@ -127,6 +129,19 @@
assert.isTrue(controlRow.classList.contains('invisible'));
});
+ test('rendering each row calls the _reportRenderedRow method', () => {
+ const renderedStub = sandbox.stub(element, '_reportRenderedRow');
+ element._filesByPath = _.range(10)
+ .reduce((_filesByPath, i) => {
+ _filesByPath['/file' + i] = {lines_inserted: 9};
+ return _filesByPath;
+ }, {});
+ flushAsynchronousOperations();
+ assert.equal(
+ Polymer.dom(element.root).querySelectorAll('.file-row').length, 10);
+ assert.equal(renderedStub.callCount, 10);
+ });
+
test('calculate totals for patch number', () => {
element._filesByPath = {
'/COMMIT_MSG': {
@@ -1023,6 +1038,7 @@
delete element.diffPrefs.manual_review;
return element._renderInOrder(['p'], diffs, 1).then(() => {
assert.isTrue(reviewStub.called);
+ assert.isTrue(reviewStub.calledWithExactly('p', true));
});
});
});
@@ -1308,6 +1324,7 @@
.returns({meta: {}, left: [], right: []});
done();
});
+ element._loading = false;
element.numFilesShown = 75;
element.selectedIndex = 0;
element._filesByPath = {
diff --git a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
index cf79a31..b824f1c 100644
--- a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
+++ b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
@@ -30,6 +30,7 @@
padding: 4.5em 1em 1em 1em;
}
header {
+ background-color: var(--dialog-background-color);
border-bottom: 1px solid var(--border-color);
left: 0;
padding: 1em;
diff --git a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
index bbe2877..d1ae719 100644
--- a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
+++ b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
@@ -31,7 +31,7 @@
color: var(--header-text-color);
}
--gr-dropdown-item: {
- color: var(--header-text-color);
+ color: var(--primary-text-color);
}
}
gr-avatar {
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js
new file mode 100644
index 0000000..28c46f4
--- /dev/null
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js
@@ -0,0 +1,61 @@
+/**
+ * @license
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+ 'use strict';
+
+ const JANK_SLEEP_TIME_MS = 1000;
+
+ const GrJankDetector = {
+ // Slowdowns counter.
+ jank: 0,
+ fps: 0,
+ _lastFrameTime: 0,
+
+ start() {
+ this._requestAnimationFrame(this._detect.bind(this));
+ },
+
+ _requestAnimationFrame(callback) {
+ window.requestAnimationFrame(callback);
+ },
+
+ _detect(now) {
+ if (this._lastFrameTime === 0) {
+ this._lastFrameTime = now;
+ this.fps = 0;
+ this._requestAnimationFrame(this._detect.bind(this));
+ return;
+ }
+ const fpsNow = 1000/(now - this._lastFrameTime);
+ this._lastFrameTime = now;
+ // Calculate moving average within last 3 measurements.
+ this.fps = this.fps === 0 ? fpsNow : ((this.fps * 2 + fpsNow) / 3);
+ if (this.fps > 10) {
+ this._requestAnimationFrame(this._detect.bind(this));
+ } else {
+ this.jank++;
+ console.warn('JANK', this.jank);
+ this._lastFrameTime = 0;
+ window.setTimeout(
+ () => this._requestAnimationFrame(this._detect.bind(this)),
+ JANK_SLEEP_TIME_MS);
+ }
+ },
+ };
+
+ window.GrJankDetector = GrJankDetector;
+})();
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html
new file mode 100644
index 0000000..6faeec1
--- /dev/null
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html
@@ -0,0 +1,78 @@
+<!DOCTYPE html>
+<!--
+@license
+Copyright (C) 2018 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
+<title>gr-jank-detector</title>
+
+<script src="../../../bower_components/webcomponentsjs/webcomponents-lite.min.js"></script>
+<script src="../../../bower_components/web-component-tester/browser.js"></script>
+<link rel="import" href="../../../test/common-test-setup.html"/>
+
+<script src="gr-jank-detector.js"></script>
+
+<script>
+ suite('gr-jank-detector tests', () => {
+ let sandbox;
+ let clock;
+ let instance;
+
+ const NOW_TIME = 100;
+
+ setup(() => {
+ sandbox = sinon.sandbox.create();
+ clock = sinon.useFakeTimers(NOW_TIME);
+ instance = GrJankDetector;
+ instance._lastFrameTime = 0;
+ sandbox.stub(instance, '_requestAnimationFrame');
+ });
+
+ teardown(() => {
+ sandbox.restore();
+ });
+
+ test('start() installs frame callback', () => {
+ sandbox.stub(instance, '_detect');
+ instance._requestAnimationFrame.callsArg(0);
+ instance.start();
+ assert.isTrue(instance._detect.calledOnce);
+ });
+
+ test('measures fps', () => {
+ instance._detect(10);
+ instance._detect(30);
+ assert.equal(instance.fps, 50);
+ });
+
+ test('detects jank', () => {
+ let now = 10;
+ instance._detect(now);
+ const fastFrame = () => instance._detect(now += 20);
+ const slowFrame = () => instance._detect(now += 300);
+ fastFrame();
+ assert.equal(instance.jank, 0);
+ _.times(4, slowFrame);
+ assert.equal(instance.jank, 0);
+ instance._requestAnimationFrame.reset();
+ slowFrame();
+ assert.equal(instance.jank, 1);
+ assert.isFalse(instance._requestAnimationFrame.called);
+ clock.tick(1000);
+ assert.isTrue(instance._requestAnimationFrame.called);
+ });
+ });
+</script>
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
index 2970a26..cbb2c09 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
@@ -19,5 +19,6 @@
<link rel="import" href="../../shared/gr-js-api-interface/gr-js-api-interface.html">
<dom-module id="gr-reporting">
+ <script src="gr-jank-detector.js"></script>
<script src="gr-reporting.js"></script>
</dom-module>
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
index 0db442f..ae67dac 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
@@ -48,6 +48,14 @@
STARTED_HIDDEN: 'hidden',
};
+ // Frame rate related constants.
+ const JANK = {
+ TYPE: 'lifecycle',
+ CATEGORY: 'UI Latency',
+ // Reported events - alphabetize below.
+ COUNT: 'Jank count',
+ };
+
// Navigation reporting constants.
const NAVIGATION = {
TYPE: 'nav-report',
@@ -118,6 +126,8 @@
};
catchErrors();
+ GrJankDetector.start();
+
const GrReporting = Polymer({
is: 'gr-reporting',
@@ -206,6 +216,11 @@
},
beforeLocationChanged() {
+ if (GrJankDetector.jank > 0) {
+ this.reporter(
+ JANK.TYPE, JANK.CATEGORY, JANK.COUNT, GrJankDetector.jank);
+ GrJankDetector.jank = 0;
+ }
for (const prop of Object.keys(this._baselines)) {
delete this._baselines[prop];
}
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
index bfb45f6..e2bb83d 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
@@ -93,7 +93,11 @@
test('beforeLocationChanged', () => {
element._baselines['garbage'] = 'monster';
sandbox.stub(element, 'time');
+ GrJankDetector.jank = 42;
element.beforeLocationChanged();
+ assert.equal(GrJankDetector.jank, 0);
+ assert.isTrue(element.reporter.calledWithExactly(
+ 'lifecycle', 'UI Latency', 'Jank count', 42));
assert.isTrue(element.time.calledWithExactly('DashboardDisplayed'));
assert.isTrue(element.time.calledWithExactly('ChangeDisplayed'));
assert.isTrue(element.time.calledWithExactly('DiffViewDisplayed'));
diff --git a/polygerrit-ui/app/elements/core/gr-router/gr-router.js b/polygerrit-ui/app/elements/core/gr-router/gr-router.js
index 8af7301..6adc286 100644
--- a/polygerrit-ui/app/elements/core/gr-router/gr-router.js
+++ b/polygerrit-ui/app/elements/core/gr-router/gr-router.js
@@ -113,16 +113,21 @@
CHANGE_NUMBER_LEGACY: /^\/(\d+)\/?/,
// Matches
- // /c/<project>/+/<changeNum>/
- // [<basePatchNum|edit>..][<patchNum|edit>]/[path].
+ // /c/<project>/+/<changeNum>/[<basePatchNum|edit>..][<patchNum|edit>].
// TODO(kaspern): Migrate completely to project based URLs, with backwards
// compatibility for change-only.
- // eslint-disable-next-line max-len
- CHANGE_OR_DIFF: /^\/c\/(.+)\/\+\/(\d+)(\/?((-?\d+|edit)(\.\.(\d+|edit))?(\/(.+))?))?\/?$/,
+ CHANGE: /^\/c\/(.+)\/\+\/(\d+)(\/?((-?\d+|edit)(\.\.(\d+|edit))?))?\/?$/,
// Matches /c/<project>/+/<changeNum>/[<patchNum|edit>],edit
CHANGE_EDIT: /^\/c\/(.+)\/\+\/(\d+)(\/(\d+))?,edit\/?$/,
+ // Matches
+ // /c/<project>/+/<changeNum>/[<basePatchNum|edit>..]<patchNum|edit>/<path>.
+ // TODO(kaspern): Migrate completely to project based URLs, with backwards
+ // compatibility for change-only.
+ // eslint-disable-next-line max-len
+ DIFF: /^\/c\/(.+)\/\+\/(\d+)(\/((-?\d+|edit)(\.\.(\d+|edit))?(\/(.+))))\/?$/,
+
// Matches /c/<project>/+/<changeNum>/[<patchNum|edit>]/<path>,edit
DIFF_EDIT: /^\/c\/(.+)\/\+\/(\d+)\/(\d+|edit)\/(.+),edit$/,
@@ -642,24 +647,13 @@
return;
}
page(pattern, this._loadUserMiddleware.bind(this), data => {
- this.$.reporting.locationChanged(this._getPageName(handlerName, data));
+ this.$.reporting.locationChanged(handlerName);
const promise = opt_authRedirect ?
this._redirectIfNotLoggedIn(data) : Promise.resolve();
promise.then(() => { this[handlerName](data); });
});
},
- _getPageName(handlerName, ctx) {
- switch (handlerName) {
- case '_handleChangeOrDiffRoute': {
- const isDiffView = ctx.params[8];
- return isDiffView ? Gerrit.Nav.View.DIFF : Gerrit.Nav.View.CHANGE;
- }
- default:
- return handlerName;
- }
- },
-
_startRouter() {
const base = this.getBaseUrl();
if (base) {
@@ -806,7 +800,9 @@
this._mapRoute(RoutePattern.CHANGE_EDIT, '_handleChangeEditRoute', true);
- this._mapRoute(RoutePattern.CHANGE_OR_DIFF, '_handleChangeOrDiffRoute');
+ this._mapRoute(RoutePattern.DIFF, '_handleDiffRoute');
+
+ this._mapRoute(RoutePattern.CHANGE, '_handleChangeRoute');
this._mapRoute(RoutePattern.CHANGE_LEGACY, '_handleChangeLegacyRoute');
@@ -1245,9 +1241,20 @@
this._redirect('/c/' + encodeURIComponent(ctx.params[0]));
},
- _handleChangeOrDiffRoute(ctx) {
- const isDiffView = ctx.params[8];
+ _handleChangeRoute(ctx) {
+ // Parameter order is based on the regex group number matched.
+ const params = {
+ project: ctx.params[0],
+ changeNum: ctx.params[1],
+ basePatchNum: ctx.params[4],
+ patchNum: ctx.params[6],
+ view: Gerrit.Nav.View.CHANGE,
+ };
+ this._redirectOrNavigate(params);
+ },
+
+ _handleDiffRoute(ctx) {
// Parameter order is based on the regex group number matched.
const params = {
project: ctx.params[0],
@@ -1255,15 +1262,13 @@
basePatchNum: ctx.params[4],
patchNum: ctx.params[6],
path: ctx.params[8],
- view: isDiffView ? Gerrit.Nav.View.DIFF : Gerrit.Nav.View.CHANGE,
+ view: Gerrit.Nav.View.DIFF,
};
- if (isDiffView) {
- const address = this._parseLineAddress(ctx.hash);
- if (address) {
- params.leftSide = address.leftSide;
- params.lineNum = address.lineNum;
- }
+ const address = this._parseLineAddress(ctx.hash);
+ if (address) {
+ params.leftSide = address.leftSide;
+ params.lineNum = address.lineNum;
}
this._redirectOrNavigate(params);
diff --git a/polygerrit-ui/app/elements/core/gr-router/gr-router_test.html b/polygerrit-ui/app/elements/core/gr-router/gr-router_test.html
index e0a7e46..b68a5e9 100644
--- a/polygerrit-ui/app/elements/core/gr-router/gr-router_test.html
+++ b/polygerrit-ui/app/elements/core/gr-router/gr-router_test.html
@@ -152,7 +152,8 @@
'_handleBranchListFilterRoute',
'_handleBranchListOffsetRoute',
'_handleChangeNumberLegacyRoute',
- '_handleChangeOrDiffRoute',
+ '_handleChangeRoute',
+ '_handleDiffRoute',
'_handleDefaultRoute',
'_handleChangeLegacyRoute',
'_handleDiffLegacyRoute',
@@ -1267,7 +1268,57 @@
'/c/1234/3..8/foo/bar#b123'));
});
- suite('_handleChangeOrDiffRoute', () => {
+ suite('_handleChangeRoute', () => {
+ let normalizeRangeStub;
+
+ function makeParams(path, hash) {
+ return {
+ params: [
+ 'foo/bar', // 0 Project
+ 1234, // 1 Change number
+ null, // 2 Unused
+ null, // 3 Unused
+ 4, // 4 Base patch number
+ null, // 5 Unused
+ 7, // 6 Patch number
+ ],
+ };
+ }
+
+ setup(() => {
+ normalizeRangeStub = sandbox.stub(element,
+ '_normalizePatchRangeParams');
+ sandbox.stub(element.$.restAPI, 'setInProjectLookup');
+ });
+
+ test('needs redirect', () => {
+ normalizeRangeStub.returns(true);
+ sandbox.stub(element, '_generateUrl').returns('foo');
+ const ctx = makeParams(null, '');
+ element._handleChangeRoute(ctx);
+ assert.isTrue(normalizeRangeStub.called);
+ assert.isFalse(setParamsStub.called);
+ assert.isTrue(redirectStub.calledOnce);
+ assert.isTrue(redirectStub.calledWithExactly('foo'));
+ });
+
+ test('change view', () => {
+ normalizeRangeStub.returns(false);
+ sandbox.stub(element, '_generateUrl').returns('foo');
+ const ctx = makeParams(null, '');
+ assertDataToParams(ctx, '_handleChangeRoute', {
+ view: Gerrit.Nav.View.CHANGE,
+ project: 'foo/bar',
+ changeNum: 1234,
+ basePatchNum: 4,
+ patchNum: 7,
+ });
+ assert.isFalse(redirectStub.called);
+ assert.isTrue(normalizeRangeStub.called);
+ });
+ });
+
+ suite('_handleDiffRoute', () => {
let normalizeRangeStub;
function makeParams(path, hash) {
@@ -1297,40 +1348,18 @@
normalizeRangeStub.returns(true);
sandbox.stub(element, '_generateUrl').returns('foo');
const ctx = makeParams(null, '');
- element._handleChangeOrDiffRoute(ctx);
+ element._handleDiffRoute(ctx);
assert.isTrue(normalizeRangeStub.called);
assert.isFalse(setParamsStub.called);
assert.isTrue(redirectStub.calledOnce);
assert.isTrue(redirectStub.calledWithExactly('foo'));
});
- test('change view', () => {
- normalizeRangeStub.returns(false);
- sandbox.stub(element, '_generateUrl').returns('foo');
- const ctx = makeParams(null, '');
- assertDataToParams(ctx, '_handleChangeOrDiffRoute', {
- view: Gerrit.Nav.View.CHANGE,
- project: 'foo/bar',
- changeNum: 1234,
- basePatchNum: 4,
- patchNum: 7,
- path: null,
- });
- assert.isFalse(redirectStub.called);
- assert.isTrue(normalizeRangeStub.called);
- });
-
- test('gr-reporting recognizes change page', () => {
- const ctx = makeParams(null, '');
- assert.equal(element._getPageName('_handleChangeOrDiffRoute', ctx),
- Gerrit.Nav.View.CHANGE);
- });
-
test('diff view', () => {
normalizeRangeStub.returns(false);
sandbox.stub(element, '_generateUrl').returns('foo');
const ctx = makeParams('foo/bar/baz', 'b44');
- assertDataToParams(ctx, '_handleChangeOrDiffRoute', {
+ assertDataToParams(ctx, '_handleDiffRoute', {
view: Gerrit.Nav.View.DIFF,
project: 'foo/bar',
changeNum: 1234,
@@ -1343,12 +1372,6 @@
assert.isFalse(redirectStub.called);
assert.isTrue(normalizeRangeStub.called);
});
-
- test('gr-reporting recognizes diff page', () => {
- const ctx = makeParams('foo/bar/baz', 'b44');
- assert.equal(element._getPageName('_handleChangeOrDiffRoute', ctx),
- Gerrit.Nav.View.DIFF);
- });
});
test('_handleDiffEditRoute', () => {
diff --git a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
index 81c6d99..540df98 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
@@ -108,7 +108,6 @@
cursor: pointer;
}
.content {
- overflow: hidden;
/* Set min width since setting width on table cells still
allows them to shrink. Do not set max width because
CJK (Chinese-Japanese-Korean) glyphs have variable width */
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
index cd9f9dc..017cd5d 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
@@ -15,11 +15,11 @@
limitations under the License.
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<link rel="import" href="../gr-syntax-lib-loader/gr-syntax-lib-loader.html">
+<link rel="import" href="../../shared/gr-lib-loader/gr-lib-loader.html">
<dom-module id="gr-syntax-layer">
<template>
- <gr-syntax-lib-loader id="libLoader"></gr-syntax-lib-loader>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
</template>
<script src="../gr-diff/gr-diff-line.js"></script>
<script src="../gr-diff-highlight/gr-annotation.js"></script>
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
index f8db343..15a8a0a 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
@@ -442,7 +442,7 @@
},
_loadHLJS() {
- return this.$.libLoader.get().then(hljs => {
+ return this.$.libLoader.getHLJS().then(hljs => {
this._hljs = hljs;
});
},
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
index 74fc3bf..f2458fc 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
@@ -185,7 +185,7 @@
const mockHLJS = getMockHLJS();
const highlightSpy = sinon.spy(mockHLJS, 'highlight');
- sandbox.stub(element.$.libLoader, 'get',
+ sandbox.stub(element.$.libLoader, 'getHLJS',
() => { return Promise.resolve(mockHLJS); });
const processNextSpy = sandbox.spy(element, '_processNextLine');
const processPromise = element.process();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js b/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
deleted file mode 100644
index 6ec7ab2..0000000
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * @license
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-(function() {
- 'use strict';
-
- const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
- const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
-
- Polymer({
- is: 'gr-syntax-lib-loader',
-
- properties: {
- _state: {
- type: Object,
-
- // NOTE: intended singleton.
- value: {
- configured: false,
- loading: false,
- callbacks: [],
- },
- },
- },
-
- get() {
- return new Promise((resolve, reject) => {
- // If the lib is totally loaded, resolve immediately.
- if (this._getHighlightLib()) {
- resolve(this._getHighlightLib());
- return;
- }
-
- // If the library is not currently being loaded, then start loading it.
- if (!this._state.loading) {
- this._state.loading = true;
- this._loadHLJS().then(this._onLibLoaded.bind(this)).catch(reject);
- }
-
- this._state.callbacks.push(resolve);
- });
- },
-
- _onLibLoaded() {
- const lib = this._getHighlightLib();
- this._state.loading = false;
- for (const cb of this._state.callbacks) {
- cb(lib);
- }
- this._state.callbacks = [];
- },
-
- _getHighlightLib() {
- const lib = window.hljs;
- if (lib && !this._state.configured) {
- this._state.configured = true;
-
- lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
- }
- return lib;
- },
-
- _getLibRoot() {
- if (this._cachedLibRoot) { return this._cachedLibRoot; }
-
- const appLink = document.head
- .querySelector('link[rel=import][href$="gr-app.html"]');
-
- if (!appLink) { return null; }
-
- return this._cachedLibRoot = appLink
- .href
- .match(LIB_ROOT_PATTERN)[1];
- },
- _cachedLibRoot: null,
-
- _loadHLJS() {
- return new Promise((resolve, reject) => {
- const script = document.createElement('script');
- const src = this._getHLJSUrl();
-
- if (!src) {
- reject(new Error('Unable to load blank HLJS url.'));
- return;
- }
-
- script.src = src;
- script.onload = resolve;
- script.onerror = reject;
- Polymer.dom(document.head).appendChild(script);
- });
- },
-
- _getHLJSUrl() {
- const root = this._getLibRoot();
- if (!root) { return null; }
- return root + HLJS_PATH;
- },
- });
-})();
diff --git a/polygerrit-ui/app/elements/gr-app.html b/polygerrit-ui/app/elements/gr-app.html
index d7d50d1..6a2bfe0 100644
--- a/polygerrit-ui/app/elements/gr-app.html
+++ b/polygerrit-ui/app/elements/gr-app.html
@@ -19,6 +19,11 @@
if (localStorage.getItem('USE_SHADOW_DOM') === 'true') {
window.Polymer = {
dom: 'shadow',
+ passiveTouchGestures: true,
+ };
+ } else if (!window.Polymer) {
+ window.Polymer = {
+ passiveTouchGestures: true,
};
}
</script>
@@ -34,8 +39,8 @@
<link rel="import" href="../behaviors/base-url-behavior/base-url-behavior.html">
<link rel="import" href="../behaviors/keyboard-shortcut-behavior/keyboard-shortcut-behavior.html">
-<link rel="import" href="../styles/app-theme.html">
<link rel="import" href="../styles/shared-styles.html">
+<link rel="import" href="../styles/themes/app-theme.html">
<link rel="import" href="./admin/gr-admin-view/gr-admin-view.html">
<link rel="import" href="./change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="./change-list/gr-dashboard-view/gr-dashboard-view.html">
@@ -56,6 +61,7 @@
<link rel="import" href="./settings/gr-registration-dialog/gr-registration-dialog.html">
<link rel="import" href="./settings/gr-settings-view/gr-settings-view.html">
<link rel="import" href="./shared/gr-fixed-panel/gr-fixed-panel.html">
+<link rel="import" href="./shared/gr-lib-loader/gr-lib-loader.html">
<link rel="import" href="./shared/gr-rest-api-interface/gr-rest-api-interface.html">
<script src="../scripts/util.js"></script>
@@ -229,6 +235,7 @@
<gr-plugin-host id="plugins"
config="[[_serverConfig]]">
</gr-plugin-host>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
<gr-external-style id="externalStyle" name="app-theme"></gr-external-style>
</template>
<script src="gr-app.js" crossorigin="anonymous"></script>
diff --git a/polygerrit-ui/app/elements/gr-app.js b/polygerrit-ui/app/elements/gr-app.js
index b866088..3b66ae8 100644
--- a/polygerrit-ui/app/elements/gr-app.js
+++ b/polygerrit-ui/app/elements/gr-app.js
@@ -127,6 +127,12 @@
this._version = version;
});
+ if (window.localStorage.getItem('dark-theme')) {
+ this.$.libLoader.getDarkTheme().then(module => {
+ Polymer.dom(this.root).appendChild(module);
+ });
+ }
+
// Note: this is evaluated here to ensure that it only happens after the
// router has been initialized. @see Issue 7837
this._settingsUrl = Gerrit.Nav.getUrlForSettings();
@@ -191,7 +197,8 @@
},
_computeShowGwtUiLink(config) {
- return config.gerrit.web_uis && config.gerrit.web_uis.includes('GWT');
+ return !window.DEPRECATE_GWT_UI &&
+ config.gerrit.web_uis && config.gerrit.web_uis.includes('GWT');
},
_handlePageError(e) {
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
index 48b01f6..14e5e6f 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
@@ -16,7 +16,9 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
+
<link rel="import" href="../../../behaviors/docs-url-behavior/docs-url-behavior.html">
+<link rel="import" href="../../../bower_components/paper-toggle-button/paper-toggle-button.html">
<link rel="import" href="../../../styles/gr-form-styles.html">
<link rel="import" href="../../../styles/gr-menu-page-styles.html">
<link rel="import" href="../../../styles/gr-page-nav-styles.html">
@@ -52,12 +54,19 @@
#email {
margin-bottom: 1em;
}
- .filters p {
+ .filters p,
+ .darkToggle p {
margin-bottom: 1em;
}
.queryExample em {
color: violet;
}
+ .toggle {
+ align-items: center;
+ display: flex;
+ margin-bottom: 1rem;
+ margin-right: 1rem;
+ }
</style>
<style include="gr-form-styles"></style>
<style include="gr-menu-page-styles"></style>
@@ -95,6 +104,19 @@
</gr-page-nav>
<main class="gr-form-styles">
<h1>User Settings</h1>
+ <section class="darkToggle">
+ <div class="toggle">
+ <paper-toggle-button
+ checked="[[_isDark]]"
+ on-change="_handleToggleDark"></paper-toggle-button>
+ <div>Dark theme (alpha)</div>
+ </div>
+ <p>
+ Gerrit's dark theme is in early alpha, and almost definitely will
+ not play nicely with themes set by specific Gerrit hosts. Filing
+ feedback via the link in the app footer is strongly encouraged!
+ </p>
+ </section>
<h2
id="Profile"
class$="[[_computeHeaderClass(_accountInfoChanged)]]">Profile</h2>
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
index 215aaa1..213ab65 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
@@ -35,6 +35,8 @@
const ABSOLUTE_URL_PATTERN = /^https?:/;
const TRAILING_SLASH_PATTERN = /\/$/;
+ const RELOAD_MESSAGE = 'Reloading...';
+
Polymer({
is: 'gr-settings-view',
@@ -45,7 +47,7 @@
*/
/**
- * Fired with email confirmation text.
+ * Fired with email confirmation text, or when the page reloads.
*
* @event show-alert
*/
@@ -132,6 +134,11 @@
_loadingPromise: Object,
_showNumber: Boolean,
+
+ _isDark: {
+ type: Boolean,
+ value: false,
+ },
},
behaviors: [
@@ -149,6 +156,8 @@
attached() {
this.fire('title-change', {title: 'Settings'});
+ this._isDark = !!window.localStorage.getItem('dark-theme');
+
const promises = [
this.$.accountInfo.loadData(),
this.$.watchedProjectsEditor.loadData(),
@@ -410,5 +419,20 @@
return base + GERRIT_DOCS_FILTER_PATH;
},
+
+ _handleToggleDark() {
+ if (this._isDark) {
+ window.localStorage.removeItem('dark-theme');
+ } else {
+ window.localStorage.setItem('dark-theme', 'true');
+ }
+ this.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {message: RELOAD_MESSAGE},
+ bubbles: true,
+ }));
+ this.async(() => {
+ window.location.reload();
+ }, 1);
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
index 84b7f0a..5ac8773 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
@@ -93,7 +93,14 @@
}
this.plugin.restApi()
.send(this.action.method, this.action.__url, payload)
- .then(onSuccess);
+ .then(onSuccess)
+ .catch(error => {
+ document.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {
+ message: `Plugin network error: ${error}`,
+ },
+ }));
+ });
};
window.GrPluginActionContext = GrPluginActionContext;
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
index 7c18a99..bf6a046 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
@@ -128,5 +128,26 @@
assert.isTrue(sendStub.calledWith(
'METHOD', '/changes/1/revisions/2/foo~bar', payload));
});
+
+ test('call error', done => {
+ instance.action = {
+ method: 'METHOD',
+ __key: 'key',
+ __url: '/changes/1/revisions/2/foo~bar',
+ };
+ const sendStub = sandbox.stub().returns(Promise.reject('boom'));
+ sandbox.stub(plugin, 'restApi').returns({
+ send: sendStub,
+ });
+ const errorStub = sandbox.stub();
+ document.addEventListener('network-error', errorStub);
+ instance.call();
+ flush(() => {
+ assert.isTrue(errorStub.calledOnce);
+ assert.equal(errorStub.args[0][0].detail.message,
+ 'Plugin network error: boom');
+ done();
+ });
+ });
});
</script>
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
similarity index 88%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
index f5b71be..f70aff4 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
@@ -16,6 +16,6 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<dom-module id="gr-syntax-lib-loader">
- <script src="gr-syntax-lib-loader.js"></script>
+<dom-module id="gr-lib-loader">
+ <script src="gr-lib-loader.js"></script>
</dom-module>
diff --git a/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
new file mode 100644
index 0000000..de92d8a
--- /dev/null
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
@@ -0,0 +1,166 @@
+/**
+ * @license
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+ 'use strict';
+
+ const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
+ const DARK_THEME_PATH = 'styles/themes/dark-theme.html';
+ const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
+
+ Polymer({
+ is: 'gr-lib-loader',
+
+ properties: {
+ _hljsState: {
+ type: Object,
+
+ // NOTE: intended singleton.
+ value: {
+ configured: false,
+ loading: false,
+ callbacks: [],
+ },
+ },
+ },
+
+ /**
+ * Get the HLJS library. Returns a promise that resolves with a reference to
+ * the library after it's been loaded. The promise resolves immediately if
+ * it's already been loaded.
+ * @return {!Promise<Object>}
+ */
+ getHLJS() {
+ return new Promise((resolve, reject) => {
+ // If the lib is totally loaded, resolve immediately.
+ if (this._getHighlightLib()) {
+ resolve(this._getHighlightLib());
+ return;
+ }
+
+ // If the library is not currently being loaded, then start loading it.
+ if (!this._hljsState.loading) {
+ this._hljsState.loading = true;
+ this._loadScript(this._getHLJSUrl())
+ .then(this._onHLJSLibLoaded.bind(this)).catch(reject);
+ }
+
+ this._hljsState.callbacks.push(resolve);
+ });
+ },
+
+ /**
+ * Loads the dark theme document. Returns a promise that resolves with a
+ * custom-style DOM element.
+ * @return {!Promise<Element>}
+ */
+ getDarkTheme() {
+ return new Promise((resolve, reject) => {
+ this.importHref(this._getLibRoot() + DARK_THEME_PATH, () => {
+ const module = document.createElement('style', 'custom-style');
+ module.setAttribute('include', 'dark-theme');
+ resolve(module);
+ });
+ });
+ },
+
+ /**
+ * Execute callbacks awaiting the HLJS lib load.
+ */
+ _onHLJSLibLoaded() {
+ const lib = this._getHighlightLib();
+ this._hljsState.loading = false;
+ for (const cb of this._hljsState.callbacks) {
+ cb(lib);
+ }
+ this._hljsState.callbacks = [];
+ },
+
+ /**
+ * Get the HLJS library, assuming it has been loaded. Configure the library
+ * if it hasn't already been configured.
+ * @return {!Object}
+ */
+ _getHighlightLib() {
+ const lib = window.hljs;
+ if (lib && !this._hljsState.configured) {
+ this._hljsState.configured = true;
+
+ lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
+ }
+ return lib;
+ },
+
+ /**
+ * Get the resource path used to load the application. If the application
+ * was loaded through a CDN, then this will be the path to CDN resources.
+ * @return {string}
+ */
+ _getLibRoot() {
+ // TODO(wyatta): Remove the remainder of this method logic once the
+ // STATIC_RESOURCE_PATH variable is being provided generally.
+ if (window.STATIC_RESOURCE_PATH) {
+ return window.STATIC_RESOURCE_PATH + '/';
+ }
+
+ if (this._cachedLibRoot) { return this._cachedLibRoot; }
+
+ const appLink = document.head
+ .querySelector('link[rel=import][href$="gr-app.html"]');
+
+ if (!appLink) { throw new Error('Could not find application link'); }
+
+ this._cachedLibRoot = appLink
+ .href
+ .match(LIB_ROOT_PATTERN)[1];
+
+ if (!this._cachedLibRoot) {
+ throw new Error('Could not extract lib root');
+ }
+
+ return this._cachedLibRoot;
+ },
+ _cachedLibRoot: null,
+
+ /**
+ * Load and execute a JS file from the lib root.
+ * @param {string} src The path to the JS file without the lib root.
+ * @return {Promise} a promise that resolves when the script's onload
+ * executes.
+ */
+ _loadScript(src) {
+ return new Promise((resolve, reject) => {
+ const script = document.createElement('script');
+
+ if (!src) {
+ reject(new Error('Unable to load blank script url.'));
+ return;
+ }
+
+ script.src = src;
+ script.onload = resolve;
+ script.onerror = reject;
+ Polymer.dom(document.head).appendChild(script);
+ });
+ },
+
+ _getHLJSUrl() {
+ const root = this._getLibRoot();
+ if (!root) { return null; }
+ return root + HLJS_PATH;
+ },
+ });
+})();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
similarity index 77%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
index a260a97..cf9a41c 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
@@ -17,64 +17,67 @@
-->
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
-<title>gr-syntax-lib-loader</title>
+<title>gr-lib-loader</title>
<script src="../../../bower_components/webcomponentsjs/webcomponents-lite.min.js"></script>
<script src="../../../bower_components/web-component-tester/browser.js"></script>
<link rel="import" href="../../../test/common-test-setup.html"/>
-<link rel="import" href="gr-syntax-lib-loader.html">
+<link rel="import" href="gr-lib-loader.html">
<script>void(0);</script>
<test-fixture id="basic">
<template>
- <gr-syntax-lib-loader></gr-syntax-lib-loader>
+ <gr-lib-loader></gr-lib-loader>
</template>
</test-fixture>
<script>
- suite('gr-syntax-lib-loader tests', () => {
+ suite('gr-lib-loader tests', () => {
+ let sandbox;
let element;
let resolveLoad;
let loadStub;
setup(() => {
+ sandbox = sinon.sandbox.create();
element = fixture('basic');
- loadStub = sinon.stub(element, '_loadHLJS', () =>
+ loadStub = sandbox.stub(element, '_loadScript', () =>
new Promise(resolve => resolveLoad = resolve)
);
// Assert preconditions:
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
});
teardown(() => {
if (window.hljs) {
delete window.hljs;
}
- loadStub.restore();
+ sandbox.restore();
// Because the element state is a singleton, clean it up.
- element._state.configured = false;
- element._state.loading = false;
- element._state.callbacks = [];
+ element._hljsState.configured = false;
+ element._hljsState.loading = false;
+ element._hljsState.callbacks = [];
});
test('only load once', done => {
+ sandbox.stub(element, '_getHLJSUrl').returns('');
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
// It should now be in the loading state.
assert.isTrue(loadStub.called);
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
const secondCallHandler = sinon.stub();
- element.get().then(secondCallHandler);
+ element.getHLJS().then(secondCallHandler);
// No change in state.
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
assert.isFalse(secondCallHandler.called);
@@ -82,7 +85,7 @@
resolveLoad();
flush(() => {
// The state should be loaded and both handlers called.
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
assert.isTrue(firstCallHandler.called);
assert.isTrue(secondCallHandler.called);
done();
@@ -105,7 +108,7 @@
test('returns hljs', done => {
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
flush(() => {
assert.isTrue(firstCallHandler.called);
assert.isTrue(firstCallHandler.calledWith(hljsStub));
@@ -114,7 +117,7 @@
});
test('configures hljs', done => {
- element.get().then(() => {
+ element.getHLJS().then(() => {
assert.isTrue(window.hljs.configure.calledOnce);
done();
});
@@ -123,15 +126,10 @@
suite('_getHLJSUrl', () => {
suite('checking _getLibRoot', () => {
- let libRootStub;
let root;
setup(() => {
- libRootStub = sinon.stub(element, '_getLibRoot', () => root);
- });
-
- teardown(() => {
- libRootStub.restore();
+ sandbox.stub(element, '_getLibRoot', () => root);
});
test('with no root', () => {
diff --git a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
index 9dc51ba..c081b30 100644
--- a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
+++ b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
@@ -27,6 +27,36 @@
*/
Defs.patchRange;
+ /**
+ * Object to describe a request for passing into _fetchJSON or _fetchRawJSON.
+ * - url is the URL for the request (excluding get params)
+ * - errFn is a function to invoke when the request fails.
+ * - cancelCondition is a function that, if provided and returns true, will
+ * cancel the response after it resolves.
+ * - params is a key-value hash to specify get params for the request URL.
+ * @typedef {{
+ * url: string,
+ * errFn: (function(?Response, string=)|null|undefined),
+ * cancelCondition: (function()|null|undefined),
+ * params: (Object|null|undefined),
+ * fetchOptions: (Object|null|undefined),
+ * }}
+ */
+ Defs.FetchJSONRequest;
+
+ /**
+ * @typedef {{
+ * changeNum: (string|number),
+ * endpoint: string,
+ * patchNum: (string|number|null|undefined),
+ * errFn: (function(?Response, string=)|null|undefined),
+ * cancelCondition: (function()|null|undefined),
+ * params: (Object|null|undefined),
+ * fetchOptions: (Object|null|undefined),
+ * }}
+ */
+ Defs.ChangeFetchRequest;
+
const DiffViewMode = {
SIDE_BY_SIDE: 'SIDE_BY_SIDE',
UNIFIED: 'UNIFIED_DIFF',
@@ -112,23 +142,17 @@
* Returns a Promise that resolves to a native Response.
* Doesn't do error checking. Supports cancel condition. Performs auth.
* Validates auth expiry errors.
- * @param {string} url
- * @param {?function(?Response, string=)=} opt_errFn
- * passed as null sometimes.
- * @param {?function()=} opt_cancelCondition
- * passed as null sometimes.
- * @param {?Object=} opt_params URL params, key-value hash.
- * @param {?Object=} opt_options Fetch options.
+ * @param {Defs.FetchJSONRequest} req
+ * @return {Promise}
*/
- _fetchRawJSON(url, opt_errFn, opt_cancelCondition, opt_params,
- opt_options) {
- const urlWithParams = this._urlWithParams(url, opt_params);
- return this._auth.fetch(urlWithParams, opt_options).then(response => {
- if (opt_cancelCondition && opt_cancelCondition()) {
- response.body.cancel();
+ _fetchRawJSON(req) {
+ const urlWithParams = this._urlWithParams(req.url, req.params);
+ return this._auth.fetch(urlWithParams, req.fetchOptions).then(res => {
+ if (req.cancelCondition && req.cancelCondition()) {
+ res.body.cancel();
return;
}
- return response;
+ return res;
}).catch(err => {
const isLoggedIn = !!this._cache['/accounts/self/detail'];
if (isLoggedIn && err && err.message === FAILED_TO_FETCH_ERROR) {
@@ -139,8 +163,8 @@
CHECK_SIGN_IN_DEBOUNCE_MS);
return;
}
- if (opt_errFn) {
- opt_errFn.call(undefined, null, err);
+ if (req.errFn) {
+ req.errFn.call(undefined, null, err);
} else {
this.fire('network-error', {error: err});
}
@@ -152,31 +176,23 @@
* Fetch JSON from url provided.
* Returns a Promise that resolves to a parsed response.
* Same as {@link _fetchRawJSON}, plus error handling.
- * @param {string} url
- * @param {?function(?Response, string=)=} opt_errFn
- * passed as null sometimes.
- * @param {?function()=} opt_cancelCondition
- * passed as null sometimes.
- * @param {?Object=} opt_params URL params, key-value hash.
- * @param {?Object=} opt_options Fetch options.
+ * @param {Defs.FetchJSONRequest} req
*/
- fetchJSON(url, opt_errFn, opt_cancelCondition, opt_params, opt_options) {
- return this._fetchRawJSON(
- url, opt_errFn, opt_cancelCondition, opt_params, opt_options)
- .then(response => {
- if (!response) {
- return;
- }
- if (!response.ok) {
- if (opt_errFn) {
- opt_errFn.call(null, response);
- return;
- }
- this.fire('server-error', {response});
- return;
- }
- return response && this.getResponseObject(response);
- });
+ _fetchJSON(req) {
+ return this._fetchRawJSON(req).then(response => {
+ if (!response) {
+ return;
+ }
+ if (!response.ok) {
+ if (req.errFn) {
+ req.errFn.call(null, response);
+ return;
+ }
+ this.fire('server-error', {response});
+ return;
+ }
+ return response && this.getResponseObject(response);
+ });
},
/**
@@ -236,121 +252,120 @@
getConfig(noCache) {
if (!noCache) {
- return this._fetchSharedCacheURL('/config/server/info');
+ return this._fetchSharedCacheURL({url: '/config/server/info'});
}
- return this.fetchJSON('/config/server/info');
+ return this._fetchJSON({url: '/config/server/info'});
},
getRepo(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/projects/' + encodeURIComponent(repo), opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/projects/' + encodeURIComponent(repo),
+ errFn: opt_errFn,
+ });
},
getProjectConfig(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/projects/' + encodeURIComponent(repo) + '/config', opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/projects/' + encodeURIComponent(repo) + '/config',
+ errFn: opt_errFn,
+ });
},
getRepoAccess(repo) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/access/?project=' + encodeURIComponent(repo));
+ return this._fetchSharedCacheURL({
+ url: '/access/?project=' + encodeURIComponent(repo),
+ });
},
getRepoDashboards(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- `/projects/${encodeURIComponent(repo)}/dashboards?inherited`,
- opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: `/projects/${encodeURIComponent(repo)}/dashboards?inherited`,
+ errFn: opt_errFn,
+ });
},
- saveRepoConfig(repo, config, opt_errFn, opt_ctx) {
+ saveRepoConfig(repo, config, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
return this.send('PUT', `/projects/${encodeName}/config`, config,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
- runRepoGC(repo, opt_errFn, opt_ctx) {
+ runRepoGC(repo, opt_errFn) {
if (!repo) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
- return this.send('POST', `/projects/${encodeName}/gc`, '',
- opt_errFn, opt_ctx);
+ return this.send('POST', `/projects/${encodeName}/gc`, '', opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepo(config, opt_errFn, opt_ctx) {
+ createRepo(config, opt_errFn) {
if (!config.name) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createGroup(config, opt_errFn, opt_ctx) {
+ createGroup(config, opt_errFn) {
if (!config.name) { return ''; }
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn);
},
getGroupConfig(group, opt_errFn) {
- const encodeName = encodeURIComponent(group);
- return this.fetchJSON(`/groups/${encodeName}/detail`, opt_errFn);
+ return this._fetchJSON({
+ url: `/groups/${encodeURIComponent(group)}/detail`,
+ errFn: opt_errFn,
+ });
},
/**
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoBranches(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoBranches(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/branches/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/branches/${encodeRef}`, '', opt_errFn);
},
/**
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoTags(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoTags(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/tags/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/tags/${encodeRef}`, '', opt_errFn);
},
/**
@@ -358,9 +373,8 @@
* @param {string} branch
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoBranch(name, branch, revision, opt_errFn, opt_ctx) {
+ createRepoBranch(name, branch, revision, opt_errFn) {
if (!name || !branch || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
@@ -368,7 +382,7 @@
const encodeBranch = encodeURIComponent(branch);
return this.send('PUT',
`/projects/${encodeName}/branches/${encodeBranch}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -376,16 +390,15 @@
* @param {string} tag
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoTag(name, tag, revision, opt_errFn, opt_ctx) {
+ createRepoTag(name, tag, revision, opt_errFn) {
if (!name || !tag || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(name);
const encodeTag = encodeURIComponent(tag);
return this.send('PUT', `/projects/${encodeName}/tags/${encodeTag}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -394,7 +407,7 @@
*/
getIsGroupOwner(groupName) {
const encodeName = encodeURIComponent(groupName);
- return this._fetchSharedCacheURL(`/groups/?owned&q=${encodeName}`)
+ return this._fetchSharedCacheURL({url: `/groups/?owned&q=${encodeName}`})
.then(configs => configs.hasOwnProperty(groupName));
},
@@ -432,8 +445,10 @@
},
getGroupAuditLog(group, opt_errFn) {
- return this._fetchSharedCacheURL(
- '/groups/' + group + '/log.audit', opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/groups/' + group + '/log.audit',
+ errFn: opt_errFn,
+ });
},
saveGroupMembers(groupName, groupMembers) {
@@ -470,13 +485,15 @@
},
getVersion() {
- return this._fetchSharedCacheURL('/config/server/version');
+ return this._fetchSharedCacheURL({url: '/config/server/version'});
},
getDiffPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences.diff');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/preferences.diff',
+ });
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/DiffPreferencesInfo.java
@@ -504,7 +521,9 @@
getEditPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences.edit');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/preferences.edit',
+ });
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/EditPreferencesInfo.java
@@ -532,53 +551,52 @@
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- savePreferences(prefs, opt_errFn, opt_ctx) {
+ savePreferences(prefs, opt_errFn) {
// Note (Issue 5142): normalize the download scheme with lower case before
// saving.
if (prefs.download_scheme) {
prefs.download_scheme = prefs.download_scheme.toLowerCase();
}
- return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn,
- opt_ctx);
+ return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveDiffPreferences(prefs, opt_errFn, opt_ctx) {
+ saveDiffPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.diff'] = undefined;
return this.send('PUT', '/accounts/self/preferences.diff', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveEditPreferences(prefs, opt_errFn, opt_ctx) {
+ saveEditPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.edit'] = undefined;
return this.send('PUT', '/accounts/self/preferences.edit', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
getAccount() {
- return this._fetchSharedCacheURL('/accounts/self/detail', resp => {
- if (!resp || resp.status === 403) {
- this._cache['/accounts/self/detail'] = null;
- }
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/detail',
+ errFn: resp => {
+ if (!resp || resp.status === 403) {
+ this._cache['/accounts/self/detail'] = null;
+ }
+ },
});
},
getExternalIds() {
- return this.fetchJSON('/accounts/self/external.ids');
+ return this._fetchJSON({url: '/accounts/self/external.ids'});
},
deleteAccountIdentity(id) {
@@ -591,56 +609,55 @@
* @return {!Promise<!Object>}
*/
getAccountDetails(userId) {
- return this.fetchJSON(`/accounts/${encodeURIComponent(userId)}/detail`);
+ return this._fetchJSON({
+ url: `/accounts/${encodeURIComponent(userId)}/detail`,
+ });
},
getAccountEmails() {
- return this._fetchSharedCacheURL('/accounts/self/emails');
+ return this._fetchSharedCacheURL({url: '/accounts/self/emails'});
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- addAccountEmail(email, opt_errFn, opt_ctx) {
+ addAccountEmail(email, opt_errFn) {
return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteAccountEmail(email, opt_errFn, opt_ctx) {
+ deleteAccountEmail(email, opt_errFn) {
return this.send('DELETE', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setPreferredAccountEmail(email, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email) + '/preferred', null,
- opt_errFn, opt_ctx).then(() => {
- // If result of getAccountEmails is in cache, update it in the cache
- // so we don't have to invalidate it.
- const cachedEmails = this._cache['/accounts/self/emails'];
- if (cachedEmails) {
- const emails = cachedEmails.map(entry => {
- if (entry.email === email) {
- return {email, preferred: true};
- } else {
- return {email};
- }
- });
- this._cache['/accounts/self/emails'] = emails;
+ setPreferredAccountEmail(email, opt_errFn) {
+ const encodedEmail = encodeURIComponent(email);
+ const url = `/accounts/self/emails/${encodedEmail}/preferred`;
+ return this.send('PUT', url, null, opt_errFn).then(() => {
+ // If result of getAccountEmails is in cache, update it in the cache
+ // so we don't have to invalidate it.
+ const cachedEmails = this._cache['/accounts/self/emails'];
+ if (cachedEmails) {
+ const emails = cachedEmails.map(entry => {
+ if (entry.email === email) {
+ return {email, preferred: true};
+ } else {
+ return {email};
}
});
+ this._cache['/accounts/self/emails'] = emails;
+ }
+ });
},
/**
@@ -660,47 +677,45 @@
/**
* @param {string} name
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountName(name, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/name', {name}, opt_errFn, opt_ctx)
- .then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({name: newName})));
+ setAccountName(name, opt_errFn) {
+ return this.send('PUT', '/accounts/self/name', {name}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({name: newName}));
},
/**
* @param {string} username
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountUsername(username, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/username', {username}, opt_errFn,
- opt_ctx).then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({username: newName})));
+ setAccountUsername(username, opt_errFn) {
+ return this.send('PUT', '/accounts/self/username', {username}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({username: newName}));
},
/**
* @param {string} status
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountStatus(status, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/status', {status},
- opt_errFn, opt_ctx).then(response => this.getResponseObject(response)
- .then(newStatus => this._updateCachedAccount(
- {status: newStatus})));
+ setAccountStatus(status, opt_errFn) {
+ return this.send('PUT', '/accounts/self/status', {status}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newStatus => this._updateCachedAccount({status: newStatus}));
},
getAccountStatus(userId) {
- return this.fetchJSON(`/accounts/${encodeURIComponent(userId)}/status`);
+ return this._fetchJSON({
+ url: `/accounts/${encodeURIComponent(userId)}/status`,
+ });
},
getAccountGroups() {
- return this.fetchJSON('/accounts/self/groups');
+ return this._fetchJSON({url: '/accounts/self/groups'});
},
getAccountAgreements() {
- return this.fetchJSON('/accounts/self/agreements');
+ return this._fetchJSON({url: '/accounts/self/agreements'});
},
saveAccountAgreement(name) {
@@ -717,8 +732,9 @@
.map(param => { return encodeURIComponent(param); })
.join('&q=');
}
- return this._fetchSharedCacheURL('/accounts/self/capabilities' +
- queryString);
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/capabilities' + queryString,
+ });
},
getLoggedIn() {
@@ -741,31 +757,31 @@
checkCredentials() {
// Skip the REST response cache.
- return this._fetchRawJSON('/accounts/self/detail').then(response => {
- if (!response) { return; }
- if (response.status === 403) {
+ return this._fetchRawJSON({url: '/accounts/self/detail'}).then(res => {
+ if (!res) { return; }
+ if (res.status === 403) {
this.fire('auth-error');
this._cache['/accounts/self/detail'] = null;
- } else if (response.ok) {
- return this.getResponseObject(response);
+ } else if (res.ok) {
+ return this.getResponseObject(res);
}
- }).then(response => {
- if (response) {
- this._cache['/accounts/self/detail'] = response;
+ }).then(res => {
+ if (res) {
+ this._cache['/accounts/self/detail'] = res;
}
- return response;
+ return res;
});
},
getDefaultPreferences() {
- return this._fetchSharedCacheURL('/config/server/preferences');
+ return this._fetchSharedCacheURL({url: '/config/server/preferences'});
},
getPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences').then(
- res => {
+ return this._fetchSharedCacheURL({url: '/accounts/self/preferences'})
+ .then(res => {
if (this._isNarrowScreen()) {
res.default_diff_view = DiffViewMode.UNIFIED;
} else {
@@ -786,56 +802,53 @@
},
getWatchedProjects() {
- return this._fetchSharedCacheURL('/accounts/self/watched.projects');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/watched.projects',
+ });
},
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveWatchedProjects(projects, opt_errFn, opt_ctx) {
- return this.send('POST', '/accounts/self/watched.projects', projects,
- opt_errFn, opt_ctx)
- .then(response => {
- return this.getResponseObject(response);
- });
+ saveWatchedProjects(projects, opt_errFn) {
+ const url = '/accounts/self/watched.projects';
+ return this.send('POST', url, projects, opt_errFn)
+ .then(response => this.getResponseObject(response));
},
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteWatchedProjects(projects, opt_errFn, opt_ctx) {
+ deleteWatchedProjects(projects, opt_errFn) {
return this.send('POST', '/accounts/self/watched.projects:delete',
- projects, opt_errFn, opt_ctx);
+ projects, opt_errFn);
},
/**
- * @param {string} url
- * @param {function(?Response, string=)=} opt_errFn
+ * @param {Defs.FetchJSONRequest} req
*/
- _fetchSharedCacheURL(url, opt_errFn) {
- if (this._sharedFetchPromises[url]) {
- return this._sharedFetchPromises[url];
+ _fetchSharedCacheURL(req) {
+ if (this._sharedFetchPromises[req.url]) {
+ return this._sharedFetchPromises[req.url];
}
// TODO(andybons): Periodic cache invalidation.
- if (this._cache[url] !== undefined) {
- return Promise.resolve(this._cache[url]);
+ if (this._cache[req.url] !== undefined) {
+ return Promise.resolve(this._cache[req.url]);
}
- this._sharedFetchPromises[url] = this.fetchJSON(url, opt_errFn)
+ this._sharedFetchPromises[req.url] = this._fetchJSON(req)
.then(response => {
if (response !== undefined) {
- this._cache[url] = response;
+ this._cache[req.url] = response;
}
- this._sharedFetchPromises[url] = undefined;
+ this._sharedFetchPromises[req.url] = undefined;
return response;
}).catch(err => {
- this._sharedFetchPromises[url] = undefined;
+ this._sharedFetchPromises[req.url] = undefined;
throw err;
});
- return this._sharedFetchPromises[url];
+ return this._sharedFetchPromises[req.url];
},
_isNarrowScreen() {
@@ -848,8 +861,8 @@
* @param {number|string=} opt_offset
* @param {!Object=} opt_options
* @return {?Array<!Object>|?Array<!Array<!Object>>} If opt_query is an
- * array, fetchJSON will return an array of arrays of changeInfos. If it
- * is unspecified or a string, fetchJSON will return an array of
+ * array, _fetchJSON will return an array of arrays of changeInfos. If it
+ * is unspecified or a string, _fetchJSON will return an array of
* changeInfos.
*/
getChanges(opt_changesPerPage, opt_query, opt_offset, opt_options) {
@@ -874,7 +887,7 @@
this._maybeInsertInLookup(change);
}
};
- return this.fetchJSON('/changes/', null, null, params).then(response => {
+ return this._fetchJSON({url: '/changes/', params}).then(response => {
// Response may be an array of changes OR an array of arrays of
// changes.
if (opt_query instanceof Array) {
@@ -959,43 +972,43 @@
* @param {function(?Response, string=)=} opt_errFn
* @param {function()=} opt_cancelCondition
*/
- _getChangeDetail(changeNum, params, opt_errFn,
- opt_cancelCondition) {
+ _getChangeDetail(changeNum, params, opt_errFn, opt_cancelCondition) {
return this.getChangeActionURL(changeNum, null, '/detail').then(url => {
const urlWithParams = this._urlWithParams(url, params);
- return this._fetchRawJSON(
- url,
- opt_errFn,
- opt_cancelCondition,
- {O: params},
- this._etags.getOptions(urlWithParams))
- .then(response => {
- if (response && response.status === 304) {
- return Promise.resolve(this._parsePrefixedJSON(
- this._etags.getCachedPayload(urlWithParams)));
- }
+ const req = {
+ url,
+ errFn: opt_errFn,
+ cancelCondition: opt_cancelCondition,
+ params: {O: params},
+ fetchOptions: this._etags.getOptions(urlWithParams),
+ };
+ return this._fetchRawJSON(req).then(response => {
+ if (response && response.status === 304) {
+ return Promise.resolve(this._parsePrefixedJSON(
+ this._etags.getCachedPayload(urlWithParams)));
+ }
- if (response && !response.ok) {
- if (opt_errFn) {
- opt_errFn.call(null, response);
- } else {
- this.fire('server-error', {response});
- }
- return;
- }
+ if (response && !response.ok) {
+ if (opt_errFn) {
+ opt_errFn.call(null, response);
+ } else {
+ this.fire('server-error', {response});
+ }
+ return;
+ }
- const payloadPromise = response ?
- this._readResponsePayload(response) :
- Promise.resolve(null);
+ const payloadPromise = response ?
+ this._readResponsePayload(response) :
+ Promise.resolve(null);
- return payloadPromise.then(payload => {
- if (!payload) { return null; }
- this._etags.collect(urlWithParams, response, payload.raw);
- this._maybeInsertInLookup(payload.parsed);
+ return payloadPromise.then(payload => {
+ if (!payload) { return null; }
+ this._etags.collect(urlWithParams, response, payload.raw);
+ this._maybeInsertInLookup(payload.parsed);
- return payload.parsed;
- });
- });
+ return payload.parsed;
+ });
+ });
});
},
@@ -1004,7 +1017,11 @@
* @param {number|string} patchNum
*/
getChangeCommitInfo(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/commit?links', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/commit?links',
+ patchNum,
+ });
},
/**
@@ -1019,8 +1036,12 @@
} else if (!this.patchNumEquals(patchRange.basePatchNum, 'PARENT')) {
params = {base: patchRange.basePatchNum};
}
- return this._getChangeURLAndFetch(changeNum, '/files',
- patchRange.patchNum, undefined, undefined, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/files',
+ patchNum: patchRange.patchNum,
+ params,
+ });
},
/**
@@ -1032,7 +1053,7 @@
if (patchRange.basePatchNum !== 'PARENT') {
endpoint += '&base=' + encodeURIComponent(patchRange.basePatchNum + '');
}
- return this._getChangeURLAndFetch(changeNum, endpoint);
+ return this._getChangeURLAndFetch({changeNum, endpoint});
},
/**
@@ -1042,8 +1063,11 @@
* @return {!Promise<!Object>}
*/
queryChangeFiles(changeNum, patchNum, query) {
- return this._getChangeURLAndFetch(changeNum,
- `/files?q=${encodeURIComponent(query)}`, patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: `/files?q=${encodeURIComponent(query)}`,
+ patchNum,
+ });
},
/**
@@ -1071,16 +1095,16 @@
},
getChangeRevisionActions(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/actions', patchNum)
- .then(revisionActions => {
- // The rebase button on change screen is always enabled.
- if (revisionActions.rebase) {
- revisionActions.rebase.rebaseOnCurrent =
- !!revisionActions.rebase.enabled;
- revisionActions.rebase.enabled = true;
- }
- return revisionActions;
- });
+ const req = {changeNum, endpoint: '/actions', patchNum};
+ return this._getChangeURLAndFetch(req).then(revisionActions => {
+ // The rebase button on change screen is always enabled.
+ if (revisionActions.rebase) {
+ revisionActions.rebase.rebaseOnCurrent =
+ !!revisionActions.rebase.enabled;
+ revisionActions.rebase.enabled = true;
+ }
+ return revisionActions;
+ });
},
/**
@@ -1091,15 +1115,19 @@
getChangeSuggestedReviewers(changeNum, inputVal, opt_errFn) {
const params = {n: 10};
if (inputVal) { params.q = inputVal; }
- return this._getChangeURLAndFetch(changeNum, '/suggest_reviewers', null,
- opt_errFn, null, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/suggest_reviewers',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {number|string} changeNum
*/
getChangeIncludedIn(changeNum) {
- return this._getChangeURLAndFetch(changeNum, '/in', null);
+ return this._getChangeURLAndFetch({changeNum, endpoint: '/in'});
},
_computeFilter(filter) {
@@ -1122,10 +1150,10 @@
getGroups(filter, groupsPerPage, opt_offset) {
const offset = opt_offset || 0;
- return this._fetchSharedCacheURL(
- `/groups/?n=${groupsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter)
- );
+ return this._fetchSharedCacheURL({
+ url: `/groups/?n=${groupsPerPage + 1}&S=${offset}` +
+ this._computeFilter(filter),
+ });
},
/**
@@ -1139,10 +1167,10 @@
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- `/projects/?d&n=${reposPerPage + 1}&S=${offset}` +
- this._computeFilter(filter)
- );
+ return this._fetchSharedCacheURL({
+ url: `/projects/?d&n=${reposPerPage + 1}&S=${offset}` +
+ this._computeFilter(filter),
+ });
},
setRepoHead(repo, ref) {
@@ -1162,15 +1190,13 @@
*/
getRepoBranches(filter, repo, reposBranchesPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
+ const count = reposBranchesPerPage + 1;
+ filter = this._computeFilter(filter);
+ repo = encodeURIComponent(repo);
+ const url = `/projects/${repo}/branches?n=${count}&S=${offset}${filter}`;
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repo)}/branches` +
- `?n=${reposBranchesPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ return this._fetchJSON({url, errFn: opt_errFn});
},
/**
@@ -1183,15 +1209,14 @@
*/
getRepoTags(filter, repo, reposTagsPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
+ const encodedRepo = encodeURIComponent(repo);
+ const n = reposTagsPerPage + 1;
+ const encodedFilter = this._computeFilter(filter);
+ const url = `/projects/${encodedRepo}/tags` + `?n=${n}&S=${offset}` +
+ encodedFilter;
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repo)}/tags` +
- `?n=${reposTagsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ return this._fetchJSON({url, errFn: opt_errFn});
},
/**
@@ -1203,21 +1228,19 @@
*/
getPlugins(filter, pluginsPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
- return this.fetchJSON(
- `/plugins/?all&n=${pluginsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ const encodedFilter = this._computeFilter(filter);
+ const n = pluginsPerPage + 1;
+ const url = `/plugins/?all&n=${n}&S=${offset}${encodedFilter}`;
+ return this._fetchJSON({url, errFn: opt_errFn});
},
getRepoAccessRights(repoName, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repoName)}/access`,
- opt_errFn
- );
+ return this._fetchJSON({
+ url: `/projects/${encodeURIComponent(repoName)}/access`,
+ errFn: opt_errFn,
+ });
},
setRepoAccessRights(repoName, repoInfo) {
@@ -1238,43 +1261,52 @@
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedGroups(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedGroups(inputVal, opt_n, opt_errFn) {
const params = {s: inputVal};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/groups/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/groups/',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedProjects(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedProjects(inputVal, opt_n, opt_errFn) {
const params = {
m: inputVal,
n: MAX_PROJECT_RESULTS,
type: 'ALL',
};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/projects/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/projects/',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedAccounts(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedAccounts(inputVal, opt_n, opt_errFn) {
if (!inputVal) {
return Promise.resolve([]);
}
const params = {suggest: null, q: inputVal};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/accounts/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/accounts/',
+ errFn: opt_errFn,
+ params,
+ });
},
addChangeReviewer(changeNum, reviewerID) {
@@ -1305,11 +1337,18 @@
},
getRelatedChanges(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/related', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/related',
+ patchNum,
+ });
},
getChangesSubmittedTogether(changeNum) {
- return this._getChangeURLAndFetch(changeNum, '/submitted_together', null);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/submitted_together',
+ });
},
getChangeConflicts(changeNum) {
@@ -1321,7 +1360,7 @@
O: options,
q: 'status:open is:mergeable conflicts:' + changeNum,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getChangeCherryPicks(project, changeID, changeNum) {
@@ -1339,7 +1378,7 @@
O: options,
q: query,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getChangesWithSameTopic(topic) {
@@ -1353,11 +1392,15 @@
O: options,
q: 'status:open topic:' + topic,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getReviewedFiles(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/files?reviewed', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/files?reviewed',
+ patchNum,
+ });
},
/**
@@ -1366,13 +1409,12 @@
* @param {string} path
* @param {boolean} reviewed
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn, opt_ctx) {
+ saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn) {
const method = reviewed ? 'PUT' : 'DELETE';
- const e = `/files/${encodeURIComponent(path)}/reviewed`;
- return this.getChangeURLAndSend(changeNum, method, patchNum, e, null,
- opt_errFn, opt_ctx);
+ const endpoint = `/files/${encodeURIComponent(path)}/reviewed`;
+ return this.getChangeURLAndSend(changeNum, method, patchNum, endpoint,
+ null, opt_errFn);
},
/**
@@ -1380,25 +1422,26 @@
* @param {number|string} patchNum
* @param {!Object} review
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveChangeReview(changeNum, patchNum, review, opt_errFn, opt_ctx) {
+ saveChangeReview(changeNum, patchNum, review, opt_errFn) {
const promises = [
this.awaitPendingDiffDrafts(),
this.getChangeActionURL(changeNum, patchNum, '/review'),
];
return Promise.all(promises).then(([, url]) => {
- return this.send('POST', url, review, opt_errFn, opt_ctx);
+ return this.send('POST', url, review, opt_errFn);
});
},
getChangeEdit(changeNum, opt_download_commands) {
const params = opt_download_commands ? {'download-commands': true} : null;
return this.getLoggedIn().then(loggedIn => {
- return loggedIn ?
- this._getChangeURLAndFetch(changeNum, '/edit/', null, null, null,
- params) :
- false;
+ if (!loggedIn) { return false; }
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/edit/',
+ params,
+ });
});
},
@@ -1465,7 +1508,7 @@
const e = `/files/${encodeURIComponent(path)}/content`;
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', patchNum, e, null,
- opt_errFn, null, null, headers);
+ opt_errFn, null, headers);
},
/**
@@ -1477,7 +1520,7 @@
const e = '/edit/' + encodeURIComponent(path);
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', null, e, null, null,
- null, null, headers);
+ null, headers);
},
rebaseChangeEdit(changeNum) {
@@ -1506,7 +1549,7 @@
saveChangeEdit(changeNum, path, contents) {
const e = '/edit/' + encodeURIComponent(path);
return this.getChangeURLAndSend(changeNum, 'PUT', null, e, contents, null,
- null, 'text/plain');
+ 'text/plain');
},
// Deprecated, prefer to use putChangeCommitMessage instead.
@@ -1540,12 +1583,10 @@
* number at least.
* @param {?function(?Response, string=)=} opt_errFn
* passed as null sometimes.
- * @param {?=} opt_ctx
* @param {?string=} opt_contentType
* @param {Object=} opt_headers
*/
- send(method, url, opt_body, opt_errFn, opt_ctx, opt_contentType,
- opt_headers) {
+ send(method, url, opt_body, opt_errFn, opt_contentType, opt_headers) {
const options = {method};
if (opt_body) {
options.headers = new Headers();
@@ -1569,7 +1610,7 @@
return this._auth.fetch(url, options).then(response => {
if (!response.ok) {
if (opt_errFn) {
- return opt_errFn.call(opt_ctx || null, response);
+ return opt_errFn.call(null, response);
}
this.fire('server-error', {response});
}
@@ -1577,7 +1618,7 @@
}).catch(err => {
this.fire('network-error', {error: err});
if (opt_errFn) {
- return opt_errFn.call(opt_ctx, null, err);
+ return opt_errFn.call(null, null, err);
} else {
throw err;
}
@@ -1607,8 +1648,14 @@
}
const endpoint = `/files/${encodeURIComponent(path)}/diff`;
- return this._getChangeURLAndFetch(changeNum, endpoint, patchNum,
- opt_errFn, opt_cancelCondition, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint,
+ patchNum,
+ errFn: opt_errFn,
+ cancelCondition: opt_cancelCondition,
+ params,
+ });
},
/**
@@ -1695,7 +1742,11 @@
* @return {!Promise<!Object>} Diff comments response.
*/
const fetchComments = opt_patchNum => {
- return this._getChangeURLAndFetch(changeNum, endpoint, opt_patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint,
+ patchNum: opt_patchNum,
+ });
};
if (!opt_basePatchNum && !opt_patchNum && !opt_path) {
@@ -1809,9 +1860,10 @@
},
getCommitInfo(project, commit) {
- return this.fetchJSON(
- '/projects/' + encodeURIComponent(project) +
- '/commits/' + encodeURIComponent(commit));
+ return this._fetchJSON({
+ url: '/projects/' + encodeURIComponent(project) +
+ '/commits/' + encodeURIComponent(commit),
+ });
},
_fetchB64File(url) {
@@ -1940,11 +1992,11 @@
},
getAccountSSHKeys() {
- return this._fetchSharedCacheURL('/accounts/self/sshkeys');
+ return this._fetchSharedCacheURL({url: '/accounts/self/sshkeys'});
},
addAccountSSHKey(key) {
- return this.send('POST', '/accounts/self/sshkeys', key, null, null,
+ return this.send('POST', '/accounts/self/sshkeys', key, null,
'plain/text')
.then(response => {
if (response.status < 200 && response.status >= 300) {
@@ -1963,7 +2015,7 @@
},
getAccountGPGKeys() {
- return this.fetchJSON('/accounts/self/gpgkeys');
+ return this._fetchJSON({url: '/accounts/self/gpgkeys'});
},
addAccountGPGKey(key) {
@@ -2006,7 +2058,10 @@
},
getCapabilities(token, opt_errFn) {
- return this.fetchJSON('/config/server/capabilities', opt_errFn);
+ return this._fetchJSON({
+ url: '/config/server/capabilities',
+ errFn: opt_errFn,
+ });
},
setAssignee(changeNum, assignee) {
@@ -2073,11 +2128,13 @@
*/
getChange(changeNum, opt_errFn) {
// Cannot use _changeBaseURL, as this function is used by _projectLookup.
- return this.fetchJSON(`/changes/?q=change:${changeNum}`, opt_errFn)
- .then(res => {
- if (!res || !res.length) { return null; }
- return res[0];
- });
+ return this._fetchJSON({
+ url: `/changes/?q=change:${changeNum}`,
+ errFn: opt_errFn,
+ }).then(res => {
+ if (!res || !res.length) { return null; }
+ return res[0];
+ });
},
/**
@@ -2127,36 +2184,31 @@
* @param {?Object|number|string=} opt_payload gets passed as null, string,
* Object, or number.
* @param {?function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
* @param {?=} opt_contentType
* @param {Object=} opt_headers
* @return {!Promise<!Object>}
*/
getChangeURLAndSend(changeNum, method, patchNum, endpoint, opt_payload,
- opt_errFn, opt_ctx, opt_contentType, opt_headers) {
- return this._changeBaseURL(changeNum, patchNum).then(url => {
- return this.send(method, url + endpoint, opt_payload, opt_errFn,
- opt_ctx, opt_contentType, opt_headers);
- });
+ opt_errFn, opt_contentType, opt_headers) {
+ return this._changeBaseURL(changeNum, patchNum).then(url =>
+ this.send(method, url + endpoint, opt_payload, opt_errFn,
+ opt_contentType, opt_headers));
},
- /**
- * Alias for _changeBaseURL.then(fetchJSON).
- * @todo(beckysiegel) clean up comments
- * @param {string|number} changeNum
- * @param {string} endpoint
- * @param {?string|number=} opt_patchNum gets passed as null.
- * @param {?function(?Response, string=)=} opt_errFn gets passed as null.
- * @param {?function()=} opt_cancelCondition gets passed as null.
- * @param {?Object=} opt_params gets passed as null.
- * @param {!Object=} opt_options
- * @return {!Promise<!Object>}
- */
- _getChangeURLAndFetch(changeNum, endpoint, opt_patchNum, opt_errFn,
- opt_cancelCondition, opt_params, opt_options) {
- return this._changeBaseURL(changeNum, opt_patchNum).then(url => {
- return this.fetchJSON(url + endpoint, opt_errFn, opt_cancelCondition,
- opt_params, opt_options);
+ /**
+ * Alias for _changeBaseURL.then(_fetchJSON).
+ * @param {Defs.ChangeFetchRequest} req
+ * @return {!Promise<!Object>}
+ */
+ _getChangeURLAndFetch(req) {
+ return this._changeBaseURL(req.changeNum, req.patchNum).then(url => {
+ return this._fetchJSON({
+ url: url + req.endpoint,
+ errFn: req.errFn,
+ cancelCondition: req.cancelCondition,
+ params: req.params,
+ fetchOptions: req.fetchOptions,
+ });
});
},
@@ -2171,9 +2223,12 @@
*/
getBlame(changeNum, patchNum, path, opt_base) {
const encodedPath = encodeURIComponent(path);
- return this._getChangeURLAndFetch(changeNum,
- `/files/${encodedPath}/blame`, patchNum, undefined, undefined,
- opt_base ? {base: 't'} : undefined);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: `/files/${encodedPath}/blame`,
+ patchNum,
+ params: opt_base ? {base: 't'} : undefined,
+ });
},
/**
@@ -2217,7 +2272,7 @@
getDashboard(project, dashboard, opt_errFn) {
const url = '/projects/' + encodeURIComponent(project) + '/dashboards/' +
encodeURIComponent(dashboard);
- return this._fetchSharedCacheURL(url, opt_errFn);
+ return this._fetchSharedCacheURL({url, errFn: opt_errFn});
},
getMergeable(changeNum) {
diff --git a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
index fb20da4..7e71efa 100644
--- a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
@@ -58,7 +58,7 @@
});
test('JSON prefix is properly removed', done => {
- element.fetchJSON('/dummy/url').then(obj => {
+ element._fetchJSON('/dummy/url').then(obj => {
assert.deepEqual(obj, {hello: 'bonjour'});
done();
});
@@ -66,7 +66,7 @@
test('cached results', done => {
let n = 0;
- sandbox.stub(element, 'fetchJSON', () => {
+ sandbox.stub(element, '_fetchJSON', () => {
return Promise.resolve(++n);
});
const promises = [];
@@ -86,7 +86,7 @@
test('cached promise', done => {
const promise = Promise.reject('foo');
element._cache['/foo'] = promise;
- element._fetchSharedCacheURL('/foo').catch(p => {
+ element._fetchSharedCacheURL({url: '/foo'}).catch(p => {
assert.equal(p, 'foo');
done();
});
@@ -120,7 +120,8 @@
cancel() { cancelCalled = true; },
},
}));
- element.fetchJSON('/dummy/url', null, () => { return true; }).then(
+ const cancelCondition = () => { return true; };
+ element._fetchJSON({url: '/dummy/url', cancelCondition}).then(
obj => {
assert.isUndefined(obj);
assert.isTrue(cancelCalled);
@@ -129,7 +130,7 @@
});
test('parent diff comments are properly grouped', done => {
- sandbox.stub(element, 'fetchJSON', () => {
+ sandbox.stub(element, '_fetchJSON', () => {
return Promise.resolve({
'/COMMIT_MSG': [],
'sieve.go': [
@@ -272,7 +273,8 @@
test('differing patch diff comments are properly grouped', done => {
sandbox.stub(element, 'getFromProjectLookup')
.returns(Promise.resolve('test'));
- sandbox.stub(element, 'fetchJSON', url => {
+ sandbox.stub(element, '_fetchJSON', request => {
+ const url = request.url;
if (url === '/changes/test~42/revisions/1') {
return Promise.resolve({
'/COMMIT_MSG': [],
@@ -386,11 +388,11 @@
});
suite('rebase action', () => {
- let resolveFetchJSON;
+ let resolve_fetchJSON;
setup(() => {
- sandbox.stub(element, 'fetchJSON').returns(
+ sandbox.stub(element, '_fetchJSON').returns(
new Promise(resolve => {
- resolveFetchJSON = resolve;
+ resolve_fetchJSON = resolve;
}));
});
@@ -401,7 +403,7 @@
assert.isFalse(response.rebase.rebaseOnCurrent);
done();
});
- resolveFetchJSON({rebase: {}});
+ resolve_fetchJSON({rebase: {}});
});
test('rebase on current', done => {
@@ -411,7 +413,7 @@
assert.isTrue(response.rebase.rebaseOnCurrent);
done();
});
- resolveFetchJSON({rebase: {enabled: true}});
+ resolve_fetchJSON({rebase: {enabled: true}});
});
});
@@ -423,7 +425,7 @@
element.addEventListener('server-error', resolve);
});
- element.fetchJSON().then(response => {
+ element._fetchJSON({}).then(response => {
assert.isUndefined(response);
assert.isTrue(getResponseObjectStub.notCalled);
serverErrorEventPromise.then(() => done());
@@ -444,7 +446,7 @@
element.addEventListener('server-error', serverErrorStub);
const authErrorStub = sandbox.stub();
element.addEventListener('auth-error', authErrorStub);
- element.fetchJSON('/bar').then(r => {
+ element._fetchJSON('/bar').then(r => {
flush(() => {
assert.isTrue(authErrorStub.called);
assert.isFalse(serverErrorStub.called);
@@ -484,10 +486,10 @@
});
test('legacy n,z key in change url is replaced', () => {
- const stub = sandbox.stub(element, 'fetchJSON')
+ const stub = sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([]));
element.getChanges(1, null, 'n,z');
- assert.equal(stub.args[0][3].S, 0);
+ assert.equal(stub.lastCall.args[0].params.S, 0);
});
test('saveDiffPreferences invalidates cache line', () => {
@@ -512,7 +514,7 @@
});
element._cache[cacheKey] = 'fake cache';
- stub.callArg(1);
+ stub.lastCall.args[0].errFn();
});
test('getAccount does not add to the cache when resp.status is 403',
@@ -527,7 +529,7 @@
done();
});
element._cache[cacheKey] = 'fake cache';
- stub.callArgWith(1, {status: 403});
+ stub.lastCall.args[0].errFn({status: 403});
});
test('getAccount when resp is successful', done => {
@@ -541,7 +543,8 @@
done();
});
element._cache[cacheKey] = 'fake cache';
- stub.callArg(1, {});
+
+ stub.lastCall.args[0].errFn({});
});
const preferenceSetup = function(testJSON, loggedIn, smallScreen) {
@@ -872,66 +875,69 @@
const fetchStub = sandbox.stub(element, '_getChangeURLAndFetch')
.returns(Promise.resolve());
return element.queryChangeFiles('42', 'edit', 'test/path.js').then(() => {
- assert.deepEqual(fetchStub.lastCall.args,
- ['42', '/files?q=test%2Fpath.js', 'edit']);
+ assert.deepEqual(fetchStub.lastCall.args[0], {
+ changeNum: '42',
+ endpoint: '/files?q=test%2Fpath.js',
+ patchNum: 'edit',
+ });
});
});
test('getRepos', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('test', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&m=test'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&m=test');
element.getRepos(null, 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0');
element.getRepos('test', 25, 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=25&m=test'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=25&m=test');
});
test('getRepos filter', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('test/test/test', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&m=test%2Ftest%2Ftest'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&m=test%2Ftest%2Ftest');
});
test('getRepos filter regex', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('^test.*', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&r=%5Etest.*'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&r=%5Etest.*');
});
test('getGroups filter regex', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getGroups('^test.*', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/groups/?n=26&S=0&r=%5Etest.*'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/groups/?n=26&S=0&r=%5Etest.*');
});
test('gerrit auth is used', () => {
sandbox.stub(Gerrit.Auth, 'fetch').returns(Promise.resolve());
- element.fetchJSON('foo');
+ element._fetchJSON('foo');
assert(Gerrit.Auth.fetch.called);
});
- test('getSuggestedAccounts does not return fetchJSON', () => {
- const fetchJSONSpy = sandbox.spy(element, 'fetchJSON');
+ test('getSuggestedAccounts does not return _fetchJSON', () => {
+ const _fetchJSONSpy = sandbox.spy(element, '_fetchJSON');
return element.getSuggestedAccounts().then(accts => {
- assert.isFalse(fetchJSONSpy.called);
+ assert.isFalse(_fetchJSONSpy.called);
assert.equal(accts.length, 0);
});
});
- test('fetchJSON gets called by getSuggestedAccounts', () => {
- const fetchJSONStub = sandbox.stub(element, 'fetchJSON',
+ test('_fetchJSON gets called by getSuggestedAccounts', () => {
+ const _fetchJSONStub = sandbox.stub(element, '_fetchJSON',
() => Promise.resolve());
return element.getSuggestedAccounts('own').then(() => {
- assert.deepEqual(fetchJSONStub.lastCall.args[3], {
+ assert.deepEqual(_fetchJSONStub.lastCall.args[0].params, {
q: 'own',
suggest: null,
});
@@ -1064,7 +1070,7 @@
suite('getChanges populates _projectLookup', () => {
test('multiple queries', () => {
- sandbox.stub(element, 'fetchJSON')
+ sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([
[
{_number: 1, project: 'test'},
@@ -1073,7 +1079,7 @@
{_number: 3, project: 'test/test'},
],
]));
- // When opt_query instanceof Array, fetchJSON returns
+ // When opt_query instanceof Array, _fetchJSON returns
// Array<Array<Object>>.
return element.getChanges(null, []).then(() => {
assert.equal(Object.keys(element._projectLookup).length, 3);
@@ -1084,14 +1090,14 @@
});
test('no query', () => {
- sandbox.stub(element, 'fetchJSON')
+ sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([
{_number: 1, project: 'test'},
{_number: 2, project: 'test'},
{_number: 3, project: 'test/test'},
]));
- // When opt_query !instanceof Array, fetchJSON returns
+ // When opt_query !instanceof Array, _fetchJSON returns
// Array<Object>.
return element.getChanges().then(() => {
assert.equal(Object.keys(element._projectLookup).length, 3);
@@ -1104,10 +1110,12 @@
test('_getChangeURLAndFetch', () => {
element._projectLookup = {1: 'test'};
- const fetchStub = sandbox.stub(element, 'fetchJSON')
+ const fetchStub = sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve());
- return element._getChangeURLAndFetch(1, '/test', 1).then(() => {
- assert.isTrue(fetchStub.calledWith('/changes/test~1/revisions/1/test'));
+ const req = {changeNum: 1, endpoint: '/test', patchNum: 1};
+ return element._getChangeURLAndFetch(req).then(() => {
+ assert.equal(fetchStub.lastCall.args[0].url,
+ '/changes/test~1/revisions/1/test');
});
});
@@ -1170,8 +1178,8 @@
const range = {basePatchNum: 'PARENT', patchNum: 2};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 2);
- assert.isNotOk(fetchStub.lastCall.args[5]);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 2);
+ assert.isNotOk(fetchStub.lastCall.args[0].params);
});
});
@@ -1181,10 +1189,10 @@
const range = {basePatchNum: 4, patchNum: 5};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.equal(fetchStub.lastCall.args[5].base, 4);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.equal(fetchStub.lastCall.args[0].params.base, 4);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
});
});
@@ -1194,10 +1202,10 @@
const range = {basePatchNum: -3, patchNum: 5};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
- assert.equal(fetchStub.lastCall.args[5].parent, 3);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
+ assert.equal(fetchStub.lastCall.args[0].params.parent, 3);
});
});
});
@@ -1208,10 +1216,10 @@
.returns(Promise.resolve());
return element.getDiff(123, 'PARENT', 2, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 2);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 2);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
});
});
@@ -1220,10 +1228,10 @@
.returns(Promise.resolve());
return element.getDiff(123, 4, 5, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
- assert.equal(fetchStub.lastCall.args[5].base, 4);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
+ assert.equal(fetchStub.lastCall.args[0].params.base, 4);
});
});
@@ -1232,10 +1240,10 @@
.returns(Promise.resolve());
return element.getDiff(123, -3, 5, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
- assert.equal(fetchStub.lastCall.args[5].parent, 3);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
+ assert.equal(fetchStub.lastCall.args[0].params.parent, 3);
});
});
});
@@ -1245,7 +1253,7 @@
element.getDashboard('gerrit/project', 'default:main');
assert.isTrue(fetchStub.calledOnce);
assert.equal(
- fetchStub.lastCall.args[0],
+ fetchStub.lastCall.args[0].url,
'/projects/gerrit%2Fproject/dashboards/default%3Amain');
});
diff --git a/polygerrit-ui/app/embed/embed.html b/polygerrit-ui/app/embed/embed.html
index f3c727e..9fb5c23 100644
--- a/polygerrit-ui/app/embed/embed.html
+++ b/polygerrit-ui/app/embed/embed.html
@@ -21,4 +21,4 @@
<link rel="import" href="../elements/change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="../elements/change-list/gr-change-list/gr-change-list.html">
<link rel="import" href="../elements/change-list/gr-dashboard-view/gr-dashboard-view.html">
-<link rel="import" href="../styles/app-theme.html">
+<link rel="import" href="../styles/themes/app-theme.html">
diff --git a/polygerrit-ui/app/rules.bzl b/polygerrit-ui/app/rules.bzl
index b60aa22..199a947 100644
--- a/polygerrit-ui/app/rules.bzl
+++ b/polygerrit-ui/app/rules.bzl
@@ -62,6 +62,15 @@
)
native.filegroup(
+ name = name + "_theme_sources",
+ srcs = native.glob(
+ ["styles/themes/*.html"],
+ # app-theme.html already included via an import in gr-app.html.
+ exclude = ["styles/themes/app-theme.html"],
+ ),
+ )
+
+ native.filegroup(
name = name + "_top_sources",
srcs = [
"favicon.ico",
@@ -73,6 +82,7 @@
srcs = [
name + "_app_sources",
name + "_css_sources",
+ name + "_theme_sources",
name + "_top_sources",
"//lib/fonts:robotofonts",
"//lib/js:highlightjs_files",
@@ -82,11 +92,12 @@
],
outs = outs,
cmd = " && ".join([
- "mkdir -p $$TMP/polygerrit_ui/{styles,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
+ "mkdir -p $$TMP/polygerrit_ui/{styles/themes,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
"for f in $(locations " + name + "_app_sources); do ext=$${f##*.}; cp -p $$f $$TMP/polygerrit_ui/elements/" + appName + ".$$ext; done",
"cp $(locations //lib/fonts:robotofonts) $$TMP/polygerrit_ui/fonts/",
"for f in $(locations " + name + "_top_sources); do cp $$f $$TMP/polygerrit_ui/; done",
"for f in $(locations "+ name + "_css_sources); do cp $$f $$TMP/polygerrit_ui/styles; done",
+ "for f in $(locations "+ name + "_theme_sources); do cp $$f $$TMP/polygerrit_ui/styles/themes; done",
"for f in $(locations //lib/js:highlightjs_files); do cp $$f $$TMP/polygerrit_ui/bower_components/highlightjs/ ; done",
"unzip -qd $$TMP/polygerrit_ui/bower_components $(location @webcomponentsjs//:zipfile) webcomponentsjs/webcomponents-lite.js",
"cd $$TMP",
diff --git a/polygerrit-ui/app/styles/gr-change-list-styles.html b/polygerrit-ui/app/styles/gr-change-list-styles.html
index 7379b9c..410389b 100644
--- a/polygerrit-ui/app/styles/gr-change-list-styles.html
+++ b/polygerrit-ui/app/styles/gr-change-list-styles.html
@@ -68,7 +68,6 @@
}
.topHeader th {
background-color: var(--table-header-background-color);
- font-size: var(--font-size-large);
height: 3rem;
position: -webkit-sticky;
position: sticky;
diff --git a/polygerrit-ui/app/styles/gr-table-styles.html b/polygerrit-ui/app/styles/gr-table-styles.html
index 5e40735..cf4e84e 100644
--- a/polygerrit-ui/app/styles/gr-table-styles.html
+++ b/polygerrit-ui/app/styles/gr-table-styles.html
@@ -71,11 +71,11 @@
}
.genericList .topHeader {
background-color: var(--table-header-background-color);
- font-size: var(--font-size-large);
height: 3rem;
}
.genericList .groupHeader {
background-color: var(--table-subheader-background-color);
+ font-size: var(--font-size-large);
}
.genericList a {
color: var(--primary-text-color);
diff --git a/polygerrit-ui/app/styles/app-theme.html b/polygerrit-ui/app/styles/themes/app-theme.html
similarity index 89%
rename from polygerrit-ui/app/styles/app-theme.html
rename to polygerrit-ui/app/styles/themes/app-theme.html
index 69262c9..21db329 100644
--- a/polygerrit-ui/app/styles/app-theme.html
+++ b/polygerrit-ui/app/styles/themes/app-theme.html
@@ -42,7 +42,7 @@
--table-header-background-color: #fafafa;
--table-subheader-background-color: #eaeaea;
- --chip-background-color: var(--header-background-color);
+ --chip-background-color: #eee;
--dropdown-background-color: #fff;
@@ -53,7 +53,7 @@
/* Font sizes */
--font-size-normal: 1rem;
--font-size-small: .92rem;
- --font-size-large: 1.076rem;
+ --font-size-large: 1.154rem;
--link-color: #2a66d9;
--primary-button-background-color: var(--link-color);
@@ -78,14 +78,14 @@
/* Diff colors */
--diff-selection-background-color: #c7dbf9;
- --light-remove-highlight-color: #fee;
- --light-add-highlight-color: #efe;
- --light-remove-add-highlight-color: #fff6ea;
- --light-rebased-add-highlight-color: #edfffa;
- --dark-remove-highlight-color: rgba(255, 0, 0, 0.15);
- --dark-add-highlight-color: rgba(0, 255, 0, 0.15);
- --dark-rebased-remove-highlight-color: rgba(255, 139, 6, 0.15);
- --dark-rebased-add-highlight-color: rgba(11, 255, 155, 0.15);
+ --light-remove-highlight-color: #FFEBEE;
+ --light-add-highlight-color: #D8FED8;
+ --light-remove-add-highlight-color: #FFF8DC;
+ --light-rebased-add-highlight-color: #EEEEFF;
+ --dark-remove-highlight-color: #FFCDD2;
+ --dark-add-highlight-color: #AAF2AA;
+ --dark-rebased-remove-highlight-color: #F7E8B7;
+ --dark-rebased-add-highlight-color: #D7D7F9;
--diff-context-control-color: #fff7d4;
--diff-context-control-border-color: #f6e6a5;
--diff-tab-indicator-color: var(--deemphasized-text-color);
diff --git a/polygerrit-ui/app/styles/themes/dark-theme.html b/polygerrit-ui/app/styles/themes/dark-theme.html
new file mode 100644
index 0000000..1f473da
--- /dev/null
+++ b/polygerrit-ui/app/styles/themes/dark-theme.html
@@ -0,0 +1,83 @@
+<dom-module id="dark-theme">
+ <style is="custom-style">
+ html {
+ --primary-text-color: #e2e2e2;
+ --view-background-color: #212121;
+ --border-color: #555555;
+ --table-header-background-color: #353637;
+ --table-subheader-background-color: rgb(23, 27, 31);
+ --header-background-color: #5487E5;
+ --header-text-color: var(--primary-text-color);
+ --deemphasized-text-color: #9a9a9a;
+ --footer-background-color: var(--table-header-background-color);
+ --expanded-background-color: #26282b;
+ --link-color: #5487E5;
+ --primary-button-background-color: var(--link-color);
+ --primary-button-text-color: var(--primary-text-color);
+ --secondary-button-background-color: var(--primary-text-color);
+ --secondary-button-text-color: var(--deemphasized-text-color);
+ --default-button-text-color: var(--link-color);
+ --default-button-background-color: var(--table-subheader-background-color);
+ --dropdown-background-color: var(--table-header-background-color);
+ --dialog-background-color: var(--view-background-color);
+ --chip-background-color: var(--table-header-background-color);
+
+ --select-background-color: var(--table-subheader-background-color);
+
+ --assignee-highlight-color: rgb(58, 54, 28);
+
+ --diff-selection-background-color: #3A71D8;
+ --light-remove-highlight-color: rgb(53, 27, 27);
+ --light-add-highlight-color: rgb(24, 45, 24);
+ --light-rebased-remove-highlight-color: rgb(60, 37, 8);
+ --light-rebased-add-highlight-color: rgb(72, 113, 101);
+ --dark-remove-highlight-color: rgba(255, 0, 0, 0.15);
+ --dark-add-highlight-color: rgba(0, 255, 0, 0.15);
+ --dark-rebased-remove-highlight-color: rgba(255, 139, 6, 0.15);
+ --dark-rebased-add-highlight-color: rgba(11, 255, 155, 0.15);
+ --diff-context-control-color: var(--table-header-background-color);
+ --diff-context-control-border-color: var(--border-color);
+ --diff-highlight-range-color: rgba(0, 100, 200, 0.5);
+ --diff-highlight-range-hover-color: rgba(0, 150, 255, 0.5);
+ --comment-text-color: var(--primary-text-color);
+ --comment-background-color: #0B162B;
+ --unresolved-comment-background-color: rgb(56, 90, 154);
+
+ --vote-color-approved: rgb(127, 182, 107);
+ --vote-color-recommended: rgb(63, 103, 50);
+ --vote-color-rejected: #ac2d3e;
+ --vote-color-disliked: #bf6874;
+ --vote-color-neutral: #597280;
+
+ --edit-mode-background-color: rgb(92, 10, 54);
+ --emphasis-color: #383f4a;
+
+ --tooltip-background-color: #111;
+
+ --syntax-default-color: var(--primary-text-color);
+ --syntax-meta-color: #6D7EEE;
+ --syntax-keyword-color: #CD4CF0;
+ --syntax-number-color: #00998A;
+ --syntax-selector-class-color: #FFCB68;
+ --syntax-variable-color: #F77669;
+ --syntax-template-variable-color: #F77669;
+ --syntax-comment-color: var(--deemphasized-text-color);
+ --syntax-string-color: #C3E88D;
+ --syntax-selector-id-color: #F77669;
+ --syntax-built_in-color: rgb(247, 195, 105);
+ --syntax-tag-color: #F77669;
+ --syntax-link-color: #C792EA;
+ --syntax-meta-keyword-color: #EEFFF7;
+ --syntax-type-color: #DD5F5F;
+ --syntax-title-color: #75A5FF;
+ --syntax-attr-color: #80CBBF;
+ --syntax-literal-color: #EEFFF7;
+ --syntax-selector-pseudo-color: #C792EA;
+ --syntax-regexp-color: #F77669;
+ --syntax-selector-attr-color: #80CBBF;
+ --syntax-template-tag-color: #C792EA;
+
+ background-color: var(--view-background-color);
+ }
+ </style>
+</dom-module>
\ No newline at end of file
diff --git a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
index 3a5cd83b..579e783 100644
--- a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
+++ b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
@@ -1,5 +1,6 @@
-import os, re, json
-from shutil import copyfile, rmtree
+import json
+import os
+import re
polymerRegex = r"Polymer\({"
polymerCompiledRegex = re.compile(polymerRegex)
@@ -10,103 +11,119 @@
regexBehavior = r"<script>(.+)<\/script>"
behaviorCompiledRegex = re.compile(regexBehavior, re.DOTALL)
+
def _open(filename, mode="r"):
- try:
- return open(filename, mode, encoding="utf-8")
- except TypeError:
- return open(filename, mode)
+ try:
+ return open(filename, mode, encoding="utf-8")
+ except TypeError:
+ return open(filename, mode)
-def replaceBehaviorLikeHTML (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- match = behaviorCompiledRegex.search(file_str)
- if (match):
- with _open("polygerrit-ui/temp/behaviors/" + fileOut.replace("html", "js") , "w+") as f:
- f.write(match.group(1))
-def replaceBehaviorLikeJS (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- with _open("polygerrit-ui/temp/behaviors/" + fileOut , "w+") as f:
- f.write(file_str)
+def replaceBehaviorLikeHTML(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ match = behaviorCompiledRegex.search(file_str)
+ if match:
+ with _open("polygerrit-ui/temp/behaviors/" +
+ fileOut.replace("html", "js"), "w+") as f:
+ f.write(match.group(1))
+
+
+def replaceBehaviorLikeJS(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ with _open("polygerrit-ui/temp/behaviors/" + fileOut, "w+") as f:
+ f.write(file_str)
+
def generateStubBehavior(behaviorName):
- with _open("polygerrit-ui/temp/behaviors/" + behaviorName + ".js", "w+") as f:
- f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
+ with _open("polygerrit-ui/temp/behaviors/" +
+ behaviorName + ".js", "w+") as f:
+ f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
-def replacePolymerElement (fileIn, fileOut, root):
- with _open(fileIn) as f:
- key = fileOut.split('.')[0]
- # Removed self invoked function
- file_str = f.read()
- file_str_no_fn = fnCompiledRegex.search(file_str)
- if file_str_no_fn:
- package = root.replace("/", ".") + "." + fileOut
+def replacePolymerElement(fileIn, fileOut, root):
+ with _open(fileIn) as f:
+ key = fileOut.split('.')[0]
+        # Removed self-invoked function
+ file_str = f.read()
+ file_str_no_fn = fnCompiledRegex.search(file_str)
- with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
- mainFileContents = re.sub(polymerCompiledRegex, "exports = Polymer({", file_str_no_fn.group(1)).replace("'use strict';", "")
- f.write("/** \n" \
- "* @fileoverview \n" \
- "* @suppress {missingProperties} \n" \
- "*/ \n\n" \
- "goog.module('polygerrit." + package + "')\n\n" + mainFileContents)
+ if file_str_no_fn:
+ package = root.replace("/", ".") + "." + fileOut
- # Add package and javascript to files object.
- elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
- elements[key]["package"] = package
+ with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
+ mainFileContents = re.sub(
+ polymerCompiledRegex,
+ "exports = Polymer({",
+ file_str_no_fn.group(1)).replace("'use strict';", "")
+ f.write("/** \n"
+ "* @fileoverview \n"
+ "* @suppress {missingProperties} \n"
+ "*/ \n\n"
+ "goog.module('polygerrit." + package + "')\n\n" +
+ mainFileContents)
+
+ # Add package and javascript to files object.
+ elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
+ elements[key]["package"] = package
+
def writeTempFile(file, root):
- # This is included in an extern because it is directly on the window object.
- # (for now at least).
- if "gr-reporting" in file:
- return
- key = file.split('.')[0]
- if not key in elements:
- # gr-app doesn't have an additional level
- elements[key] = {"directory": 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]}
- if file.endswith(".html") and not file.endswith("_test.html"):
- # gr-navigation is treated like a behavior rather than a standard element
- # because of the way it added to the Gerrit object.
- if file.endswith("gr-navigation.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- else:
- elements[key]["html"] = os.path.join(root, file)
- if file.endswith(".js"):
- replacePolymerElement(os.path.join(root, file), file, root)
+ # This is included in an extern because it is directly on the window object
+ # (for now at least).
+ if "gr-reporting" in file:
+ return
+ key = file.split('.')[0]
+ if key not in elements:
+ # gr-app doesn't have an additional level
+ elements[key] = {
+ "directory":
+ 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]
+ }
+ if file.endswith(".html") and not file.endswith("_test.html"):
+      # gr-navigation is treated like a behavior rather than a standard
+      # element because of the way it is added to the Gerrit object.
+ if file.endswith("gr-navigation.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ else:
+ elements[key]["html"] = os.path.join(root, file)
+ if file.endswith(".js"):
+ replacePolymerElement(os.path.join(root, file), file, root)
if __name__ == "__main__":
- # Create temp directory.
- if not os.path.exists("polygerrit-ui/temp"):
- os.makedirs("polygerrit-ui/temp")
+ # Create temp directory.
+ if not os.path.exists("polygerrit-ui/temp"):
+ os.makedirs("polygerrit-ui/temp")
- # Within temp directory create behavior directory.
- if not os.path.exists("polygerrit-ui/temp/behaviors"):
- os.makedirs("polygerrit-ui/temp/behaviors")
+ # Within temp directory create behavior directory.
+ if not os.path.exists("polygerrit-ui/temp/behaviors"):
+ os.makedirs("polygerrit-ui/temp/behaviors")
- elements = {}
+ elements = {}
- # Go through every file in app/elements, and re-write accordingly to temp
- # directory, and also added to elements object, which is used to generate a
- # map of html files, package names, and javascript files.
- for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
- for file in files:
- writeTempFile(file, root)
+    # Go through every file in app/elements, re-write it accordingly to the
+    # temp directory, and also add it to the elements object, which is used to
+    # generate a map of html files, package names, and javascript files.
+ for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
+ for file in files:
+ writeTempFile(file, root)
- # Special case for polymer behaviors we are using.
- replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
- generateStubBehavior("Polymer.IronOverlayBehavior")
- generateStubBehavior("Polymer.IronFitBehavior")
+ # Special case for polymer behaviors we are using.
+ replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
+ generateStubBehavior("Polymer.IronOverlayBehavior")
+ generateStubBehavior("Polymer.IronFitBehavior")
- #TODO figure out something to do with iron-overlay-behavior. it is hard-coded reformatted.
+ # TODO figure out something to do with iron-overlay-behavior.
+ # it is hard-coded reformatted.
- with _open("polygerrit-ui/temp/map.json", "w+") as f:
- f.write(json.dumps(elements))
+ with _open("polygerrit-ui/temp/map.json", "w+") as f:
+ f.write(json.dumps(elements))
- for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
- for file in files:
- if file.endswith("behavior.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- elif file.endswith("behavior.js"):
- replaceBehaviorLikeJS(os.path.join(root, file), file)
+ for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
+ for file in files:
+ if file.endswith("behavior.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ elif file.endswith("behavior.js"):
+ replaceBehaviorLikeJS(os.path.join(root, file), file)
diff --git a/polygerrit-ui/app/test/index.html b/polygerrit-ui/app/test/index.html
index 6cf674a..5a5dbcd 100644
--- a/polygerrit-ui/app/test/index.html
+++ b/polygerrit-ui/app/test/index.html
@@ -88,6 +88,7 @@
'core/gr-error-manager/gr-error-manager_test.html',
'core/gr-main-header/gr-main-header_test.html',
'core/gr-navigation/gr-navigation_test.html',
+ 'core/gr-reporting/gr-jank-detector_test.html',
'core/gr-reporting/gr-reporting_test.html',
'core/gr-router/gr-router_test.html',
'core/gr-search-bar/gr-search-bar_test.html',
@@ -111,7 +112,6 @@
'diff/gr-ranged-comment-layer/gr-ranged-comment-layer_test.html',
'diff/gr-selection-action-box/gr-selection-action-box_test.html',
'diff/gr-syntax-layer/gr-syntax-layer_test.html',
- 'diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html',
'edit/gr-default-editor/gr-default-editor_test.html',
'edit/gr-edit-controls/gr-edit-controls_test.html',
'edit/gr-edit-file-controls/gr-edit-file-controls_test.html',
@@ -164,6 +164,7 @@
'shared/gr-js-api-interface/gr-plugin-endpoints_test.html',
'shared/gr-js-api-interface/gr-plugin-rest-api_test.html',
'shared/gr-fixed-panel/gr-fixed-panel_test.html',
+ 'shared/gr-lib-loader/gr-lib-loader_test.html',
'shared/gr-limited-text/gr-limited-text_test.html',
'shared/gr-linked-chip/gr-linked-chip_test.html',
'shared/gr-linked-text/gr-linked-text_test.html',
diff --git a/proto/cache.proto b/proto/cache.proto
index 634b595..a826f8c 100644
--- a/proto/cache.proto
+++ b/proto/cache.proto
@@ -45,3 +45,151 @@
int64 expires_at = 4;
string provider_id = 5;
}
+
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesCache.Key.
+// Next ID: 4
+message ChangeNotesKeyProto {
+ string project = 1;
+ int32 change_id = 2;
+ bytes id = 3;
+}
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesState.
+//
+// Note on embedded protos: this is just for storing in a cache, so some formats
+// were chosen for ease of coding the initial implementation. In particular, where
+// there already exists another serialization mechanism in Gerrit for
+// serializing a particular field, we use that rather than defining a new proto
+// type. This includes ReviewDb types that can be serialized to proto using
+// ProtobufCodec as well as NoteDb and indexed types that are serialized using
+// JSON. We can always revisit this decision later, particularly when we
+// eliminate the ReviewDb types; it just requires bumping the cache version.
+//
+// Note on nullability: there are a lot of nullable fields in ChangeNotesState
+// and its dependencies. It's likely we could make some of them non-nullable,
+// but each one of those would be a potentially significant amount of cleanup,
+// and there's no guarantee we'd be able to eliminate all of them. (For a less
+// complex class, it's likely the cleanup would be more feasible.)
+//
+// Instead, we just take the tedious yet simple approach of having a "has_foo"
+// field for each nullable field "foo", indicating whether or not foo is null.
+//
+// Next ID: 19
+message ChangeNotesStateProto {
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ bytes meta_id = 1;
+
+ int32 change_id = 2;
+
+ // Next ID: 24
+ message ChangeColumnsProto {
+ string change_key = 1;
+
+ int64 created_on = 2;
+
+ int64 last_updated_on = 3;
+
+ int32 owner = 4;
+
+ string branch = 5;
+
+ int32 current_patch_set_id = 6;
+ bool has_current_patch_set_id = 7;
+
+ string subject = 8;
+
+ string topic = 9;
+ bool has_topic = 10;
+
+ string original_subject = 11;
+ bool has_original_subject = 12;
+
+ string submission_id = 13;
+ bool has_submission_id = 14;
+
+ int32 assignee = 15;
+ bool has_assignee = 16;
+
+ string status = 17;
+ bool has_status = 18;
+
+ bool is_private = 19;
+
+ bool work_in_progress = 20;
+
+ bool review_started = 21;
+
+ int32 revert_of = 22;
+ bool has_revert_of = 23;
+ }
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ ChangeColumnsProto columns = 3;
+
+ repeated int32 past_assignee = 4;
+
+ repeated string hashtag = 5;
+
+ // Raw PatchSet proto as produced by ProtobufCodec.
+ repeated bytes patch_set = 6;
+
+ // Raw PatchSetApproval proto as produced by ProtobufCodec.
+ repeated bytes approval = 7;
+
+ // Next ID: 4
+ message ReviewerSetEntryProto {
+ string state = 1;
+ int32 account_id = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerSetEntryProto reviewer = 8;
+
+ // Next ID: 4
+ message ReviewerByEmailSetEntryProto {
+ string state = 1;
+ string address = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerByEmailSetEntryProto reviewer_by_email = 9;
+
+ repeated ReviewerSetEntryProto pending_reviewer = 10;
+
+ repeated ReviewerByEmailSetEntryProto pending_reviewer_by_email = 11;
+
+ repeated int32 past_reviewer = 12;
+
+ // Next ID: 5
+ message ReviewerStatusUpdateProto {
+ int64 date = 1;
+ int32 updated_by = 2;
+ int32 reviewer = 3;
+ string state = 4;
+ }
+ repeated ReviewerStatusUpdateProto reviewer_update = 13;
+
+ // JSON produced from
+ // com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord.
+ repeated string submit_record = 14;
+
+ // Raw ChangeMessage proto as produced by ProtobufCodec.
+ repeated bytes change_message = 15;
+
+ // JSON produced from com.google.gerrit.reviewdb.client.Comment.
+ repeated string published_comment = 16;
+
+ int64 read_only_until = 17;
+ bool has_read_only_until = 18;
+}
+
+
+// Serialized form of com.google.gerrit.server.query.change.ConflictKey
+message ConflictKeyProto {
+ bytes commit = 1;
+ bytes other_commit = 2;
+ string submit_type = 3;
+ bool content_merge = 4;
+}
diff --git a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
index 699dd0e..3dd6360 100644
--- a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
+++ b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
@@ -21,6 +21,7 @@
* @param staticResourcePath
* @param? faviconPath
* @param? versionInfo
+ * @param? deprecateGwtUi
*/
{template .Index}
<!DOCTYPE html>{\n}
@@ -32,7 +33,9 @@
<script>
window.CLOSURE_NO_DEPS = true;
{if $canonicalPath != ''}window.CANONICAL_PATH = '{$canonicalPath}';{/if}
+ {if $deprecateGwtUi}window.DEPRECATE_GWT_UI = true;{/if}
{if $versionInfo}window.VERSION_INFO = '{$versionInfo}';{/if}
+ {if $staticResourcePath != ''}window.STATIC_RESOURCE_PATH = '{$staticResourcePath}';{/if}
</script>{\n}
{if $faviconPath}
diff --git a/resources/com/google/gerrit/pgm/Startup.py b/resources/com/google/gerrit/pgm/Startup.py
index 469d5df..ec18f42 100644
--- a/resources/com/google/gerrit/pgm/Startup.py
+++ b/resources/com/google/gerrit/pgm/Startup.py
@@ -19,14 +19,16 @@
from __future__ import print_function
import sys
+
def print_help():
- for (n, v) in vars(sys.modules['__main__']).items():
- if not n.startswith("__") and not n in ['help', 'reload'] \
- and str(type(v)) != "<type 'javapackage'>" \
- and not str(v).startswith("<module"):
- print("\"%s\" is \"%s\"" % (n, v))
- print()
- print("Welcome to the Gerrit Inspector")
- print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+ for (n, v) in vars(sys.modules['__main__']).items():
+ if not n.startswith("__") and n not in ['help', 'reload'] \
+ and str(type(v)) != "<type 'javapackage'>" \
+ and not str(v).startswith("<module"):
+ print("\"%s\" is \"%s\"" % (n, v))
+ print()
+ print("Welcome to the Gerrit Inspector")
+ print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+
print_help()
diff --git a/tools/bzl/asciidoc.bzl b/tools/bzl/asciidoc.bzl
index 62fa4c6..e20624d 100644
--- a/tools/bzl/asciidoc.bzl
+++ b/tools/bzl/asciidoc.bzl
@@ -103,7 +103,7 @@
_asciidoc_attrs = {
"_exe": attr.label(
- default = Label("//lib/asciidoctor:asciidoc"),
+ default = Label("//java/com/google/gerrit/asciidoctor:asciidoc"),
cfg = "host",
allow_files = True,
executable = True,
diff --git a/tools/bzl/license-map.py b/tools/bzl/license-map.py
index 74a84cc..476ccb9 100644
--- a/tools/bzl/license-map.py
+++ b/tools/bzl/license-map.py
@@ -25,35 +25,34 @@
handled_rules = []
for xml in args.xmls:
- tree = ET.parse(xml)
- root = tree.getroot()
+ tree = ET.parse(xml)
+ root = tree.getroot()
- for child in root:
- rule_name = child.attrib["name"]
- if rule_name in handled_rules:
- # already handled in other xml files
- continue
+ for child in root:
+ rule_name = child.attrib["name"]
+ if rule_name in handled_rules:
+ # already handled in other xml files
+ continue
- handled_rules.append(rule_name)
- for c in child.getchildren():
- if c.tag != "rule-input":
- continue
+ handled_rules.append(rule_name)
+ for c in child.getchildren():
+ if c.tag != "rule-input":
+ continue
- license_name = c.attrib["name"]
- if LICENSE_PREFIX in license_name:
- entries[rule_name].append(license_name)
- graph[license_name].append(rule_name)
+ license_name = c.attrib["name"]
+ if LICENSE_PREFIX in license_name:
+ entries[rule_name].append(license_name)
+ graph[license_name].append(rule_name)
if len(graph[DO_NOT_DISTRIBUTE]):
- print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
- for target in graph[DO_NOT_DISTRIBUTE]:
- print(target, file=stderr)
- exit(1)
+ print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
+ for target in graph[DO_NOT_DISTRIBUTE]:
+ print(target, file=stderr)
+ exit(1)
if args.asciidoctor:
- print(
-# We don't want any blank line before "= Gerrit Code Review - Licenses"
-"""= Gerrit Code Review - Licenses
+ # We don't want any blank line before "= Gerrit Code Review - Licenses"
+ print("""= Gerrit Code Review - Licenses
Gerrit open source software is licensed under the <<Apache2_0,Apache
License 2.0>>. Executable distributions also include other software
@@ -93,40 +92,39 @@
""")
for n in sorted(graph.keys()):
- if len(graph[n]) == 0:
- continue
+ if len(graph[n]) == 0:
+ continue
- name = n[len(LICENSE_PREFIX):]
- safename = name.replace(".", "_")
- print()
- print("[[%s]]" % safename)
- print(name)
- print()
- for d in sorted(graph[n]):
- if d.startswith("//lib:") or d.startswith("//lib/"):
- p = d[len("//lib:"):]
- else:
- p = d[d.index(":")+1:].lower()
- if "__" in p:
- p = p[:p.index("__")]
- print("* " + p)
- print()
- print("[[%s_license]]" % safename)
- print("----")
- filename = n[2:].replace(":", "/")
- try:
- with open(filename, errors='ignore') as fd:
- copyfileobj(fd, stdout)
- except TypeError:
- with open(filename) as fd:
- copyfileobj(fd, stdout)
- print()
- print("----")
- print()
+ name = n[len(LICENSE_PREFIX):]
+ safename = name.replace(".", "_")
+ print()
+ print("[[%s]]" % safename)
+ print(name)
+ print()
+ for d in sorted(graph[n]):
+ if d.startswith("//lib:") or d.startswith("//lib/"):
+ p = d[len("//lib:"):]
+ else:
+ p = d[d.index(":")+1:].lower()
+ if "__" in p:
+ p = p[:p.index("__")]
+ print("* " + p)
+ print()
+ print("[[%s_license]]" % safename)
+ print("----")
+ filename = n[2:].replace(":", "/")
+ try:
+ with open(filename, errors='ignore') as fd:
+ copyfileobj(fd, stdout)
+ except TypeError:
+ with open(filename) as fd:
+ copyfileobj(fd, stdout)
+ print()
+ print("----")
+ print()
if args.asciidoctor:
- print(
-"""
+ print("""
GERRIT
------
Part of link:index.html[Gerrit Code Review]
diff --git a/tools/download_file.py b/tools/download_file.py
index 26671f0..29398e6 100755
--- a/tools/download_file.py
+++ b/tools/download_file.py
@@ -30,49 +30,50 @@
def safe_mkdirs(d):
- if path.isdir(d):
- return
- try:
- makedirs(d)
- except OSError as err:
- if not path.isdir(d):
- raise err
+ if path.isdir(d):
+ return
+ try:
+ makedirs(d)
+ except OSError as err:
+ if not path.isdir(d):
+ raise err
def download_properties(root_dir):
- """ Get the download properties.
+ """ Get the download properties.
- First tries to find the properties file in the given root directory,
- and if not found there, tries in the Gerrit settings folder in the
- user's home directory.
+ First tries to find the properties file in the given root directory,
+ and if not found there, tries in the Gerrit settings folder in the
+ user's home directory.
- Returns a set of download properties, which may be empty.
+ Returns a set of download properties, which may be empty.
- """
- p = {}
- local_prop = path.join(root_dir, LOCAL_PROPERTIES)
- if not path.isfile(local_prop):
- local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
- if path.isfile(local_prop):
- try:
- with open(local_prop) as fd:
- for line in fd:
- if line.startswith('download.'):
- d = [e.strip() for e in line.split('=', 1)]
- name, url = d[0], d[1]
- p[name[len('download.'):]] = url
- except OSError:
- pass
- return p
+ """
+ p = {}
+ local_prop = path.join(root_dir, LOCAL_PROPERTIES)
+ if not path.isfile(local_prop):
+ local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
+ if path.isfile(local_prop):
+ try:
+ with open(local_prop) as fd:
+ for line in fd:
+ if line.startswith('download.'):
+ d = [e.strip() for e in line.split('=', 1)]
+ name, url = d[0], d[1]
+ p[name[len('download.'):]] = url
+ except OSError:
+ pass
+ return p
def cache_entry(args):
- if args.v:
- h = args.v
- else:
- h = sha1(args.u.encode('utf-8')).hexdigest()
- name = '%s-%s' % (path.basename(args.o), h)
- return path.join(CACHE_DIR, name)
+ if args.v:
+ h = args.v
+ else:
+ h = sha1(args.u.encode('utf-8')).hexdigest()
+ name = '%s-%s' % (path.basename(args.o), h)
+ return path.join(CACHE_DIR, name)
+
opts = OptionParser()
opts.add_option('-o', help='local output file')
@@ -85,89 +86,90 @@
root_dir = args.o
while root_dir and path.dirname(root_dir) != root_dir:
- root_dir, n = path.split(root_dir)
- if n == 'WORKSPACE':
- break
+ root_dir, n = path.split(root_dir)
+ if n == 'WORKSPACE':
+ break
redirects = download_properties(root_dir)
cache_ent = cache_entry(args)
src_url = resolve_url(args.u, redirects)
if not path.exists(cache_ent):
- try:
- safe_mkdirs(path.dirname(cache_ent))
- except OSError as err:
- print('error creating directory %s: %s' %
- (path.dirname(cache_ent), err), file=stderr)
- exit(1)
+ try:
+ safe_mkdirs(path.dirname(cache_ent))
+ except OSError as err:
+ print('error creating directory %s: %s' %
+ (path.dirname(cache_ent), err), file=stderr)
+ exit(1)
- print('Download %s' % src_url, file=stderr)
- try:
- check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
- except OSError as err:
- print('could not invoke curl: %s\nis curl installed?' % err, file=stderr)
- exit(1)
- except CalledProcessError as err:
- print('error using curl: %s' % err, file=stderr)
- exit(1)
+ print('Download %s' % src_url, file=stderr)
+ try:
+ check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
+ except OSError as err:
+ print('could not invoke curl: %s\nis curl installed?' % err,
+ file=stderr)
+ exit(1)
+ except CalledProcessError as err:
+ print('error using curl: %s' % err, file=stderr)
+ exit(1)
if args.v:
- have = hash_file(sha1(), cache_ent).hexdigest()
- if args.v != have:
- print((
- '%s:\n' +
- 'expected %s\n' +
- 'received %s\n') % (src_url, args.v, have), file=stderr)
- try:
- remove(cache_ent)
- except OSError as err:
- if path.exists(cache_ent):
- print('error removing %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ have = hash_file(sha1(), cache_ent).hexdigest()
+ if args.v != have:
+ print((
+ '%s:\n' +
+ 'expected %s\n' +
+ 'received %s\n') % (src_url, args.v, have), file=stderr)
+ try:
+ remove(cache_ent)
+ except OSError as err:
+ if path.exists(cache_ent):
+ print('error removing %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
exclude = []
if args.x:
- exclude += args.x
+ exclude += args.x
if args.exclude_java_sources:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if n.endswith('.java'):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if n.endswith('.java'):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
if args.unsign:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if (n.endswith('.RSA')
- or n.endswith('.SF')
- or n.endswith('.LIST')):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if (n.endswith('.RSA')
+ or n.endswith('.SF')
+ or n.endswith('.LIST')):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
safe_mkdirs(path.dirname(args.o))
if exclude:
- try:
- shutil.copyfile(cache_ent, args.o)
- except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
- try:
- check_call(['zip', '-d', args.o] + exclude)
- except CalledProcessError as err:
- print('error removing files from zip: %s' % err, file=stderr)
- exit(1)
-else:
- try:
- link(cache_ent, args.o)
- except OSError as err:
try:
- shutil.copyfile(cache_ent, args.o)
+ shutil.copyfile(cache_ent, args.o)
except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
+ try:
+ check_call(['zip', '-d', args.o] + exclude)
+ except CalledProcessError as err:
+ print('error removing files from zip: %s' % err, file=stderr)
+ exit(1)
+else:
+ try:
+ link(cache_ent, args.o)
+ except OSError as err:
+ try:
+ shutil.copyfile(cache_ent, args.o)
+ except (shutil.Error, IOError) as err:
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
diff --git a/tools/eclipse/BUILD b/tools/eclipse/BUILD
index 67763e2..546c75e 100644
--- a/tools/eclipse/BUILD
+++ b/tools/eclipse/BUILD
@@ -18,8 +18,8 @@
"//gerrit-plugin-gwtui:gwtui-api-lib",
"//java/com/google/gerrit/acceptance:lib",
"//java/com/google/gerrit/server",
- "//lib/asciidoctor:asciidoc_lib",
- "//lib/asciidoctor:doc_indexer_lib",
+ "//java/com/google/gerrit/asciidoctor:asciidoc_lib",
+ "//java/com/google/gerrit/asciidoctor:doc_indexer_lib",
"//lib/auto:auto-value",
"//lib/gwt:ant",
"//lib/gwt:colt",
diff --git a/tools/eclipse/project.py b/tools/eclipse/project.py
index a6b0964..b99c04e 100755
--- a/tools/eclipse/project.py
+++ b/tools/eclipse/project.py
@@ -30,20 +30,20 @@
GWT = '//gerrit-gwtui:ui_module'
AUTO = '//lib/auto:auto-value'
JRE = '/'.join([
- 'org.eclipse.jdt.launching.JRE_CONTAINER',
- 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
- 'JavaSE-1.8',
+ 'org.eclipse.jdt.launching.JRE_CONTAINER',
+ 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
+ 'JavaSE-1.8',
])
# Map of targets to corresponding classpath collector rules
cp_targets = {
- AUTO: '//tools/eclipse:autovalue_classpath_collect',
- GWT: '//tools/eclipse:gwt_classpath_collect',
- MAIN: '//tools/eclipse:main_classpath_collect',
+ AUTO: '//tools/eclipse:autovalue_classpath_collect',
+ GWT: '//tools/eclipse:gwt_classpath_collect',
+ MAIN: '//tools/eclipse:main_classpath_collect',
}
ROOT = path.abspath(__file__)
while not path.exists(path.join(ROOT, 'WORKSPACE')):
- ROOT = path.dirname(ROOT)
+ ROOT = path.dirname(ROOT)
opts = OptionParser()
opts.add_option('--plugins', help='create eclipse projects for plugins',
@@ -56,38 +56,43 @@
batch_option = '--batch' if args.batch else None
+
def _build_bazel_cmd(*args):
- cmd = ['bazel']
- if batch_option:
- cmd.append('--batch')
- for arg in args:
- cmd.append(arg)
- return cmd
+ cmd = ['bazel']
+ if batch_option:
+ cmd.append('--batch')
+ for arg in args:
+ cmd.append(arg)
+ return cmd
+
def retrieve_ext_location():
- return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+ return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+
def gen_bazel_path():
- bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
- with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
- fd.write("bazel=%s\n" % bazel)
- fd.write("PATH=%s\n" % environ["PATH"])
+ bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
+ with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
+ fd.write("bazel=%s\n" % bazel)
+ fd.write("PATH=%s\n" % environ["PATH"])
+
def _query_classpath(target):
- deps = []
- t = cp_targets[target]
- try:
- check_call(_build_bazel_cmd('build', t))
- except CalledProcessError:
- exit(1)
- name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
- deps = [line.rstrip('\n') for line in open(name)]
- return deps
+ deps = []
+ t = cp_targets[target]
+ try:
+ check_call(_build_bazel_cmd('build', t))
+ except CalledProcessError:
+ exit(1)
+ name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
+ deps = [line.rstrip('\n') for line in open(name)]
+ return deps
+
def gen_project(name='gerrit', root=ROOT):
- p = path.join(root, '.project')
- with open(p, 'w') as fd:
- print("""\
+ p = path.join(root, '.project')
+ with open(p, 'w') as fd:
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>%(name)s</name>
@@ -102,16 +107,17 @@
</projectDescription>\
""" % {"name": name}, file=fd)
+
def gen_plugin_classpath(root):
- p = path.join(root, '.classpath')
- with open(p, 'w') as fd:
- if path.exists(path.join(root, 'src', 'test', 'java')):
- testpath = """
+ p = path.join(root, '.classpath')
+ with open(p, 'w') as fd:
+ if path.exists(path.join(root, 'src', 'test', 'java')):
+ testpath = """
<classpathentry excluding="**/BUILD" kind="src" path="src/test/java"\
out="eclipse-out/test"/>"""
- else:
- testpath = ""
- print("""\
+ else:
+ testpath = ""
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="**/BUILD" kind="src" path="src/main/java"/>%(testpath)s
@@ -120,186 +126,193 @@
<classpathentry kind="output" path="eclipse-out/classes"/>
</classpath>""" % {"testpath": testpath}, file=fd)
+
def gen_classpath(ext):
- def make_classpath():
- impl = minidom.getDOMImplementation()
- return impl.createDocument(None, 'classpath', None)
+ def make_classpath():
+ impl = minidom.getDOMImplementation()
+ return impl.createDocument(None, 'classpath', None)
- def classpathentry(kind, path, src=None, out=None, exported=None):
- e = doc.createElement('classpathentry')
- e.setAttribute('kind', kind)
- # TODO(davido): Remove this and other exclude BUILD files hack
- # when this Bazel bug is fixed:
- # https://github.com/bazelbuild/bazel/issues/1083
- if kind == 'src':
- e.setAttribute('excluding', '**/BUILD')
- e.setAttribute('path', path)
- if src:
- e.setAttribute('sourcepath', src)
- if out:
- e.setAttribute('output', out)
- if exported:
- e.setAttribute('exported', 'true')
- doc.documentElement.appendChild(e)
+ def classpathentry(kind, path, src=None, out=None, exported=None):
+ e = doc.createElement('classpathentry')
+ e.setAttribute('kind', kind)
+ # TODO(davido): Remove this and other exclude BUILD files hack
+ # when this Bazel bug is fixed:
+ # https://github.com/bazelbuild/bazel/issues/1083
+ if kind == 'src':
+ e.setAttribute('excluding', '**/BUILD')
+ e.setAttribute('path', path)
+ if src:
+ e.setAttribute('sourcepath', src)
+ if out:
+ e.setAttribute('output', out)
+ if exported:
+ e.setAttribute('exported', 'true')
+ doc.documentElement.appendChild(e)
- doc = make_classpath()
- src = set()
- lib = set()
- proto = set()
- gwt_src = set()
- gwt_lib = set()
- plugins = set()
+ doc = make_classpath()
+ src = set()
+ lib = set()
+ proto = set()
+ gwt_src = set()
+ gwt_lib = set()
+ plugins = set()
- # Classpath entries are absolute for cross-cell support
- java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
- srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
- for p in _query_classpath(MAIN):
- if p.endswith('-src.jar'):
- # gwt_module() depends on -src.jar for Java to JavaScript compiles.
- if p.startswith("external"):
- p = path.join(ext, p)
- gwt_lib.add(p)
- continue
-
- m = java_library.match(p)
- if m:
- src.add(m.group(1))
- # Exceptions: both source and lib
- if p.endswith('libquery_parser.jar') or \
- p.endswith('libgerrit-prolog-common.jar'):
- lib.add(p)
- # JGit dependency from external repository
- if 'gerrit-' not in p and 'jgit' in p:
- lib.add(p)
- # Assume any jars in /proto/ are from java_proto_library rules
- if '/bin/proto/' in p:
- proto.add(p)
- else:
- # Don't mess up with Bazel internal test runner dependencies.
- # When we use Eclipse we rely on it for running the tests
- if p.endswith("external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
- continue
- if p.startswith("external"):
- p = path.join(ext, p)
- lib.add(p)
-
- for p in _query_classpath(GWT):
- m = java_library.match(p)
- if m:
- gwt_src.add(m.group(1))
-
- classpathentry('src', 'java')
- classpathentry('src', 'javatests', out='eclipse-out/test')
- classpathentry('src', 'resources')
- for s in sorted(src):
- out = None
-
- if s.startswith('lib/'):
- out = 'eclipse-out/lib'
- elif s.startswith('plugins/'):
- if args.plugins:
- plugins.add(s)
- continue
- out = 'eclipse-out/' + s
-
- p = path.join(s, 'java')
- if path.exists(p):
- classpathentry('src', p, out=out)
- continue
-
- for env in ['main', 'test']:
- o = None
- if out:
- o = out + '/' + env
- elif env == 'test':
- o = 'eclipse-out/test'
-
- for srctype in ['java', 'resources']:
- p = path.join(s, 'src', env, srctype)
- if path.exists(p):
- classpathentry('src', p, out=o)
-
- for libs in [lib, gwt_lib]:
- for j in sorted(libs):
- s = None
- m = srcs.match(j)
- if m:
- prefix = m.group(1)
- suffix = m.group(2)
- p = path.join(prefix, "jar", "%s-src.jar" % suffix)
- if path.exists(p):
- s = p
- if args.plugins:
- classpathentry('lib', j, s, exported=True)
- else:
- # Filter out the source JARs that we pull through transitive closure of
- # GWT plugin API (we add source directories themself). Exception is
- # libEdit-src.jar, that is needed for GWT SDM to work.
- m = java_library.match(j)
- if m:
- if m.group(1).startswith("gerrit-") and \
- j.endswith("-src.jar") and \
- not j.endswith("libEdit-src.jar"):
+ # Classpath entries are absolute for cross-cell support
+ java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
+ srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
+ for p in _query_classpath(MAIN):
+ if p.endswith('-src.jar'):
+ # gwt_module() depends on -src.jar for Java to JavaScript compiles.
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ gwt_lib.add(p)
continue
- classpathentry('lib', j, s)
- for p in sorted(proto):
- s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
- s = s.replace('.jar', '-src.jar')
- classpathentry('lib', p, s)
+ m = java_library.match(p)
+ if m:
+ src.add(m.group(1))
+ # Exceptions: both source and lib
+ if p.endswith('libquery_parser.jar') or \
+ p.endswith('libgerrit-prolog-common.jar'):
+ lib.add(p)
+ # JGit dependency from external repository
+ if 'gerrit-' not in p and 'jgit' in p:
+ lib.add(p)
+ # Assume any jars in /proto/ are from java_proto_library rules
+ if '/bin/proto/' in p:
+ proto.add(p)
+ else:
+ # Don't mess up with Bazel internal test runner dependencies.
+ # When we use Eclipse we rely on it for running the tests
+ if p.endswith(
+ "external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
+ continue
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ lib.add(p)
- for s in sorted(gwt_src):
- p = path.join(ROOT, s, 'src', 'main', 'java')
- if path.exists(p):
- classpathentry('lib', p, out='eclipse-out/gwtsrc')
+ for p in _query_classpath(GWT):
+ m = java_library.match(p)
+ if m:
+ gwt_src.add(m.group(1))
- classpathentry('con', JRE)
- classpathentry('output', 'eclipse-out/classes')
+ classpathentry('src', 'java')
+ classpathentry('src', 'javatests', out='eclipse-out/test')
+ classpathentry('src', 'resources')
+ for s in sorted(src):
+ out = None
- p = path.join(ROOT, '.classpath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ if s.startswith('lib/'):
+ out = 'eclipse-out/lib'
+ elif s.startswith('plugins/'):
+ if args.plugins:
+ plugins.add(s)
+ continue
+ out = 'eclipse-out/' + s
- if args.plugins:
- for plugin in plugins:
- plugindir = path.join(ROOT, plugin)
- try:
- gen_project(plugin.replace('plugins/', ""), plugindir)
- gen_plugin_classpath(plugindir)
- except (IOError, OSError) as err:
- print('error generating project for %s: %s' % (plugin, err),
- file=sys.stderr)
+ p = path.join(s, 'java')
+ if path.exists(p):
+ classpathentry('src', p, out=out)
+ continue
+
+ for env in ['main', 'test']:
+ o = None
+ if out:
+ o = out + '/' + env
+ elif env == 'test':
+ o = 'eclipse-out/test'
+
+ for srctype in ['java', 'resources']:
+ p = path.join(s, 'src', env, srctype)
+ if path.exists(p):
+ classpathentry('src', p, out=o)
+
+ for libs in [lib, gwt_lib]:
+ for j in sorted(libs):
+ s = None
+ m = srcs.match(j)
+ if m:
+ prefix = m.group(1)
+ suffix = m.group(2)
+ p = path.join(prefix, "jar", "%s-src.jar" % suffix)
+ if path.exists(p):
+ s = p
+ if args.plugins:
+ classpathentry('lib', j, s, exported=True)
+ else:
+ # Filter out the source JARs that we pull through transitive
+ # closure of GWT plugin API (we add source directories
+ # themselves). Exception is libEdit-src.jar, that is needed
+ # for GWT SDM to work.
+ m = java_library.match(j)
+ if m:
+ if m.group(1).startswith("gerrit-") and \
+ j.endswith("-src.jar") and \
+ not j.endswith("libEdit-src.jar"):
+ continue
+ classpathentry('lib', j, s)
+
+ for p in sorted(proto):
+ s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
+ s = s.replace('.jar', '-src.jar')
+ classpathentry('lib', p, s)
+
+ for s in sorted(gwt_src):
+ p = path.join(ROOT, s, 'src', 'main', 'java')
+ if path.exists(p):
+ classpathentry('lib', p, out='eclipse-out/gwtsrc')
+
+ classpathentry('con', JRE)
+ classpathentry('output', 'eclipse-out/classes')
+
+ p = path.join(ROOT, '.classpath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
+ if args.plugins:
+ for plugin in plugins:
+ plugindir = path.join(ROOT, plugin)
+ try:
+ gen_project(plugin.replace('plugins/', ""), plugindir)
+ gen_plugin_classpath(plugindir)
+ except (IOError, OSError) as err:
+ print('error generating project for %s: %s' % (plugin, err),
+ file=sys.stderr)
+
def gen_factorypath(ext):
- doc = minidom.getDOMImplementation().createDocument(None, 'factorypath', None)
- for jar in _query_classpath(AUTO):
- e = doc.createElement('factorypathentry')
- e.setAttribute('kind', 'EXTJAR')
- e.setAttribute('id', path.join(ext, jar))
- e.setAttribute('enabled', 'true')
- e.setAttribute('runInBatchMode', 'false')
- doc.documentElement.appendChild(e)
+ doc = minidom.getDOMImplementation().createDocument(None, 'factorypath',
+ None)
+ for jar in _query_classpath(AUTO):
+ e = doc.createElement('factorypathentry')
+ e.setAttribute('kind', 'EXTJAR')
+ e.setAttribute('id', path.join(ext, jar))
+ e.setAttribute('enabled', 'true')
+ e.setAttribute('runInBatchMode', 'false')
+ doc.documentElement.appendChild(e)
- p = path.join(ROOT, '.factorypath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ p = path.join(ROOT, '.factorypath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
try:
- ext_location = retrieve_ext_location().decode("utf-8")
- gen_project(args.project_name)
- gen_classpath(ext_location)
- gen_factorypath(ext_location)
- gen_bazel_path()
+ ext_location = retrieve_ext_location().decode("utf-8")
+ gen_project(args.project_name)
+ gen_classpath(ext_location)
+ gen_factorypath(ext_location)
+ gen_bazel_path()
- # TODO(davido): Remove this when GWT gone
- gwt_working_dir = ".gwt_work_dir"
- if not path.isdir(gwt_working_dir):
- makedirs(path.join(ROOT, gwt_working_dir))
+ # TODO(davido): Remove this when GWT gone
+ gwt_working_dir = ".gwt_work_dir"
+ if not path.isdir(gwt_working_dir):
+ makedirs(path.join(ROOT, gwt_working_dir))
- try:
- check_call(_build_bazel_cmd('build', MAIN, GWT, '//java/org/eclipse/jgit:libEdit-src.jar'))
- except CalledProcessError:
- exit(1)
+ try:
+ check_call(_build_bazel_cmd('build', MAIN, GWT,
+ '//java/org/eclipse/jgit:libEdit-src.jar'))
+ except CalledProcessError:
+ exit(1)
except KeyboardInterrupt:
- print('Interrupted by user', file=sys.stderr)
- exit(1)
+ print('Interrupted by user', file=sys.stderr)
+ exit(1)
diff --git a/tools/js/bower2bazel.py b/tools/js/bower2bazel.py
index 171ab55..7b24524 100755
--- a/tools/js/bower2bazel.py
+++ b/tools/js/bower2bazel.py
@@ -13,9 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Suggested call sequence:
+"""
+Suggested call sequence:
-python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl -b lib/js/bower_components.bzl
+python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl \
+ -b lib/js/bower_components.bzl
"""
from __future__ import print_function
@@ -31,139 +33,147 @@
import glob
import bowerutil
-# list of licenses for packages that don't specify one in their bower.json file.
+# list of licenses for packages that don't specify one in their bower.json file
package_licenses = {
- "codemirror-minified": "codemirror-minified",
- "es6-promise": "es6-promise",
- "fetch": "fetch",
- "font-roboto": "polymer",
- "iron-a11y-announcer": "polymer",
- "iron-a11y-keys-behavior": "polymer",
- "iron-autogrow-textarea": "polymer",
- "iron-behaviors": "polymer",
- "iron-dropdown": "polymer",
- "iron-fit-behavior": "polymer",
- "iron-flex-layout": "polymer",
- "iron-form-element-behavior": "polymer",
- "iron-icon": "polymer",
- "iron-iconset-svg": "polymer",
- "iron-input": "polymer",
- "iron-menu-behavior": "polymer",
- "iron-meta": "polymer",
- "iron-overlay-behavior": "polymer",
- "iron-resizable-behavior": "polymer",
- "iron-selector": "polymer",
- "iron-validatable-behavior": "polymer",
- "moment": "moment",
- "neon-animation": "polymer",
- "page": "page.js",
- "paper-button": "polymer",
- "paper-icon-button": "polymer",
- "paper-input": "polymer",
- "paper-item": "polymer",
- "paper-listbox": "polymer",
- "paper-toggle-button": "polymer",
- "paper-styles": "polymer",
- "paper-tabs": "polymer",
- "polymer": "polymer",
- "polymer-resin": "polymer",
- "promise-polyfill": "promise-polyfill",
- "web-animations-js": "Apache2.0",
- "webcomponentsjs": "polymer",
- "paper-material": "polymer",
- "paper-styles": "polymer",
- "paper-behaviors": "polymer",
- "paper-ripple": "polymer",
- "iron-checked-element-behavior": "polymer",
- "font-roboto": "polymer",
+ "codemirror-minified": "codemirror-minified",
+ "es6-promise": "es6-promise",
+ "fetch": "fetch",
+ "font-roboto": "polymer",
+ "iron-a11y-announcer": "polymer",
+ "iron-a11y-keys-behavior": "polymer",
+ "iron-autogrow-textarea": "polymer",
+ "iron-behaviors": "polymer",
+ "iron-dropdown": "polymer",
+ "iron-fit-behavior": "polymer",
+ "iron-flex-layout": "polymer",
+ "iron-form-element-behavior": "polymer",
+ "iron-icon": "polymer",
+ "iron-iconset-svg": "polymer",
+ "iron-input": "polymer",
+ "iron-menu-behavior": "polymer",
+ "iron-meta": "polymer",
+ "iron-overlay-behavior": "polymer",
+ "iron-resizable-behavior": "polymer",
+ "iron-selector": "polymer",
+ "iron-validatable-behavior": "polymer",
+ "moment": "moment",
+ "neon-animation": "polymer",
+ "page": "page.js",
+ "paper-button": "polymer",
+ "paper-icon-button": "polymer",
+ "paper-input": "polymer",
+ "paper-item": "polymer",
+ "paper-listbox": "polymer",
+ "paper-toggle-button": "polymer",
+ "paper-styles": "polymer",
+ "paper-tabs": "polymer",
+ "polymer": "polymer",
+ "polymer-resin": "polymer",
+ "promise-polyfill": "promise-polyfill",
+ "web-animations-js": "Apache2.0",
+ "webcomponentsjs": "polymer",
+ "paper-material": "polymer",
+ "paper-styles": "polymer",
+ "paper-behaviors": "polymer",
+ "paper-ripple": "polymer",
+ "iron-checked-element-behavior": "polymer",
+ "font-roboto": "polymer",
}
def build_bower_json(version_targets, seeds):
- """Generate bower JSON file, return its path.
+ """Generate bower JSON file, return its path.
- Args:
- version_targets: bazel target names of the versions.json file.
- seeds: an iterable of bower package names of the seed packages, ie.
- the packages whose versions we control manually.
- """
- bower_json = collections.OrderedDict()
- bower_json['name'] = 'bower2bazel-output'
- bower_json['version'] = '0.0.0'
- bower_json['description'] = 'Auto-generated bower.json for dependency management'
- bower_json['private'] = True
- bower_json['dependencies'] = {}
+ Args:
+ version_targets: bazel target names of the versions.json file.
+ seeds: an iterable of bower package names of the seed packages, ie.
+ the packages whose versions we control manually.
+ """
+ bower_json = collections.OrderedDict()
+ bower_json['name'] = 'bower2bazel-output'
+ bower_json['version'] = '0.0.0'
+ bower_json['description'] = 'Auto-generated bower.json for dependency ' + \
+ 'management'
+ bower_json['private'] = True
+ bower_json['dependencies'] = {}
- seeds = set(seeds)
- for v in version_targets:
- path = os.path.join("bazel-out/*-fastbuild/bin", v.lstrip("/").replace(":", "/"))
- fs = glob.glob(path)
- assert len(fs) == 1, '%s: file not found or multiple files found: %s' % (path, fs)
- with open(fs[0]) as f:
- j = json.load(f)
- if "" in j:
- # drop dummy entries.
- del j[""]
+ seeds = set(seeds)
+ for v in version_targets:
+ path = os.path.join("bazel-out/*-fastbuild/bin",
+ v.lstrip("/").replace(":", "/"))
+ fs = glob.glob(path)
+ err_msg = '%s: file not found or multiple files found: %s' % (path, fs)
+ assert len(fs) == 1, err_msg
+ with open(fs[0]) as f:
+ j = json.load(f)
+ if "" in j:
+ # drop dummy entries.
+ del j[""]
- trimmed = {}
- for k, v in j.items():
- if k in seeds:
- trimmed[k] = v
+ trimmed = {}
+ for k, v in j.items():
+ if k in seeds:
+ trimmed[k] = v
- bower_json['dependencies'].update(trimmed)
+ bower_json['dependencies'].update(trimmed)
- tmpdir = tempfile.mkdtemp()
- ret = os.path.join(tmpdir, 'bower.json')
- with open(ret, 'w') as f:
- json.dump(bower_json, f, indent=2)
- return ret
+ tmpdir = tempfile.mkdtemp()
+ ret = os.path.join(tmpdir, 'bower.json')
+ with open(ret, 'w') as f:
+ json.dump(bower_json, f, indent=2)
+ return ret
+
def decode(input):
- try:
- return input.decode("utf-8")
- except TypeError:
- return input
+ try:
+ return input.decode("utf-8")
+ except TypeError:
+ return input
+
def bower_command(args):
- base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
- exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
- fs = sorted(glob.glob(exp))
- assert len(fs) == 1, "bower tarball not found or have multiple versions %s" % fs
- return ["python", os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
+ base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
+ exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
+ fs = sorted(glob.glob(exp))
+ err_msg = "bower tarball not found or have multiple versions %s" % fs
+ assert len(fs) == 1, err_msg
+ return ["python",
+ os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-w', help='.bzl output for WORKSPACE')
- opts.add_option('-b', help='.bzl output for //lib:BUILD')
- opts, args = opts.parse_args()
+ opts = optparse.OptionParser()
+ opts.add_option('-w', help='.bzl output for WORKSPACE')
+ opts.add_option('-b', help='.bzl output for //lib:BUILD')
+ opts, args = opts.parse_args()
- target_str = subprocess.check_output([
- "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
- seed_str = subprocess.check_output([
- "bazel", "query", "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
- targets = [s for s in decode(target_str).split('\n') if s]
- seeds = [s for s in decode(seed_str).split('\n') if s]
- prefix = "//lib/js:"
- non_seeds = [s for s in seeds if not s.startswith(prefix)]
- assert not non_seeds, non_seeds
- seeds = set([s[len(prefix):] for s in seeds])
+ target_str = subprocess.check_output([
+ "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
+ seed_str = subprocess.check_output(
+ ["bazel", "query",
+ "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
+ targets = [s for s in decode(target_str).split('\n') if s]
+ seeds = [s for s in decode(seed_str).split('\n') if s]
+ prefix = "//lib/js:"
+ non_seeds = [s for s in seeds if not s.startswith(prefix)]
+ assert not non_seeds, non_seeds
+ seeds = set([s[len(prefix):] for s in seeds])
- version_targets = [t + "-versions.json" for t in targets]
- subprocess.check_call(['bazel', 'build'] + version_targets)
- bower_json_path = build_bower_json(version_targets, seeds)
- dir = os.path.dirname(bower_json_path)
- cmd = bower_command(["install"])
+ version_targets = [t + "-versions.json" for t in targets]
+ subprocess.check_call(['bazel', 'build'] + version_targets)
+ bower_json_path = build_bower_json(version_targets, seeds)
+ dir = os.path.dirname(bower_json_path)
+ cmd = bower_command(["install"])
- build_out = sys.stdout
- if opts.b:
- build_out = open(opts.b + ".tmp", 'w')
+ build_out = sys.stdout
+ if opts.b:
+ build_out = open(opts.b + ".tmp", 'w')
- ws_out = sys.stdout
- if opts.b:
- ws_out = open(opts.w + ".tmp", 'w')
+ ws_out = sys.stdout
+ if opts.b:
+ ws_out = open(opts.w + ".tmp", 'w')
- header = """# DO NOT EDIT
+ header = """# DO NOT EDIT
# generated with the following command:
#
# %s
@@ -171,30 +181,30 @@
""" % ' '.join(sys.argv)
- ws_out.write(header)
- build_out.write(header)
+ ws_out.write(header)
+ build_out.write(header)
- oldwd = os.getcwd()
- os.chdir(dir)
- subprocess.check_call(cmd)
+ oldwd = os.getcwd()
+ os.chdir(dir)
+ subprocess.check_call(cmd)
- interpret_bower_json(seeds, ws_out, build_out)
- ws_out.close()
- build_out.close()
+ interpret_bower_json(seeds, ws_out, build_out)
+ ws_out.close()
+ build_out.close()
- os.chdir(oldwd)
- os.rename(opts.w + ".tmp", opts.w)
- os.rename(opts.b + ".tmp", opts.b)
+ os.chdir(oldwd)
+ os.rename(opts.w + ".tmp", opts.w)
+ os.rename(opts.b + ".tmp", opts.b)
def dump_workspace(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
- out.write('def load_bower_archives():\n')
+ out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
+ out.write('def load_bower_archives():\n')
- for d in data:
- if d["name"] in seeds:
- continue
- out.write(""" bower_archive(
+ for d in data:
+ if d["name"] in seeds:
+ continue
+ out.write(""" bower_archive(
name = "%(name)s",
package = "%(normalized-name)s",
version = "%(version)s",
@@ -203,48 +213,49 @@
def dump_build(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
- out.write('def define_bower_components():\n')
- for d in data:
- out.write(" bower_component(\n")
- out.write(" name = \"%s\",\n" % d["name"])
- out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
- deps = sorted(d.get("dependencies", {}).keys())
- if deps:
- if len(deps) == 1:
- out.write(" deps = [ \":%s\" ],\n" % deps[0])
- else:
- out.write(" deps = [\n")
- for dep in deps:
- out.write(" \":%s\",\n" % dep)
- out.write(" ],\n")
- if d["name"] in seeds:
- out.write(" seed = True,\n")
- out.write(" )\n")
- # done
+ out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
+ out.write('def define_bower_components():\n')
+ for d in data:
+ out.write(" bower_component(\n")
+ out.write(" name = \"%s\",\n" % d["name"])
+ out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
+ deps = sorted(d.get("dependencies", {}).keys())
+ if deps:
+ if len(deps) == 1:
+ out.write(" deps = [ \":%s\" ],\n" % deps[0])
+ else:
+ out.write(" deps = [\n")
+ for dep in deps:
+ out.write(" \":%s\",\n" % dep)
+ out.write(" ],\n")
+ if d["name"] in seeds:
+ out.write(" seed = True,\n")
+ out.write(" )\n")
+ # done
def interpret_bower_json(seeds, ws_out, build_out):
- out = subprocess.check_output(["find", "bower_components/", "-name", ".bower.json"])
+ out = subprocess.check_output(["find", "bower_components/", "-name",
+ ".bower.json"])
- data = []
- for f in sorted(decode(out).split('\n')):
- if not f:
- continue
- pkg = json.load(open(f))
- pkg_name = pkg["name"]
+ data = []
+ for f in sorted(decode(out).split('\n')):
+ if not f:
+ continue
+ pkg = json.load(open(f))
+ pkg_name = pkg["name"]
- pkg["bazel-sha1"] = bowerutil.hash_bower_component(
- hashlib.sha1(), os.path.dirname(f)).hexdigest()
- license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
+ pkg["bazel-sha1"] = bowerutil.hash_bower_component(
+ hashlib.sha1(), os.path.dirname(f)).hexdigest()
+ license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
- pkg["bazel-license"] = license
- pkg["normalized-name"] = pkg["_originalSource"]
- data.append(pkg)
+ pkg["bazel-license"] = license
+ pkg["normalized-name"] = pkg["_originalSource"]
+ data.append(pkg)
- dump_workspace(data, seeds, ws_out)
- dump_build(data, seeds, build_out)
+ dump_workspace(data, seeds, ws_out)
+ dump_build(data, seeds, build_out)
if __name__ == '__main__':
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/tools/js/bowerutil.py b/tools/js/bowerutil.py
index c2e11cd..9fb82af 100644
--- a/tools/js/bowerutil.py
+++ b/tools/js/bowerutil.py
@@ -16,31 +16,31 @@
def hash_bower_component(hash_obj, path):
- """Hash the contents of a bower component directory.
+ """Hash the contents of a bower component directory.
- This is a stable hash of a directory downloaded with `bower install`, minus
- the .bower.json file, which is autogenerated each time by bower. Used in lieu
- of hashing a zipfile of the contents, since zipfiles are difficult to hash in
- a stable manner.
+ This is a stable hash of a directory downloaded with `bower install`, minus
+ the .bower.json file, which is autogenerated each time by bower. Used in
+ lieu of hashing a zipfile of the contents, since zipfiles are difficult to
+ hash in a stable manner.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the directory to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the directory to hash.
- Returns:
- The passed-in hash_obj.
- """
- if not os.path.isdir(path):
- raise ValueError('Not a directory: %s' % path)
+ Returns:
+ The passed-in hash_obj.
+ """
+ if not os.path.isdir(path):
+ raise ValueError('Not a directory: %s' % path)
- path = os.path.abspath(path)
- for root, dirs, files in os.walk(path):
- dirs.sort()
- for f in sorted(files):
- if f == '.bower.json':
- continue
- p = os.path.join(root, f)
- hash_obj.update(p[len(path)+1:].encode("utf-8"))
- hash_obj.update(open(p, "rb").read())
+ path = os.path.abspath(path)
+ for root, dirs, files in os.walk(path):
+ dirs.sort()
+ for f in sorted(files):
+ if f == '.bower.json':
+ continue
+ p = os.path.join(root, f)
+ hash_obj.update(p[len(path)+1:].encode("utf-8"))
+ hash_obj.update(open(p, "rb").read())
- return hash_obj
+ return hash_obj
diff --git a/tools/js/download_bower.py b/tools/js/download_bower.py
index 3db39d5..c9a5df6 100755
--- a/tools/js/download_bower.py
+++ b/tools/js/download_bower.py
@@ -30,99 +30,105 @@
def bower_cmd(bower, *args):
- cmd = bower.split(' ')
- cmd.extend(args)
- return cmd
+ cmd = bower.split(' ')
+ cmd.extend(args)
+ return cmd
def bower_info(bower, name, package, version):
- cmd = bower_cmd(bower, '-l=error', '-j',
- 'info', '%s#%s' % (package, version))
- try:
- p = subprocess.Popen(cmd , stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except:
- sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
- raise
- out, err = p.communicate()
- if p.returncode:
- sys.stderr.write(err)
- raise OSError('Command failed: %s' % ' '.join(cmd))
+ cmd = bower_cmd(bower, '-l=error', '-j',
+ 'info', '%s#%s' % (package, version))
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except:
+ sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
+ raise
+ out, err = p.communicate()
+ if p.returncode:
+ sys.stderr.write(err)
+ raise OSError('Command failed: %s' % ' '.join(cmd))
- try:
- info = json.loads(out)
- except ValueError:
- raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
- info_name = info.get('name')
- if info_name != name:
- raise ValueError('expected package name %s, got: %s' % (name, info_name))
- return info
+ try:
+ info = json.loads(out)
+ except ValueError:
+ raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
+ info_name = info.get('name')
+ if info_name != name:
+ raise ValueError(
+ 'expected package name %s, got: %s' % (name, info_name))
+ return info
def ignore_deps(info):
- # Tell bower to ignore dependencies so we just download this component. This
- # is just an optimization, since we only pick out the component we need, but
- # it's important when downloading sizable dependency trees.
- #
- # As of 1.6.5 I don't think ignoredDependencies can be specified on the
- # command line with --config, so we have to create .bowerrc.
- deps = info.get('dependencies')
- if deps:
- with open(os.path.join('.bowerrc'), 'w') as f:
- json.dump({'ignoredDependencies': list(deps.keys())}, f)
+ # Tell bower to ignore dependencies so we just download this component.
+ # This is just an optimization, since we only pick out the component we
+ # need, but it's important when downloading sizable dependency trees.
+ #
+ # As of 1.6.5 I don't think ignoredDependencies can be specified on the
+ # command line with --config, so we have to create .bowerrc.
+ deps = info.get('dependencies')
+ if deps:
+ with open(os.path.join('.bowerrc'), 'w') as f:
+ json.dump({'ignoredDependencies': list(deps.keys())}, f)
def cache_entry(name, package, version, sha1):
- if not sha1:
- sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
- return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
+ if not sha1:
+ sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
+ return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-n', help='short name of component')
- opts.add_option('-b', help='bower command')
- opts.add_option('-p', help='full package name of component')
- opts.add_option('-v', help='version number')
- opts.add_option('-s', help='expected content sha1')
- opts.add_option('-o', help='output file location')
- opts, args_ = opts.parse_args(args)
+ opts = optparse.OptionParser()
+ opts.add_option('-n', help='short name of component')
+ opts.add_option('-b', help='bower command')
+ opts.add_option('-p', help='full package name of component')
+ opts.add_option('-v', help='version number')
+ opts.add_option('-s', help='expected content sha1')
+ opts.add_option('-o', help='output file location')
+ opts, args_ = opts.parse_args(args)
- assert opts.p
- assert opts.v
- assert opts.n
+ assert opts.p
+ assert opts.v
+ assert opts.n
- cwd = os.getcwd()
- outzip = os.path.join(cwd, opts.o)
- cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
+ cwd = os.getcwd()
+ outzip = os.path.join(cwd, opts.o)
+ cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
- if not os.path.exists(cached):
- info = bower_info(opts.b, opts.n, opts.p, opts.v)
- ignore_deps(info)
- subprocess.check_call(
- bower_cmd(opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
- bc = os.path.join(cwd, 'bower_components')
- subprocess.check_call(
- ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
- cwd=bc)
+ if not os.path.exists(cached):
+ info = bower_info(opts.b, opts.n, opts.p, opts.v)
+ ignore_deps(info)
+ subprocess.check_call(
+ bower_cmd(
+ opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
+ bc = os.path.join(cwd, 'bower_components')
+ subprocess.check_call(
+ ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
+ cwd=bc)
- if opts.s:
- path = os.path.join(bc, opts.n)
- sha1 = bowerutil.hash_bower_component(hashlib.sha1(), path).hexdigest()
- if opts.s != sha1:
- print((
- '%s#%s:\n'
- 'expected %s\n'
- 'received %s\n') % (opts.p, opts.v, opts.s, sha1), file=sys.stderr)
- try:
- os.remove(cached)
- except OSError as err:
- if path.exists(cached):
- print('error removing %s: %s' % (cached, err), file=sys.stderr)
- return 1
+ if opts.s:
+ path = os.path.join(bc, opts.n)
+ sha1 = bowerutil.hash_bower_component(
+ hashlib.sha1(), path).hexdigest()
+ if opts.s != sha1:
+ print((
+ '%s#%s:\n'
+ 'expected %s\n'
+ 'received %s\n') % (opts.p, opts.v, opts.s, sha1),
+ file=sys.stderr)
+ try:
+ os.remove(cached)
+ except OSError as err:
+ if path.exists(cached):
+ print('error removing %s: %s' % (cached, err),
+ file=sys.stderr)
+ return 1
- shutil.copyfile(cached, outzip)
- return 0
+ shutil.copyfile(cached, outzip)
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/npm_pack.py b/tools/js/npm_pack.py
index de45083..d817701 100755
--- a/tools/js/npm_pack.py
+++ b/tools/js/npm_pack.py
@@ -32,49 +32,49 @@
def is_bundled(tar):
- # No entries for directories, so scan for a matching prefix.
- for entry in tar.getmembers():
- if entry.name.startswith('package/node_modules/'):
- return True
- return False
+ # No entries for directories, so scan for a matching prefix.
+ for entry in tar.getmembers():
+ if entry.name.startswith('package/node_modules/'):
+ return True
+ return False
def bundle_dependencies():
- with open('package.json') as f:
- package = json.load(f)
- package['bundledDependencies'] = list(package['dependencies'].keys())
- with open('package.json', 'w') as f:
- json.dump(package, f)
+ with open('package.json') as f:
+ package = json.load(f)
+ package['bundledDependencies'] = list(package['dependencies'].keys())
+ with open('package.json', 'w') as f:
+ json.dump(package, f)
def main(args):
- if len(args) != 2:
- print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
- return 1
+ if len(args) != 2:
+ print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, version = args
- filename = '%s-%s.tgz' % (name, version)
- url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
+ name, version = args
+ filename = '%s-%s.tgz' % (name, version)
+ url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
- tmpdir = tempfile.mkdtemp();
- tgz = os.path.join(tmpdir, filename)
- atexit.register(lambda: shutil.rmtree(tmpdir))
+ tmpdir = tempfile.mkdtemp()
+ tgz = os.path.join(tmpdir, filename)
+ atexit.register(lambda: shutil.rmtree(tmpdir))
- subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
- with tarfile.open(tgz, 'r:gz') as tar:
- if is_bundled(tar):
- print('%s already has bundled node_modules' % filename)
- return 1
- tar.extractall(path=tmpdir)
+ subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
+ with tarfile.open(tgz, 'r:gz') as tar:
+ if is_bundled(tar):
+ print('%s already has bundled node_modules' % filename)
+ return 1
+ tar.extractall(path=tmpdir)
- oldpwd = os.getcwd()
- os.chdir(os.path.join(tmpdir, 'package'))
- bundle_dependencies()
- subprocess.check_call(['npm', 'install'])
- subprocess.check_call(['npm', 'pack'])
- shutil.copy(filename, os.path.join(oldpwd, filename))
- return 0
+ oldpwd = os.getcwd()
+ os.chdir(os.path.join(tmpdir, 'package'))
+ bundle_dependencies()
+ subprocess.check_call(['npm', 'install'])
+ subprocess.check_call(['npm', 'pack'])
+ shutil.copy(filename, os.path.join(oldpwd, filename))
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/run_npm_binary.py b/tools/js/run_npm_binary.py
index d769b98..dfcdaca 100644
--- a/tools/js/run_npm_binary.py
+++ b/tools/js/run_npm_binary.py
@@ -27,65 +27,68 @@
def extract(path, outdir, bin):
- if os.path.exists(os.path.join(outdir, bin)):
- return # Another process finished extracting, ignore.
+ if os.path.exists(os.path.join(outdir, bin)):
+ return # Another process finished extracting, ignore.
- # Use a temp directory adjacent to outdir so shutil.move can use the same
- # device atomically.
- tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def cleanup():
- try:
- shutil.rmtree(tmpdir)
- except OSError:
- pass # Too late now
- atexit.register(cleanup)
+ # Use a temp directory adjacent to outdir so shutil.move can use the same
+ # device atomically.
+ tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def extract_one(mem):
- dest = os.path.join(outdir, mem.name)
- tar.extract(mem, path=tmpdir)
- try:
- os.makedirs(os.path.dirname(dest))
- except OSError:
- pass # Either exists, or will fail on the next line.
- shutil.move(os.path.join(tmpdir, mem.name), dest)
+ def cleanup():
+ try:
+ shutil.rmtree(tmpdir)
+ except OSError:
+ pass # Too late now
+ atexit.register(cleanup)
- with tarfile.open(path, 'r:gz') as tar:
- for mem in tar.getmembers():
- if mem.name != bin:
- extract_one(mem)
- # Extract bin last so other processes only short circuit when extraction is
- # finished.
- extract_one(tar.getmember(bin))
+ def extract_one(mem):
+ dest = os.path.join(outdir, mem.name)
+ tar.extract(mem, path=tmpdir)
+ try:
+ os.makedirs(os.path.dirname(dest))
+ except OSError:
+ pass # Either exists, or will fail on the next line.
+ shutil.move(os.path.join(tmpdir, mem.name), dest)
+
+ with tarfile.open(path, 'r:gz') as tar:
+ for mem in tar.getmembers():
+ if mem.name != bin:
+ extract_one(mem)
+ # Extract bin last so other processes only short circuit when
+ # extraction is finished.
+ extract_one(tar.getmember(bin))
+
def main(args):
- path = args[0]
- suffix = '.npm_binary.tgz'
- tgz = os.path.basename(path)
+ path = args[0]
+ suffix = '.npm_binary.tgz'
+ tgz = os.path.basename(path)
- parts = tgz[:-len(suffix)].split('@')
+ parts = tgz[:-len(suffix)].split('@')
- if not tgz.endswith(suffix) or len(parts) != 2:
- print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
- return 1
+ if not tgz.endswith(suffix) or len(parts) != 2:
+ print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, _ = parts
+ name, _ = parts
- # Avoid importing from gerrit because we don't want to depend on the right CWD.
- sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
- outdir = '%s-%s' % (path[:-len(suffix)], sha1)
- rel_bin = os.path.join('package', 'bin', name)
- bin = os.path.join(outdir, rel_bin)
- if not os.path.isfile(bin):
- extract(path, outdir, rel_bin)
+ # Avoid importing from gerrit because we don't want to depend on the right
+ # working directory
+ sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
+ outdir = '%s-%s' % (path[:-len(suffix)], sha1)
+ rel_bin = os.path.join('package', 'bin', name)
+ bin = os.path.join(outdir, rel_bin)
+ if not os.path.isfile(bin):
+ extract(path, outdir, rel_bin)
- nodejs = spawn.find_executable('nodejs')
- if nodejs:
- # Debian installs Node.js as 'nodejs', due to a conflict with another
- # package.
- subprocess.check_call([nodejs, bin] + args[1:])
- else:
- subprocess.check_call([bin] + args[1:])
+ nodejs = spawn.find_executable('nodejs')
+ if nodejs:
+ # Debian installs Node.js as 'nodejs', due to a conflict with another
+ # package.
+ subprocess.check_call([nodejs, bin] + args[1:])
+ else:
+ subprocess.check_call([bin] + args[1:])
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/maven/mvn.py b/tools/maven/mvn.py
index 50c4ac6..d47d027 100755
--- a/tools/maven/mvn.py
+++ b/tools/maven/mvn.py
@@ -29,56 +29,57 @@
args, ctx = opts.parse_args()
if not args.v:
- print('version is empty', file=stderr)
- exit(1)
+ print('version is empty', file=stderr)
+ exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
- root = path.dirname(root)
+ root = path.dirname(root)
if 'install' == args.a:
- cmd = [
- 'mvn',
- 'install:install-file',
- '-Dversion=%s' % args.v,
- ]
+ cmd = [
+ 'mvn',
+ 'install:install-file',
+ '-Dversion=%s' % args.v,
+ ]
elif 'deploy' == args.a:
- cmd = [
- 'mvn',
- 'gpg:sign-and-deploy-file',
- '-DrepositoryId=%s' % args.repository,
- '-Durl=%s' % args.url,
- ]
+ cmd = [
+ 'mvn',
+ 'gpg:sign-and-deploy-file',
+ '-DrepositoryId=%s' % args.repository,
+ '-Durl=%s' % args.url,
+ ]
else:
- print("unknown action -a %s" % args.a, file=stderr)
- exit(1)
+ print("unknown action -a %s" % args.a, file=stderr)
+ exit(1)
for spec in args.s:
- artifact, packaging_type, src = spec.split(':')
- exe = cmd + [
- '-DpomFile=%s' % path.join(root, 'tools', 'maven', '%s_pom.xml' % artifact),
- '-Dpackaging=%s' % packaging_type,
- '-Dfile=%s' % src,
- ]
- try:
- if environ.get('VERBOSE'):
- print(' '.join(exe), file=stderr)
- check_output(exe)
- except Exception as e:
- print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
- file=stderr)
- if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
- print('Command output\n%s' % e.output, file=stderr)
- exit(1)
+ artifact, packaging_type, src = spec.split(':')
+ exe = cmd + [
+ '-DpomFile=%s' % path.join(root, 'tools', 'maven',
+ '%s_pom.xml' % artifact),
+ '-Dpackaging=%s' % packaging_type,
+ '-Dfile=%s' % src,
+ ]
+ try:
+ if environ.get('VERBOSE'):
+ print(' '.join(exe), file=stderr)
+ check_output(exe)
+ except Exception as e:
+ print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
+ file=stderr)
+ if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
+ print('Command output\n%s' % e.output, file=stderr)
+ exit(1)
out = stderr
if args.o:
- out = open(args.o, 'w')
+ out = open(args.o, 'w')
with out as fd:
- if args.repository:
- print('Repository: %s' % args.repository, file=fd)
- if args.url:
- print('URL: %s' % args.url, file=fd)
- print('Version: %s' % args.v, file=fd)
+ if args.repository:
+ print('Repository: %s' % args.repository, file=fd)
+ if args.url:
+ print('URL: %s' % args.url, file=fd)
+ print('Version: %s' % args.v, file=fd)
diff --git a/tools/merge_jars.py b/tools/merge_jars.py
index 97a87c4..6b46069 100755
--- a/tools/merge_jars.py
+++ b/tools/merge_jars.py
@@ -17,11 +17,10 @@
import collections
import sys
import zipfile
-import io
if len(sys.argv) < 3:
- print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
- exit(1)
+ print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
+ exit(1)
outfile = sys.argv[1]
infiles = sys.argv[2:]
@@ -29,22 +28,22 @@
SERVICES = 'META-INF/services/'
try:
- with zipfile.ZipFile(outfile, 'w') as outzip:
- services = collections.defaultdict(lambda: '')
- for infile in infiles:
- with zipfile.ZipFile(infile) as inzip:
- for info in inzip.infolist():
- n = info.filename
- if n in seen:
- continue
- elif n.startswith(SERVICES):
- # Concatenate all provider configuration files.
- services[n] += inzip.read(n).decode("UTF-8")
- continue
- outzip.writestr(info, inzip.read(n))
- seen.add(n)
+ with zipfile.ZipFile(outfile, 'w') as outzip:
+ services = collections.defaultdict(lambda: '')
+ for infile in infiles:
+ with zipfile.ZipFile(infile) as inzip:
+ for info in inzip.infolist():
+ n = info.filename
+ if n in seen:
+ continue
+ elif n.startswith(SERVICES):
+ # Concatenate all provider configuration files.
+ services[n] += inzip.read(n).decode("UTF-8")
+ continue
+ outzip.writestr(info, inzip.read(n))
+ seen.add(n)
- for n, v in list(services.items()):
- outzip.writestr(n, v)
+ for n, v in list(services.items()):
+ outzip.writestr(n, v)
except Exception as err:
- exit('Failed to merge jars: %s' % err)
+ exit('Failed to merge jars: %s' % err)
diff --git a/tools/release-announcement.py b/tools/release-announcement.py
index f700185..a25a340 100755
--- a/tools/release-announcement.py
+++ b/tools/release-announcement.py
@@ -101,9 +101,9 @@
summary = summary + "."
data = {
- "version": Version(options.version),
- "previous": options.previous,
- "summary": summary
+ "version": Version(options.version),
+ "previous": options.previous,
+ "summary": summary
}
war = os.path.join(
diff --git a/tools/util.py b/tools/util.py
index e8182ed..45d0541 100644
--- a/tools/util.py
+++ b/tools/util.py
@@ -15,57 +15,59 @@
from os import path
REPO_ROOTS = {
- 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
- 'GERRIT_API': 'https://gerrit-api.commondatastorage.googleapis.com/release',
- 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
- 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
- 'MAVEN_SNAPSHOT': 'https://oss.sonatype.org/content/repositories/snapshots',
+ 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
+ 'GERRIT_API':
+ 'https://gerrit-api.commondatastorage.googleapis.com/release',
+ 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
+ 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
+ 'MAVEN_SNAPSHOT':
+ 'https://oss.sonatype.org/content/repositories/snapshots',
}
def resolve_url(url, redirects):
- """ Resolve URL of a Maven artifact.
+ """ Resolve URL of a Maven artifact.
- prefix:path is passed as URL. prefix identifies known or custom
- repositories that can be rewritten in redirects set, passed as
- second arguments.
+ prefix:path is passed as URL. prefix identifies known or custom
+ repositories that can be rewritten in redirects set, passed as
+    the second argument.
- A special case is supported, when prefix neither exists in
- REPO_ROOTS, no in redirects set: the url is returned as is.
- This enables plugins to pass custom maven_repository URL as is
- directly to maven_jar().
+ A special case is supported, when prefix neither exists in
+    REPO_ROOTS, nor in redirects set: the URL is returned as is.
+ This enables plugins to pass custom maven_repository URL as is
+ directly to maven_jar().
- Returns a resolved path for Maven artifact.
- """
- s = url.find(':')
- if s < 0:
- return url
- scheme, rest = url[:s], url[s+1:]
- if scheme in redirects:
- root = redirects[scheme]
- elif scheme in REPO_ROOTS:
- root = REPO_ROOTS[scheme]
- else:
- return url
- root = root.rstrip('/')
- rest = rest.lstrip('/')
- return '/'.join([root, rest])
+ Returns a resolved path for Maven artifact.
+ """
+ s = url.find(':')
+ if s < 0:
+ return url
+ scheme, rest = url[:s], url[s+1:]
+ if scheme in redirects:
+ root = redirects[scheme]
+ elif scheme in REPO_ROOTS:
+ root = REPO_ROOTS[scheme]
+ else:
+ return url
+ root = root.rstrip('/')
+ rest = rest.lstrip('/')
+ return '/'.join([root, rest])
def hash_file(hash_obj, path):
- """Hash the contents of a file.
+ """Hash the contents of a file.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the file to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the file to hash.
- Returns:
- The passed-in hash_obj.
- """
- with open(path, 'rb') as f:
- while True:
- b = f.read(8192)
- if not b:
- break
- hash_obj.update(b)
- return hash_obj
+ Returns:
+ The passed-in hash_obj.
+ """
+ with open(path, 'rb') as f:
+ while True:
+ b = f.read(8192)
+ if not b:
+ break
+ hash_obj.update(b)
+ return hash_obj
diff --git a/tools/util_test.py b/tools/util_test.py
index 30647ba..fa67696 100644
--- a/tools/util_test.py
+++ b/tools/util_test.py
@@ -16,28 +16,32 @@
import unittest
from util import resolve_url
+
class TestResolveUrl(unittest.TestCase):
- """ run to test:
- python -m unittest -v util_test
- """
+ """ run to test:
+ python -m unittest -v util_test
+ """
- def testKnown(self):
- url = resolve_url('GERRIT:foo.jar', {})
- self.assertEqual(url, 'http://gerrit-maven.storage.googleapis.com/foo.jar')
+ def testKnown(self):
+ url = resolve_url('GERRIT:foo.jar', {})
+ self.assertEqual(url,
+ 'http://gerrit-maven.storage.googleapis.com/foo.jar')
- def testKnownRedirect(self):
- url = resolve_url('MAVEN_CENTRAL:foo.jar',
- {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
- self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
+ def testKnownRedirect(self):
+ url = resolve_url('MAVEN_CENTRAL:foo.jar',
+ {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
+ self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
- def testCustom(self):
- url = resolve_url('http://maven.example.com/release/foo.jar', {})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustom(self):
+ url = resolve_url('http://maven.example.com/release/foo.jar', {})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
- def testCustomRedirect(self):
- url = resolve_url('MAVEN_EXAMPLE:foo.jar',
- {'MAVEN_EXAMPLE': 'http://maven.example.com/release'})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustomRedirect(self):
+ url = resolve_url('MAVEN_EXAMPLE:foo.jar',
+ {'MAVEN_EXAMPLE':
+ 'http://maven.example.com/release'})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+
if __name__ == '__main__':
- unittest.main()
+ unittest.main()
diff --git a/tools/version.py b/tools/version.py
index 72b0134..4aafcb0 100755
--- a/tools/version.py
+++ b/tools/version.py
@@ -23,24 +23,24 @@
opts, args = parser.parse_args()
if not len(args):
- parser.error('not enough arguments')
+ parser.error('not enough arguments')
elif len(args) > 1:
- parser.error('too many arguments')
+ parser.error('too many arguments')
DEST_PATTERN = r'\g<1>%s\g<3>' % args[0]
def replace_in_file(filename, src_pattern):
- try:
- f = open(filename, "r")
- s = f.read()
- f.close()
- s = re.sub(src_pattern, DEST_PATTERN, s)
- f = open(filename, "w")
- f.write(s)
- f.close()
- except IOError as err:
- print('error updating %s: %s' % (filename, err), file=sys.stderr)
+ try:
+ f = open(filename, "r")
+ s = f.read()
+ f.close()
+ s = re.sub(src_pattern, DEST_PATTERN, s)
+ f = open(filename, "w")
+ f.write(s)
+ f.close()
+ except IOError as err:
+ print('error updating %s: %s' % (filename, err), file=sys.stderr)
src_pattern = re.compile(r'^(\s*<version>)([-.\w]+)(</version>\s*)$',
@@ -48,8 +48,8 @@
for project in ['gerrit-acceptance-framework', 'gerrit-extension-api',
'gerrit-plugin-api', 'gerrit-plugin-gwtui',
'gerrit-war']:
- pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
- replace_in_file(pom, src_pattern)
+ pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
+ replace_in_file(pom, src_pattern)
src_pattern = re.compile(r'^(GERRIT_VERSION = ")([-.\w]+)(")$', re.MULTILINE)
replace_in_file('version.bzl', src_pattern)