Merge "Deduplicate conditions on current change in Submit"
diff --git a/.bazelproject b/.bazelproject
index e3a7a9c..8a726eb 100644
--- a/.bazelproject
+++ b/.bazelproject
@@ -4,6 +4,7 @@
directories:
.
+ -bin
-eclipse-out
-contrib
-gerrit-package-plugins
diff --git a/Documentation/config-gerrit.txt b/Documentation/config-gerrit.txt
index ce7adc2..7ed0e17 100644
--- a/Documentation/config-gerrit.txt
+++ b/Documentation/config-gerrit.txt
@@ -773,10 +773,11 @@
+
Default is 128 MiB per cache, except:
+
+* `"change_notes"`: disk storage is disabled by default
* `"diff_summary"`: default is `1g` (1 GiB of disk space)
+
-If 0, disk storage for the cache is disabled.
+If 0 or negative, disk storage for the cache is disabled.
==== [[cache_names]]Standard Caches
diff --git a/Documentation/replace_macros.py b/Documentation/replace_macros.py
index c76d133..6f90697 100755
--- a/Documentation/replace_macros.py
+++ b/Documentation/replace_macros.py
@@ -183,7 +183,8 @@
element.insertBefore(a, element.firstChild);
// remove the link icon when the mouse is moved away,
- // but keep it shown if the mouse is over the element, the link or the icon
+ // but keep it shown if the mouse is over the element, the link or
+ // the icon
hide = function(evt) {
if (document.elementFromPoint(evt.clientX, evt.clientY) != element
&& document.elementFromPoint(evt.clientX, evt.clientY) != a
@@ -229,54 +230,54 @@
options, _ = opts.parse_args()
try:
- try:
- out_file = open(options.out, 'w', errors='ignore')
- src_file = open(options.src, 'r', errors='ignore')
- except TypeError:
- out_file = open(options.out, 'w')
- src_file = open(options.src, 'r')
- last_line = ''
- ignore_next_line = False
- last_title = ''
- for line in src_file:
- if PAT_GERRIT.match(last_line):
- # Case of "GERRIT\n------" at the footer
- out_file.write(GERRIT_UPLINK)
- last_line = ''
- elif PAT_SEARCHBOX.match(last_line):
- # Case of 'SEARCHBOX\n---------'
- if options.searchbox:
- out_file.write(SEARCH_BOX)
- last_line = ''
- elif PAT_INCLUDE.match(line):
- # Case of 'include::<filename>'
- match = PAT_INCLUDE.match(line)
- out_file.write(last_line)
- last_line = match.group(1) + options.suffix + match.group(2) + '\n'
- elif PAT_STARS.match(line):
- if PAT_TITLE.match(last_line):
- # Case of the title in '.<title>\n****\nget::<url>\n****'
- match = PAT_TITLE.match(last_line)
- last_title = GET_TITLE % match.group(1)
- else:
- out_file.write(last_line)
- last_title = ''
- elif PAT_GET.match(line):
- # Case of '****\nget::<url>\n****' in rest api
- url = PAT_GET.match(line).group(1)
- out_file.write(GET_MACRO.format(url) % last_title)
- ignore_next_line = True
- elif ignore_next_line:
- # Handle the trailing '****' of the 'get::' case
- last_line = ''
- ignore_next_line = False
- else:
- out_file.write(last_line)
- last_line = line
- out_file.write(last_line)
- out_file.write(LINK_SCRIPT)
- out_file.close()
+ try:
+ out_file = open(options.out, 'w', errors='ignore')
+ src_file = open(options.src, 'r', errors='ignore')
+ except TypeError:
+ out_file = open(options.out, 'w')
+ src_file = open(options.src, 'r')
+ last_line = ''
+ ignore_next_line = False
+ last_title = ''
+ for line in src_file:
+ if PAT_GERRIT.match(last_line):
+ # Case of "GERRIT\n------" at the footer
+ out_file.write(GERRIT_UPLINK)
+ last_line = ''
+ elif PAT_SEARCHBOX.match(last_line):
+ # Case of 'SEARCHBOX\n---------'
+ if options.searchbox:
+ out_file.write(SEARCH_BOX)
+ last_line = ''
+ elif PAT_INCLUDE.match(line):
+ # Case of 'include::<filename>'
+ match = PAT_INCLUDE.match(line)
+ out_file.write(last_line)
+ last_line = match.group(1) + options.suffix + match.group(2) + '\n'
+ elif PAT_STARS.match(line):
+ if PAT_TITLE.match(last_line):
+ # Case of the title in '.<title>\n****\nget::<url>\n****'
+ match = PAT_TITLE.match(last_line)
+ last_title = GET_TITLE % match.group(1)
+ else:
+ out_file.write(last_line)
+ last_title = ''
+ elif PAT_GET.match(line):
+ # Case of '****\nget::<url>\n****' in rest api
+ url = PAT_GET.match(line).group(1)
+ out_file.write(GET_MACRO.format(url) % last_title)
+ ignore_next_line = True
+ elif ignore_next_line:
+ # Handle the trailing '****' of the 'get::' case
+ last_line = ''
+ ignore_next_line = False
+ else:
+ out_file.write(last_line)
+ last_line = line
+ out_file.write(last_line)
+ out_file.write(LINK_SCRIPT)
+ out_file.close()
except IOError as err:
- sys.stderr.write(
- "error while expanding %s to %s: %s" % (options.src, options.out, err))
- exit(1)
+ sys.stderr.write(
+ "error while expanding %s to %s: %s" % (options.src, options.out, err))
+ exit(1)
diff --git a/WORKSPACE b/WORKSPACE
index 15d8651..757d86e 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -181,8 +181,8 @@
maven_jar(
name = "gson",
- artifact = "com.google.code.gson:gson:2.8.2",
- sha1 = "3edcfe49d2c6053a70a2a47e4e1c2f94998a49cf",
+ artifact = "com.google.code.gson:gson:2.8.4",
+ sha1 = "d0de1ca9b69e69d1d497ee3c6009d015f64dad57",
)
maven_jar(
@@ -194,8 +194,8 @@
maven_jar(
name = "protobuf",
- artifact = "com.google.protobuf:protobuf-java:3.4.0",
- sha1 = "b32aba0cbe737a4ca953f71688725972e3ee927c",
+ artifact = "com.google.protobuf:protobuf-java:3.5.1",
+ sha1 = "8c3492f7662fa1cbf8ca76a0f5eb1146f7725acd",
)
load("//lib:guava.bzl", "GUAVA_VERSION", "GUAVA_BIN_SHA1")
@@ -711,6 +711,18 @@
sha1 = "636e49d675bc28e0b3ae0edd077d6acbbb159166",
)
+maven_jar(
+ name = "truth-liteproto-extension",
+ artifact = "com.google.truth.extensions:truth-liteproto-extension:" + TRUTH_VERS,
+ sha1 = "21210ac07e5cfbe83f04733f806224a6c0ae4d2d",
+)
+
+maven_jar(
+ name = "truth-proto-extension",
+ artifact = "com.google.truth.extensions:truth-proto-extension:" + TRUTH_VERS,
+ sha1 = "5a2b504143a5fec2b6be8bce292b3b7577a81789",
+)
+
# When bumping the easymock version number, make sure to also move powermock to a compatible version
maven_jar(
name = "easymock",
diff --git a/contrib/check-valid-commit.py b/contrib/check-valid-commit.py
index d26fa58..763ae3e 100755
--- a/contrib/check-valid-commit.py
+++ b/contrib/check-valid-commit.py
@@ -10,13 +10,16 @@
SSH_USER = 'bot'
SSH_HOST = 'localhost'
SSH_PORT = 29418
-SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER, SSH_HOST, SSH_PORT)
+SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER,
+ SSH_HOST,
+ SSH_PORT)
FAILURE_SCORE = '--code-review=-2'
FAILURE_MESSAGE = 'This commit message does not match the standard.' \
+ ' Please correct the commit message and upload a replacement patch.'
PASS_SCORE = '--code-review=0'
PASS_MESSAGE = ''
+
def main():
change = None
project = None
@@ -25,8 +28,9 @@
patchset = None
try:
- opts, _args = getopt.getopt(sys.argv[1:], '', \
- ['change=', 'project=', 'branch=', 'commit=', 'patchset='])
+ opts, _args = getopt.getopt(sys.argv[1:], '',
+ ['change=', 'project=', 'branch=',
+ 'commit=', 'patchset='])
except getopt.GetoptError as err:
print('Error: %s' % (err))
usage()
@@ -48,8 +52,7 @@
usage()
sys.exit(-1)
- if change == None or project == None or branch == None \
- or commit == None or patchset == None:
+ if any(p is None for p in [change, project, branch, commit, patchset]):
usage()
sys.exit(-1)
@@ -57,16 +60,16 @@
status, output = subprocess.getstatusoutput(command)
if status != 0:
- print('Error running \'%s\'. status: %s, output:\n\n%s' % \
- (command, status, output))
+ print('Error running \'%s\'. status: %s, output:\n\n%s' %
+ (command, status, output))
sys.exit(-1)
commitMessage = output[(output.find('\n\n')+2):]
commitLines = commitMessage.split('\n')
if len(commitLines) > 1 and len(commitLines[1]) != 0:
- fail(commit, 'Invalid commit summary. The summary must be ' \
- + 'one line followed by a blank line.')
+ fail(commit, 'Invalid commit summary. The summary must be '
+ + 'one line followed by a blank line.')
i = 0
for line in commitLines:
@@ -76,23 +79,27 @@
passes(commit)
+
def usage():
print('Usage:\n')
- print(sys.argv[0] + ' --change <change id> --project <project name> ' \
- + '--branch <branch> --commit <sha1> --patchset <patchset id>')
+ print(sys.argv[0] + ' --change <change id> --project <project name> '
+ + '--branch <branch> --commit <sha1> --patchset <patchset id>')
-def fail( commit, message ):
+
+def fail(commit, message):
command = SSH_COMMAND + FAILURE_SCORE + ' -m \\\"' \
- + _shell_escape( FAILURE_MESSAGE + '\n\n' + message) \
+ + _shell_escape(FAILURE_MESSAGE + '\n\n' + message) \
+ '\\\" ' + commit
subprocess.getstatusoutput(command)
sys.exit(1)
-def passes( commit ):
+
+def passes(commit):
command = SSH_COMMAND + PASS_SCORE + ' -m \\\"' \
+ _shell_escape(PASS_MESSAGE) + ' \\\" ' + commit
subprocess.getstatusoutput(command)
+
def _shell_escape(x):
s = ''
for c in x:
@@ -102,6 +109,6 @@
s = s + c
return s
+
if __name__ == '__main__':
main()
-
diff --git a/contrib/populate-fixture-data.py b/contrib/populate-fixture-data.py
index 93ac34f..07a0f01 100755
--- a/contrib/populate-fixture-data.py
+++ b/contrib/populate-fixture-data.py
@@ -47,228 +47,235 @@
# Random names from US Census Data
FIRST_NAMES = [
- "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime", "Elaine",
- "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla", "Katrina",
- "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally", "Emma", "Susan",
- "Amanda", "Alyssa", "Patty", "Angie", "Dominique", "Cynthia", "Jennifer",
- "Theresa", "Desiree", "Kaylee", "Maureen", "Jeanne", "Kellie", "Valerie",
- "Nina", "Judy", "Diamond", "Anita", "Rebekah", "Stefanie", "Kendra", "Erin",
- "Tammie", "Tracey", "Bridget", "Krystal", "Jasmin", "Sonia", "Meghan",
- "Rebecca", "Jeanette", "Meredith", "Beverly", "Natasha", "Chloe", "Selena",
- "Teresa", "Sheena", "Cassandra", "Rhonda", "Tami", "Jodi", "Shelly", "Angela",
- "Kimberly", "Terry", "Joanna", "Isabella", "Lindsey", "Loretta", "Dana",
- "Veronica", "Carolyn", "Laura", "Karen", "Dawn", "Alejandra", "Cassie",
- "Lorraine", "Yolanda", "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri",
- "Doris", "Sandra", "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael",
- "Donna", "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
- "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney", "Jacqueline",
- "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn", "Nancy",
- "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara", "Chelsey", "Cassidy",
- "Jean", "Chelsea", "Jenny", "Diana", "Haley", "Kristine", "Kristina", "Erika",
- "Jenna", "Alison", "Deanna", "Abigail", "Melissa", "Sierra", "Linda",
- "Monica", "Tasha", "Traci", "Yvonne", "Tracy", "Marie", "Maria", "Michaela",
- "Stacie", "April", "Morgan", "Cathy", "Darlene", "Cristina", "Emily"
- "Ian", "Russell", "Phillip", "Jay", "Barry", "Brad", "Frederick", "Fernando",
- "Timothy", "Ricardo", "Bernard", "Daniel", "Ruben", "Alexis", "Kyle", "Malik",
- "Norman", "Kent", "Melvin", "Stephen", "Daryl", "Kurt", "Greg", "Alex",
- "Mario", "Riley", "Marvin", "Dan", "Steven", "Roberto", "Lucas", "Leroy",
- "Preston", "Drew", "Fred", "Casey", "Wesley", "Elijah", "Reginald", "Joel",
- "Christopher", "Jacob", "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott",
- "Terrence", "Jim", "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus",
- "Peter", "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
- "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
- "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
- "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
- "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
- "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
- "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin", "Cristian",
- "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac", "Maurice",
- "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius", "Jerome", "Jason",
- "Harold", "Kerry", "Clarence", "Gregg", "Shane", "Eduardo", "Micheal",
- "Howard", "Vernon", "Rodney", "Anthony", "Levi", "Larry", "Franklin", "Jimmy",
- "Jonathon", "Carl",
+ "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime",
+ "Elaine", "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla",
+ "Katrina", "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally",
+ "Emma", "Susan", "Amanda", "Alyssa", "Patty", "Angie", "Dominique",
+ "Cynthia", "Jennifer", "Theresa", "Desiree", "Kaylee", "Maureen",
+ "Jeanne", "Kellie", "Valerie", "Nina", "Judy", "Diamond", "Anita",
+ "Rebekah", "Stefanie", "Kendra", "Erin", "Tammie", "Tracey", "Bridget",
+ "Krystal", "Jasmin", "Sonia", "Meghan", "Rebecca", "Jeanette", "Meredith",
+ "Beverly", "Natasha", "Chloe", "Selena", "Teresa", "Sheena", "Cassandra",
+ "Rhonda", "Tami", "Jodi", "Shelly", "Angela", "Kimberly", "Terry",
+ "Joanna", "Isabella", "Lindsey", "Loretta", "Dana", "Veronica", "Carolyn",
+ "Laura", "Karen", "Dawn", "Alejandra", "Cassie", "Lorraine", "Yolanda",
+ "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri", "Doris", "Sandra",
+ "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael", "Donna",
+ "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
+ "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney",
+ "Jacqueline", "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn",
+ "Nancy", "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara",
+ "Chelsey", "Cassidy", "Jean", "Chelsea", "Jenny", "Diana", "Haley",
+ "Kristine", "Kristina", "Erika", "Jenna", "Alison", "Deanna", "Abigail",
+ "Melissa", "Sierra", "Linda", "Monica", "Tasha", "Traci", "Yvonne",
+ "Tracy", "Marie", "Maria", "Michaela", "Stacie", "April", "Morgan",
+ "Cathy", "Darlene", "Cristina", "Emily" "Ian", "Russell", "Phillip", "Jay",
+ "Barry", "Brad", "Frederick", "Fernando", "Timothy", "Ricardo", "Bernard",
+ "Daniel", "Ruben", "Alexis", "Kyle", "Malik", "Norman", "Kent", "Melvin",
+ "Stephen", "Daryl", "Kurt", "Greg", "Alex", "Mario", "Riley", "Marvin",
+ "Dan", "Steven", "Roberto", "Lucas", "Leroy", "Preston", "Drew", "Fred",
+ "Casey", "Wesley", "Elijah", "Reginald", "Joel", "Christopher", "Jacob",
+ "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott", "Terrence", "Jim",
+ "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus", "Peter",
+ "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
+ "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
+ "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
+ "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
+ "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
+ "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
+ "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin",
+ "Cristian", "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac",
+ "Maurice", "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius",
+ "Jerome", "Jason", "Harold", "Kerry", "Clarence", "Gregg", "Shane",
+ "Eduardo", "Micheal", "Howard", "Vernon", "Rodney", "Anthony", "Levi",
+ "Larry", "Franklin", "Jimmy", "Jonathon", "Carl",
]
LAST_NAMES = [
- "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey", "Farley",
- "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings", "Braun", "Rangel",
- "Casey", "Dougherty", "Hancock", "Wolf", "Henry", "Thomas", "Bentley",
- "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul", "Hess", "Chase",
- "Mckay", "Bender", "Colins", "Montoya", "Townsend", "Potts", "Ayala", "Avery",
- "Sherman", "Tapia", "Hamilton", "Ferguson", "Huang", "Hooper", "Zamora",
- "Logan", "Lloyd", "Quinn", "Monroe", "Brock", "Ibarra", "Fowler", "Weiss",
- "Montgomery", "Diaz", "Dixon", "Olson", "Robertson", "Arias", "Benjamin",
- "Abbott", "Stein", "Schroeder", "Beck", "Velasquez", "Barber", "Nichols",
- "Ortiz", "Burns", "Moody", "Stokes", "Wilcox", "Rush", "Michael", "Kidd",
- "Rowland", "Mclean", "Saunders", "Chung", "Newton", "Potter", "Hickman",
- "Ray", "Larson", "Figueroa", "Duncan", "Sparks", "Rose", "Hodge", "Huynh",
- "Joseph", "Morales", "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn",
- "Wise", "Knight", "Frederick", "Heath", "Pollard", "Vega", "Mcclain",
- "Buckley", "Conrad", "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns",
- "Mcknight", "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson",
- "Oneill", "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence",
- "Brandt", "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
- "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
- "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
- "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
- "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
- "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy", "Cameron",
- "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish", "Herman", "Hines",
- "Sutton", "Gallegos", "Stephenson", "Lozano", "Franklin", "Howe", "Bauer",
- "Love", "Ali", "Ellison", "Lester", "Guzman", "Jarvis", "Espinoza",
- "Fletcher", "Burton", "Woodard", "Peterson", "Barajas", "Richard", "Bryan",
- "Goodman", "Cline", "Rowe", "Faulkner", "Crawford", "Mueller", "Patterson",
- "Hull", "Walton", "Wu", "Flores", "York", "Dickson", "Barnes", "Fisher",
- "Strong", "Juarez", "Fitzgerald", "Schmitt", "Blevins", "Villa", "Sullivan",
- "Velazquez", "Horton", "Meadows", "Riley", "Barrera", "Neal", "Mendez",
- "Mcdonald", "Floyd", "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston",
- "Davies", "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
- "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
- "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
- "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
- "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
- "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
- "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
- "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
- "Knox", "Hanna", "Fields",
+ "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey",
+ "Farley", "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings",
+ "Braun", "Rangel", "Casey", "Dougherty", "Hancock", "Wolf", "Henry",
+ "Thomas", "Bentley", "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul",
+ "Hess", "Chase", "Mckay", "Bender", "Colins", "Montoya", "Townsend",
+ "Potts", "Ayala", "Avery", "Sherman", "Tapia", "Hamilton", "Ferguson",
+ "Huang", "Hooper", "Zamora", "Logan", "Lloyd", "Quinn", "Monroe", "Brock",
+ "Ibarra", "Fowler", "Weiss", "Montgomery", "Diaz", "Dixon", "Olson",
+ "Robertson", "Arias", "Benjamin", "Abbott", "Stein", "Schroeder", "Beck",
+ "Velasquez", "Barber", "Nichols", "Ortiz", "Burns", "Moody", "Stokes",
+ "Wilcox", "Rush", "Michael", "Kidd", "Rowland", "Mclean", "Saunders",
+ "Chung", "Newton", "Potter", "Hickman", "Ray", "Larson", "Figueroa",
+ "Duncan", "Sparks", "Rose", "Hodge", "Huynh", "Joseph", "Morales",
+ "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn", "Wise", "Knight",
+ "Frederick", "Heath", "Pollard", "Vega", "Mcclain", "Buckley", "Conrad",
+ "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns", "Mcknight",
+ "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson", "Oneill",
+ "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence", "Brandt",
+ "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
+ "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
+ "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
+ "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
+ "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
+ "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy",
+ "Cameron", "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish",
+ "Herman", "Hines", "Sutton", "Gallegos", "Stephenson", "Lozano",
+ "Franklin", "Howe", "Bauer", "Love", "Ali", "Ellison", "Lester", "Guzman",
+ "Jarvis", "Espinoza", "Fletcher", "Burton", "Woodard", "Peterson",
+ "Barajas", "Richard", "Bryan", "Goodman", "Cline", "Rowe", "Faulkner",
+ "Crawford", "Mueller", "Patterson", "Hull", "Walton", "Wu", "Flores",
+ "York", "Dickson", "Barnes", "Fisher", "Strong", "Juarez", "Fitzgerald",
+ "Schmitt", "Blevins", "Villa", "Sullivan", "Velazquez", "Horton",
+ "Meadows", "Riley", "Barrera", "Neal", "Mendez", "Mcdonald", "Floyd",
+ "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston", "Davies",
+ "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
+ "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
+ "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
+ "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
+ "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
+ "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
+ "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
+ "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
+ "Knox", "Hanna", "Fields",
]
def clean(json_string):
- # Strip JSON XSS Tag
- json_string = json_string.strip()
- if json_string.startswith(")]}'"):
- return json_string[5:]
- return json_string
+ # Strip JSON XSS Tag
+ json_string = json_string.strip()
+ if json_string.startswith(")]}'"):
+ return json_string[5:]
+ return json_string
def basic_auth(user):
- return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
+ return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
def fetch_admin_group():
- global GROUP_ADMIN
- # Get admin group
- r = json.loads(clean(requests.get(BASE_URL + "groups/" + "?suggest=ad&p=All-Projects",
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH).text))
- admin_group_name = r.keys()[0]
- GROUP_ADMIN = r[admin_group_name]
- GROUP_ADMIN["name"] = admin_group_name
+ global GROUP_ADMIN
+ # Get admin group
+ r = json.loads(clean(requests.get(
+ BASE_URL + "groups/?suggest=ad&p=All-Projects",
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH).text))
+    admin_group_name = next(iter(r))
+ GROUP_ADMIN = r[admin_group_name]
+ GROUP_ADMIN["name"] = admin_group_name
def generate_random_text():
- return " ".join([random.choice("lorem ipsum "
- "doleret delendam "
- "\n esse".split(" ")) for _ in xrange(1, 100)])
+ return " ".join([random.choice("lorem ipsum "
+ "doleret delendam "
+ "\n esse".split(" ")) for _ in range(1,
+ 100)])
def set_up():
- global TMP_PATH
- TMP_PATH = tempfile.mkdtemp()
- atexit.register(clean_up)
- os.makedirs(TMP_PATH + "/ssh")
- os.makedirs(TMP_PATH + "/repos")
- fetch_admin_group()
+ global TMP_PATH
+ TMP_PATH = tempfile.mkdtemp()
+ atexit.register(clean_up)
+ os.makedirs(TMP_PATH + "/ssh")
+ os.makedirs(TMP_PATH + "/repos")
+ fetch_admin_group()
def get_random_users(num_users):
- users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
- num_users)
- names = []
- for u in users:
- names.append({"firstname": u[0],
- "lastname": u[1],
- "name": u[0] + " " + u[1],
- "username": u[0] + u[1],
- "email": u[0] + "." + u[1] + "@gerritcodereview.com",
- "http_password": "secret",
- "groups": []})
- return names
+ users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
+ num_users)
+ names = []
+ for u in users:
+ names.append({"firstname": u[0],
+ "lastname": u[1],
+ "name": u[0] + " " + u[1],
+ "username": u[0] + u[1],
+ "email": u[0] + "." + u[1] + "@gerritcodereview.com",
+ "http_password": "secret",
+ "groups": []})
+ return names
def generate_ssh_keys(gerrit_users):
- for user in gerrit_users:
- key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
- subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
- with open(key_file + ".pub", "r") as f:
- user["ssh_key"] = f.read()
+ for user in gerrit_users:
+ key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
+ subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
+ with open(key_file + ".pub", "r") as f:
+ user["ssh_key"] = f.read()
def create_gerrit_groups():
- groups = [
- {"name": "iOS-Maintainers", "description": "iOS Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Android-Maintainers", "description": "Android Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Backend-Maintainers", "description": "Backend Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Script-Maintainers", "description": "Script Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Security-Team", "description": "Sec Team",
- "visible_to_all": False, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]}]
- for g in groups:
- requests.put(BASE_URL + "groups/" + g["name"],
- json.dumps(g),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [g["name"] for g in groups]
+ groups = [
+ {"name": "iOS-Maintainers", "description": "iOS Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Android-Maintainers", "description": "Android Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Backend-Maintainers", "description": "Backend Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Script-Maintainers", "description": "Script Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Security-Team", "description": "Sec Team",
+ "visible_to_all": False, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]}]
+ for g in groups:
+ requests.put(BASE_URL + "groups/" + g["name"],
+ json.dumps(g),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [g["name"] for g in groups]
def create_gerrit_projects(owner_groups):
- projects = [
- {"id": "android", "name": "Android", "parent": "All-Projects",
- "branches": ["master"], "description": "Our android app.",
- "owners": [owner_groups[0]], "create_empty_commit": True},
- {"id": "ios", "name": "iOS", "parent": "All-Projects",
- "branches": ["master"], "description": "Our ios app.",
- "owners": [owner_groups[1]], "create_empty_commit": True},
- {"id": "backend", "name": "Backend", "parent": "All-Projects",
- "branches": ["master"], "description": "Our awesome backend.",
- "owners": [owner_groups[2]], "create_empty_commit": True},
- {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
- "branches": ["master"], "description": "some small scripts.",
- "owners": [owner_groups[3]], "create_empty_commit": True}]
- for p in projects:
- requests.put(BASE_URL + "projects/" + p["name"],
- json.dumps(p),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [p["name"] for p in projects]
+ projects = [
+ {"id": "android", "name": "Android", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our android app.",
+ "owners": [owner_groups[0]], "create_empty_commit": True},
+ {"id": "ios", "name": "iOS", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our ios app.",
+ "owners": [owner_groups[1]], "create_empty_commit": True},
+ {"id": "backend", "name": "Backend", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our awesome backend.",
+ "owners": [owner_groups[2]], "create_empty_commit": True},
+ {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
+ "branches": ["master"], "description": "some small scripts.",
+ "owners": [owner_groups[3]], "create_empty_commit": True}]
+ for p in projects:
+ requests.put(BASE_URL + "projects/" + p["name"],
+ json.dumps(p),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [p["name"] for p in projects]
def create_gerrit_users(gerrit_users):
- for user in gerrit_users:
- requests.put(BASE_URL + "accounts/" + user["username"],
- json.dumps(user),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
+ for user in gerrit_users:
+ requests.put(BASE_URL + "accounts/" + user["username"],
+ json.dumps(user),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
def create_change(user, project_name):
- random_commit_message = generate_random_text()
- change = {
- "project": project_name,
- "subject": random_commit_message.split("\n")[0],
- "branch": "master",
- "status": "NEW",
- }
- requests.post(BASE_URL + "changes/",
- json.dumps(change),
- headers=HEADERS,
- auth=basic_auth(user))
+ random_commit_message = generate_random_text()
+ change = {
+ "project": project_name,
+ "subject": random_commit_message.split("\n")[0],
+ "branch": "master",
+ "status": "NEW",
+ }
+ requests.post(BASE_URL + "changes/",
+ json.dumps(change),
+ headers=HEADERS,
+ auth=basic_auth(user))
def clean_up():
- shutil.rmtree(TMP_PATH)
+ shutil.rmtree(TMP_PATH)
def main():
    p = optparse.OptionParser()
    p.add_option("-u", "--user_count", action="store",
                 default=100,
@@ -299,7 +306,42 @@
    project_names = create_gerrit_projects(group_names)
    for idx, u in enumerate(gerrit_users):
-        for _ in xrange(random.randint(1, 5)):
-        create_change(u, project_names[4 * idx / len(gerrit_users)])
+        for _ in range(random.randint(1, 5)):
+            create_change(u, project_names[4 * idx // len(gerrit_users)])
main()
diff --git a/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
index 631e7f5..2958464 100644
--- a/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
+++ b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
@@ -15,6 +15,7 @@
package com.google.gerrit.reviewdb.server;
import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gwtorm.protobuf.CodecFactory;
@@ -27,6 +28,9 @@
public static final ProtobufCodec<Change> CHANGE_CODEC = CodecFactory.encoder(Change.class);
+ public static final ProtobufCodec<ChangeMessage> MESSAGE_CODEC =
+ CodecFactory.encoder(ChangeMessage.class);
+
public static final ProtobufCodec<PatchSet> PATCH_SET_CODEC =
CodecFactory.encoder(PatchSet.class);
diff --git a/java/com/google/gerrit/server/account/AccountsUpdate.java b/java/com/google/gerrit/server/account/AccountsUpdate.java
index 2f36cf2..996e602 100644
--- a/java/com/google/gerrit/server/account/AccountsUpdate.java
+++ b/java/com/google/gerrit/server/account/AccountsUpdate.java
@@ -106,7 +106,8 @@
* <li>binding {@link GitReferenceUpdated#DISABLED} and
* <li>passing an {@link
* com.google.gerrit.server.account.externalids.ExternalIdNotes.FactoryNoReindex} factory as
- * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser, ExternalIdNotesLoader)}
+ * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser,
+ * ExternalIdNotes.ExternalIdNotesLoader)}
* </ul>
*
* <p>If there are concurrent account updates updating the user branch in NoteDb may fail with
diff --git a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
index f380051..13a09a1 100644
--- a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
+++ b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
@@ -32,7 +32,6 @@
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
-import java.io.IOException;
@Singleton
public class OAuthTokenCache {
@@ -70,12 +69,7 @@
@Override
public OAuthToken deserialize(byte[] in) {
- OAuthTokenProto proto;
- try {
- proto = OAuthTokenProto.parseFrom(in);
- } catch (IOException e) {
- throw new IllegalArgumentException("failed to deserialize OAuthToken");
- }
+ OAuthTokenProto proto = ProtoCacheSerializers.parseUnchecked(OAuthTokenProto.parser(), in);
return new OAuthToken(
proto.getToken(),
proto.getSecret(),
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
index 429f5ab..794d3bb 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
@@ -34,7 +34,12 @@
PersistentCacheBinding<K, V> version(int version);
- /** Set the total on-disk limit of the cache */
+ /**
+ * Set the total on-disk limit of the cache.
+ *
+ * <p>If 0 or negative, persistence for the cache is disabled by default, but may still be
+ * overridden in the config.
+ */
PersistentCacheBinding<K, V> diskLimit(long limit);
PersistentCacheBinding<K, V> keySerializer(CacheSerializer<K> keySerializer);
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
index 405de4f..46a9e61 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
@@ -39,6 +39,7 @@
CacheModule module, String name, TypeLiteral<K> keyType, TypeLiteral<V> valType) {
super(module, name, keyType, valType);
version = -1;
+ diskLimit = 128 << 20;
}
@Inject(optional = true)
@@ -93,10 +94,7 @@
@Override
public long diskLimit() {
- if (diskLimit > 0) {
- return diskLimit;
- }
- return 128 << 20;
+ return diskLimit;
}
@Override
diff --git a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
index 795df72..c6fc0b9 100644
--- a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
+++ b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
@@ -14,17 +14,24 @@
package com.google.gerrit.server.cache;
+import static com.google.common.base.Preconditions.checkArgument;
+import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
+
+import com.google.gwtorm.protobuf.ProtobufCodec;
+import com.google.protobuf.ByteString;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.MessageLite;
+import com.google.protobuf.Parser;
import java.io.IOException;
+import org.eclipse.jgit.lib.ObjectId;
/** Static utilities for writing protobuf-based {@link CacheSerializer} implementations. */
public class ProtoCacheSerializers {
/**
* Serializes a proto to a byte array.
*
- * <p>Guarantees deterministic serialization and thus is suitable for use as a persistent cache
- * key. Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
* deterministic.
*
* @param message the proto message to serialize.
@@ -39,7 +46,80 @@
cout.checkNoSpaceLeft();
return bytes;
} catch (IOException e) {
- throw new IllegalStateException("exception writing to byte array");
+ throw new IllegalStateException("exception writing to byte array", e);
+ }
+ }
+
+ /**
+ * Serializes an object to a {@link ByteString} using a protobuf codec.
+ *
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link ProtobufCodec#encodeToByteString(Object)}, which is not
+ * guaranteed deterministic.
+ *
+ * @param object the object to serialize.
+ * @param codec codec for serializing.
+ * @return a {@code ByteString} with the message contents.
+ */
+ public static <T> ByteString toByteString(T object, ProtobufCodec<T> codec) {
+ try (ByteString.Output bout = ByteString.newOutput()) {
+ CodedOutputStream cout = CodedOutputStream.newInstance(bout);
+ cout.useDeterministicSerialization();
+ codec.encode(object, cout);
+ cout.flush();
+ return bout.toByteString();
+ } catch (IOException e) {
+ throw new IllegalStateException("exception writing to ByteString", e);
+ }
+ }
+
+ /**
+ * Parses a byte array to a protobuf message.
+ *
+ * @param parser parser for the proto type.
+ * @param in byte array with the message contents.
+ * @return parsed proto.
+ */
+ public static <M extends MessageLite> M parseUnchecked(Parser<M> parser, byte[] in) {
+ try {
+ return parser.parseFrom(in);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("exception parsing byte array to proto", e);
+ }
+ }
+
+ /**
+ * Helper for serializing {@link ObjectId} instances to/from protobuf fields.
+ *
+ * <p>Reuse a single instance's {@link #toByteString(ObjectId)} and {@link
+ * #fromByteString(ByteString)} within a single {@link CacheSerializer#serialize} or {@link
+ * CacheSerializer#deserialize} method body to minimize allocation of temporary buffers.
+ *
+ * <p><strong>Note:</strong> This class is not threadsafe. Instances must not be stored in {@link
+ * CacheSerializer} fields if the serializer instances will be used from multiple threads.
+ */
+ public static class ObjectIdConverter {
+ public static ObjectIdConverter create() {
+ return new ObjectIdConverter();
+ }
+
+ private final byte[] buf = new byte[OBJECT_ID_LENGTH];
+
+ private ObjectIdConverter() {}
+
+ public ByteString toByteString(ObjectId id) {
+ id.copyRawTo(buf, 0);
+ return ByteString.copyFrom(buf);
+ }
+
+ public ObjectId fromByteString(ByteString in) {
+ checkArgument(
+ in.size() == OBJECT_ID_LENGTH,
+ "expected ByteString of length %s: %s",
+ OBJECT_ID_LENGTH,
+ in);
+ in.copyTo(buf, 0);
+ return ObjectId.fromRaw(buf);
}
}
diff --git a/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
index 78900cb..19c5b67 100644
--- a/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
+++ b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
@@ -22,8 +22,10 @@
import com.google.common.truth.FailureMetadata;
import com.google.common.truth.Subject;
import java.lang.reflect.Field;
+import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
+import java.util.Arrays;
import java.util.Map;
import org.apache.commons.lang3.reflect.FieldUtils;
@@ -62,6 +64,13 @@
super(metadata, actual);
}
+ public void isAbstract() {
+ isNotNull();
+ assertWithMessage("expected class %s to be abstract", actual().getName())
+ .that(Modifier.isAbstract(actual().getModifiers()))
+ .isTrue();
+ }
+
public void isConcrete() {
isNotNull();
assertWithMessage("expected class %s to be concrete", actual().getName())
@@ -78,4 +87,17 @@
.collect(toImmutableMap(Field::getName, Field::getGenericType)))
.containsExactlyEntriesIn(expectedFields);
}
+
+ public void hasAutoValueMethods(Map<String, Type> expectedMethods) {
+ // Would be nice if we could check clazz is an @AutoValue, but the retention is not RUNTIME.
+ isAbstract();
+ assertThat(
+ Arrays.stream(actual().getDeclaredMethods())
+ .filter(m -> !Modifier.isStatic(m.getModifiers()))
+ .filter(m -> Modifier.isAbstract(m.getModifiers()))
+ .filter(m -> m.getParameters().length == 0)
+ .collect(toImmutableMap(Method::getName, Method::getGenericReturnType)))
+ .named("no-argument abstract methods on %s", actual().getName())
+ .isEqualTo(expectedMethods);
+ }
}
diff --git a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
index a4eb90f..b2eb62d 100644
--- a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
+++ b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
@@ -31,6 +31,7 @@
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.EnumCacheSerializer;
import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
import com.google.gerrit.server.cache.proto.Cache.ChangeKindKeyProto;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.GitRepositoryManager;
@@ -41,8 +42,6 @@
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.name.Named;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
@@ -52,7 +51,6 @@
import java.util.concurrent.ExecutionException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.Config;
-import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.Repository;
@@ -178,27 +176,24 @@
static class Serializer implements CacheSerializer<Key> {
@Override
public byte[] serialize(Key object) {
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- ChangeKindKeyProto.Builder b = ChangeKindKeyProto.newBuilder();
- object.getPrior().copyRawTo(buf, 0);
- b.setPrior(ByteString.copyFrom(buf));
- object.getNext().copyRawTo(buf, 0);
- b.setNext(ByteString.copyFrom(buf));
- b.setStrategyName(object.getStrategyName());
- return ProtoCacheSerializers.toByteArray(b.build());
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ ChangeKindKeyProto.newBuilder()
+ .setPrior(idConverter.toByteString(object.getPrior()))
+ .setNext(idConverter.toByteString(object.getNext()))
+ .setStrategyName(object.getStrategyName())
+ .build());
}
@Override
public Key deserialize(byte[] in) {
- try {
- ChangeKindKeyProto proto = ChangeKindKeyProto.parseFrom(in);
- return new Key(
- ObjectId.fromRaw(proto.getPrior().toByteArray()),
- ObjectId.fromRaw(proto.getNext().toByteArray()),
- proto.getStrategyName());
- } catch (InvalidProtocolBufferException e) {
- throw new IllegalArgumentException("Failed to deserialize object", e);
- }
+ ChangeKindKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeKindKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return new Key(
+ idConverter.fromByteString(proto.getPrior()),
+ idConverter.fromByteString(proto.getNext()),
+ proto.getStrategyName());
}
}
}
diff --git a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
index a192228..b57be15 100644
--- a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
+++ b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
@@ -29,6 +29,7 @@
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
import com.google.gerrit.server.cache.proto.Cache.MergeabilityKeyProto;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
@@ -37,13 +38,10 @@
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
-import com.google.protobuf.ByteString;
-import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
-import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
@@ -145,33 +143,24 @@
@Override
public byte[] serialize(EntryKey object) {
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- MergeabilityKeyProto.Builder b = MergeabilityKeyProto.newBuilder();
- object.getCommit().copyRawTo(buf, 0);
- b.setCommit(ByteString.copyFrom(buf));
- object.getInto().copyRawTo(buf, 0);
- b.setInto(ByteString.copyFrom(buf));
- b.setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()));
- b.setMergeStrategy(object.getMergeStrategy());
- return ProtoCacheSerializers.toByteArray(b.build());
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ MergeabilityKeyProto.newBuilder()
+ .setCommit(idConverter.toByteString(object.getCommit()))
+ .setInto(idConverter.toByteString(object.getInto()))
+ .setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()))
+ .setMergeStrategy(object.getMergeStrategy())
+ .build());
}
@Override
public EntryKey deserialize(byte[] in) {
- MergeabilityKeyProto proto;
- try {
- proto = MergeabilityKeyProto.parseFrom(in);
- } catch (IOException e) {
- throw new IllegalArgumentException("Failed to deserialize mergeability cache key");
- }
- byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
- proto.getCommit().copyTo(buf, 0);
- ObjectId commit = ObjectId.fromRaw(buf);
- proto.getInto().copyTo(buf, 0);
- ObjectId into = ObjectId.fromRaw(buf);
+ MergeabilityKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(MergeabilityKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
return new EntryKey(
- commit,
- into,
+ idConverter.fromByteString(proto.getCommit()),
+ idConverter.fromByteString(proto.getInto()),
SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
proto.getMergeStrategy());
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollection.java b/java/com/google/gerrit/server/git/GarbageCollection.java
index 3bf89c7..997907e 100644
--- a/java/com/google/gerrit/server/git/GarbageCollection.java
+++ b/java/com/google/gerrit/server/git/GarbageCollection.java
@@ -41,9 +41,6 @@
public class GarbageCollection {
private static final Logger log = LoggerFactory.getLogger(GarbageCollection.class);
- public static final String LOG_NAME = "gc_log";
- private static final Logger gcLog = LoggerFactory.getLogger(LOG_NAME);
-
private final GitRepositoryManager repoManager;
private final GarbageCollectionQueue gcQueue;
private final GcConfig gcConfig;
@@ -142,7 +139,7 @@
}
b.append(s);
}
- gcLog.info(b.toString());
+ log.info(b.toString());
}
private static void logGcConfiguration(
@@ -182,7 +179,6 @@
print(writer, "failed.\n\n");
StringBuilder b = new StringBuilder();
b.append("[").append(projectName.get()).append("]");
- gcLog.error(b.toString(), e);
log.error(b.toString(), e);
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
index e03ef67..8796fdf 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
@@ -26,6 +26,8 @@
import org.eclipse.jgit.lib.Config;
public class GarbageCollectionLogFile implements LifecycleListener {
+ private static final String LOG_NAME = "gc_log";
+
@Inject
public GarbageCollectionLogFile(SitePaths sitePaths, @GerritServerConfig Config config) {
if (SystemLog.shouldConfigure()) {
@@ -38,15 +40,20 @@
@Override
public void stop() {
- LogManager.getLogger(GarbageCollection.LOG_NAME).removeAllAppenders();
+ LogManager.getLogger(GarbageCollection.class).removeAllAppenders();
+ LogManager.getLogger(GarbageCollectionRunner.class).removeAllAppenders();
}
private static void initLogSystem(Path logdir, boolean rotate) {
- Logger gcLogger = LogManager.getLogger(GarbageCollection.LOG_NAME);
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollection.class));
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollectionRunner.class));
+ }
+
+ private static void initGcLogger(Path logdir, boolean rotate, Logger gcLogger) {
gcLogger.removeAllAppenders();
gcLogger.addAppender(
SystemLog.createAppender(
- logdir, GarbageCollection.LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
+ logdir, LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
gcLogger.setAdditivity(false);
}
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
index e4316c5..054e56a 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
@@ -24,7 +24,7 @@
/** Runnable to enable scheduling gc to run periodically */
public class GarbageCollectionRunner implements Runnable {
- private static final Logger gcLog = LoggerFactory.getLogger(GarbageCollection.LOG_NAME);
+ private static final Logger log = LoggerFactory.getLogger(GarbageCollectionRunner.class);
static class Lifecycle implements LifecycleListener {
private final WorkQueue queue;
@@ -61,7 +61,7 @@
@Override
public void run() {
- gcLog.info("Triggering gc on all repositories");
+ log.info("Triggering gc on all repositories");
garbageCollectionFactory.create().run(Lists.newArrayList(projectCache.all()));
}
diff --git a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
index 5ce3c1c..bff2952 100644
--- a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
+++ b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
@@ -142,8 +142,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setMemberModification(MemberModification)} in order to combine multiple member additions,
- * deletions, or other modifications into one update.
+ * #setMemberModification(InternalGroupUpdate.MemberModification)} in order to combine multiple
+ * member additions, deletions, or other modifications into one update.
*/
public abstract MemberModification getMemberModification();
@@ -155,8 +155,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setSubgroupModification(SubgroupModification)} in order to combine multiple subgroup
- * additions, deletions, or other modifications into one update.
+ * #setSubgroupModification(InternalGroupUpdate.SubgroupModification)} in order to combine
+ * multiple subgroup additions, deletions, or other modifications into one update.
*/
public abstract SubgroupModification getSubgroupModification();
diff --git a/java/com/google/gerrit/server/index/change/ChangeField.java b/java/com/google/gerrit/server/index/change/ChangeField.java
index 5db347e..82253f2 100644
--- a/java/com/google/gerrit/server/index/change/ChangeField.java
+++ b/java/com/google/gerrit/server/index/change/ChangeField.java
@@ -643,7 +643,7 @@
* <p>Stored fields need to use a stable format over a long period; this type insulates the index
* from implementation changes in SubmitRecord itself.
*/
- static class StoredSubmitRecord {
+ public static class StoredSubmitRecord {
static class StoredLabel {
String label;
SubmitRecord.Label.Status status;
@@ -661,7 +661,7 @@
List<StoredRequirement> requirements;
String errorMessage;
- StoredSubmitRecord(SubmitRecord rec) {
+ public StoredSubmitRecord(SubmitRecord rec) {
this.status = rec.status;
this.errorMessage = rec.errorMessage;
if (rec.labels != null) {
@@ -686,7 +686,7 @@
}
}
- private SubmitRecord toSubmitRecord() {
+ public SubmitRecord toSubmitRecord() {
SubmitRecord rec = new SubmitRecord();
rec.status = status;
rec.errorMessage = errorMessage;
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
index 5658569..06d940e 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
@@ -25,6 +25,10 @@
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.cache.CacheModule;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
import com.google.gerrit.server.notedb.AbstractChangeNotes.Args;
import com.google.gerrit.server.notedb.ChangeNotesCommit.ChangeNotesRevWalk;
import com.google.inject.Inject;
@@ -49,20 +53,53 @@
@Override
protected void configure() {
bind(ChangeNotesCache.class);
- cache(CACHE_NAME, Key.class, ChangeNotesState.class)
+ persist(CACHE_NAME, Key.class, ChangeNotesState.class)
.weigher(Weigher.class)
- .maximumWeight(10 << 20);
+ .maximumWeight(10 << 20)
+ .diskLimit(-1)
+ .version(1)
+ .keySerializer(Key.Serializer.INSTANCE)
+ .valueSerializer(ChangeNotesState.Serializer.INSTANCE);
}
};
}
@AutoValue
public abstract static class Key {
+ static Key create(Project.NameKey project, Change.Id changeId, ObjectId id) {
+ return new AutoValue_ChangeNotesCache_Key(project, changeId, id.copy());
+ }
+
abstract Project.NameKey project();
abstract Change.Id changeId();
abstract ObjectId id();
+
+ @VisibleForTesting
+ static enum Serializer implements CacheSerializer<Key> {
+ INSTANCE;
+
+ @Override
+ public byte[] serialize(Key object) {
+ return ProtoCacheSerializers.toByteArray(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject(object.project().get())
+ .setChangeId(object.changeId().get())
+ .setId(ObjectIdConverter.create().toByteString(object.id()))
+ .build());
+ }
+
+ @Override
+ public Key deserialize(byte[] in) {
+ ChangeNotesKeyProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesKeyProto.parser(), in);
+ return Key.create(
+ new Project.NameKey(proto.getProject()),
+ new Change.Id(proto.getChangeId()),
+ ObjectIdConverter.create().fromByteString(proto.getId()));
+ }
+ }
}
public static class Weigher implements com.google.common.cache.Weigher<Key, ChangeNotesState> {
@@ -134,7 +171,7 @@
+ T // readOnlyUntil
+ 1 // isPrivate
+ 1 // workInProgress
- + 1; // hasReviewStarted
+ + 1; // reviewStarted
}
private static int ptr(Object o, int size) {
@@ -330,7 +367,7 @@
Value get(Project.NameKey project, Change.Id changeId, ObjectId metaId, ChangeNotesRevWalk rw)
throws IOException {
try {
- Key key = new AutoValue_ChangeNotesCache_Key(project, changeId, metaId.copy());
+ Key key = Key.create(project, changeId, metaId);
Loader loader = new Loader(key, rw);
ChangeNotesState s = cache.get(key, loader);
return new AutoValue_ChangeNotesCache_Value(s, loader.revisionNoteMap);
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesState.java b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
index 78734f9..3eb06b2 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesState.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
@@ -14,15 +14,29 @@
package com.google.gerrit.server.notedb;
+import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.ImmutableList.toImmutableList;
+import static com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap;
+import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.ProtoCacheSerializers.toByteString;
import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Table;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.reviewdb.client.Account;
@@ -34,10 +48,22 @@
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.OutputFormat;
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord;
+import com.google.gerrit.server.mail.Address;
import com.google.gerrit.server.notedb.NoteDbChangeState.PrimaryStorage;
+import com.google.gson.Gson;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.List;
@@ -95,7 +121,7 @@
@Nullable Timestamp readOnlyUntil,
boolean isPrivate,
boolean workInProgress,
- boolean hasReviewStarted,
+ boolean reviewStarted,
@Nullable Change.Id revertOf) {
checkNotNull(
metaId,
@@ -106,22 +132,22 @@
.metaId(metaId)
.changeId(changeId)
.columns(
- new AutoValue_ChangeNotesState_ChangeColumns.Builder()
+ ChangeColumns.builder()
.changeKey(changeKey)
.createdOn(createdOn)
.lastUpdatedOn(lastUpdatedOn)
.owner(owner)
.branch(branch)
+ .status(status)
.currentPatchSetId(currentPatchSetId)
.subject(subject)
.topic(topic)
.originalSubject(originalSubject)
.submissionId(submissionId)
.assignee(assignee)
- .status(status)
.isPrivate(isPrivate)
- .isWorkInProgress(workInProgress)
- .hasReviewStarted(hasReviewStarted)
+ .workInProgress(workInProgress)
+ .reviewStarted(reviewStarted)
.revertOf(revertOf)
.build())
.pastAssignees(pastAssignees)
@@ -147,10 +173,14 @@
* <p>Notable exceptions include rowVersion and noteDbState, which are only make sense when read
* from NoteDb, so they cannot be cached.
*
- * <p>Fields are in listed column order.
+ * <p>Fields should match the column names in {@link Change}, and are in listed column order.
*/
@AutoValue
abstract static class ChangeColumns {
+ static Builder builder() {
+ return new AutoValue_ChangeNotesState_ChangeColumns.Builder();
+ }
+
abstract Change.Key changeKey();
abstract Timestamp createdOn();
@@ -162,6 +192,10 @@
// Project not included, as it's not stored anywhere in the meta ref.
abstract String branch();
+ // TODO(dborowitz): Use a sensible default other than null
+ @Nullable
+ abstract Change.Status status();
+
@Nullable
abstract PatchSet.Id currentPatchSetId();
@@ -178,19 +212,18 @@
@Nullable
abstract Account.Id assignee();
- // TODO(dborowitz): Use a sensible default other than null
- @Nullable
- abstract Change.Status status();
abstract boolean isPrivate();
- abstract boolean isWorkInProgress();
+ abstract boolean workInProgress();
- abstract boolean hasReviewStarted();
+ abstract boolean reviewStarted();
@Nullable
abstract Change.Id revertOf();
+ abstract Builder toBuilder();
+
@AutoValue.Builder
abstract static class Builder {
abstract Builder changeKey(Change.Key changeKey);
@@ -219,9 +252,9 @@
abstract Builder isPrivate(boolean isPrivate);
- abstract Builder isWorkInProgress(boolean isWorkInProgress);
+ abstract Builder workInProgress(boolean workInProgress);
- abstract Builder hasReviewStarted(boolean hasReviewStarted);
+ abstract Builder reviewStarted(boolean reviewStarted);
abstract Builder revertOf(@Nullable Change.Id revertOf);
@@ -327,8 +360,8 @@
change.setSubmissionId(c.submissionId());
change.setAssignee(c.assignee());
change.setPrivate(c.isPrivate());
- change.setWorkInProgress(c.isWorkInProgress());
- change.setReviewStarted(c.hasReviewStarted());
+ change.setWorkInProgress(c.workInProgress());
+ change.setReviewStarted(c.reviewStarted());
change.setRevertOf(c.revertOf());
if (!patchSets().isEmpty()) {
@@ -368,7 +401,7 @@
abstract Builder pastAssignees(Set<Account.Id> pastAssignees);
- abstract Builder hashtags(Set<String> hashtags);
+ abstract Builder hashtags(Iterable<String> hashtags);
abstract Builder patchSets(Iterable<Map.Entry<PatchSet.Id, PatchSet>> patchSets);
@@ -396,4 +429,267 @@
abstract ChangeNotesState build();
}
+
+ static enum Serializer implements CacheSerializer<ChangeNotesState> {
+ INSTANCE;
+
+ @VisibleForTesting static final Gson GSON = OutputFormat.JSON_COMPACT.newGson();
+
+ private static final Converter<String, Change.Status> STATUS_CONVERTER =
+ Enums.stringConverter(Change.Status.class);
+ private static final Converter<String, ReviewerStateInternal> REVIEWER_STATE_CONVERTER =
+ Enums.stringConverter(ReviewerStateInternal.class);
+
+ @Override
+ public byte[] serialize(ChangeNotesState object) {
+ checkArgument(object.metaId() != null, "meta ID is required in: %s", object);
+ checkArgument(object.columns() != null, "ChangeColumns is required in: %s", object);
+ ChangeNotesStateProto.Builder b = ChangeNotesStateProto.newBuilder();
+
+ b.setMetaId(ObjectIdConverter.create().toByteString(object.metaId()))
+ .setChangeId(object.changeId().get())
+ .setColumns(toChangeColumnsProto(object.columns()));
+
+ object.pastAssignees().forEach(a -> b.addPastAssignee(a.get()));
+ object.hashtags().forEach(b::addHashtag);
+ object.patchSets().forEach(e -> b.addPatchSet(toByteString(e.getValue(), PATCH_SET_CODEC)));
+ object.approvals().forEach(e -> b.addApproval(toByteString(e.getValue(), APPROVAL_CODEC)));
+
+ object.reviewers().asTable().cellSet().forEach(c -> b.addReviewer(toReviewerSetEntry(c)));
+ object
+ .reviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addReviewerByEmail(toReviewerByEmailSetEntry(c)));
+ object
+ .pendingReviewers()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewer(toReviewerSetEntry(c)));
+ object
+ .pendingReviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewerByEmail(toReviewerByEmailSetEntry(c)));
+
+ object.allPastReviewers().forEach(a -> b.addPastReviewer(a.get()));
+ object.reviewerUpdates().forEach(u -> b.addReviewerUpdate(toReviewerStatusUpdateProto(u)));
+ object
+ .submitRecords()
+ .forEach(r -> b.addSubmitRecord(GSON.toJson(new StoredSubmitRecord(r))));
+ object.changeMessages().forEach(m -> b.addChangeMessage(toByteString(m, MESSAGE_CODEC)));
+ object.publishedComments().values().forEach(c -> b.addPublishedComment(GSON.toJson(c)));
+
+ if (object.readOnlyUntil() != null) {
+ b.setReadOnlyUntil(object.readOnlyUntil().getTime()).setHasReadOnlyUntil(true);
+ }
+
+ return ProtoCacheSerializers.toByteArray(b.build());
+ }
+
+ private static ChangeColumnsProto toChangeColumnsProto(ChangeColumns cols) {
+ ChangeColumnsProto.Builder b =
+ ChangeColumnsProto.newBuilder()
+ .setChangeKey(cols.changeKey().get())
+ .setCreatedOn(cols.createdOn().getTime())
+ .setLastUpdatedOn(cols.lastUpdatedOn().getTime())
+ .setOwner(cols.owner().get())
+ .setBranch(cols.branch());
+ if (cols.currentPatchSetId() != null) {
+ b.setCurrentPatchSetId(cols.currentPatchSetId().get()).setHasCurrentPatchSetId(true);
+ }
+ b.setSubject(cols.subject());
+ if (cols.topic() != null) {
+ b.setTopic(cols.topic()).setHasTopic(true);
+ }
+ if (cols.originalSubject() != null) {
+ b.setOriginalSubject(cols.originalSubject()).setHasOriginalSubject(true);
+ }
+ if (cols.submissionId() != null) {
+ b.setSubmissionId(cols.submissionId()).setHasSubmissionId(true);
+ }
+ if (cols.assignee() != null) {
+ b.setAssignee(cols.assignee().get()).setHasAssignee(true);
+ }
+ if (cols.status() != null) {
+ b.setStatus(STATUS_CONVERTER.reverse().convert(cols.status())).setHasStatus(true);
+ }
+ b.setIsPrivate(cols.isPrivate())
+ .setWorkInProgress(cols.workInProgress())
+ .setReviewStarted(cols.reviewStarted());
+ if (cols.revertOf() != null) {
+ b.setRevertOf(cols.revertOf().get()).setHasRevertOf(true);
+ }
+ return b.build();
+ }
+
+ private static ReviewerSetEntryProto toReviewerSetEntry(
+ Table.Cell<ReviewerStateInternal, Account.Id, Timestamp> c) {
+ return ReviewerSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAccountId(c.getColumnKey().get())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerByEmailSetEntryProto toReviewerByEmailSetEntry(
+ Table.Cell<ReviewerStateInternal, Address, Timestamp> c) {
+ return ReviewerByEmailSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAddress(c.getColumnKey().toHeaderString())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerStatusUpdateProto toReviewerStatusUpdateProto(ReviewerStatusUpdate u) {
+ return ReviewerStatusUpdateProto.newBuilder()
+ .setDate(u.date().getTime())
+ .setUpdatedBy(u.updatedBy().get())
+ .setReviewer(u.reviewer().get())
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(u.state()))
+ .build();
+ }
+
+ @Override
+ public ChangeNotesState deserialize(byte[] in) {
+ ChangeNotesStateProto proto =
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), in);
+ Change.Id changeId = new Change.Id(proto.getChangeId());
+
+ ChangeNotesState.Builder b =
+ builder()
+ .metaId(ObjectIdConverter.create().fromByteString(proto.getMetaId()))
+ .changeId(changeId)
+ .columns(toChangeColumns(changeId, proto.getColumns()))
+ .pastAssignees(
+ proto
+ .getPastAssigneeList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableSet()))
+ .hashtags(proto.getHashtagList())
+ .patchSets(
+ proto
+ .getPatchSetList()
+ .stream()
+ .map(PATCH_SET_CODEC::decode)
+ .map(ps -> Maps.immutableEntry(ps.getId(), ps))
+ .collect(toImmutableList()))
+ .approvals(
+ proto
+ .getApprovalList()
+ .stream()
+ .map(APPROVAL_CODEC::decode)
+ .map(a -> Maps.immutableEntry(a.getPatchSetId(), a))
+ .collect(toImmutableList()))
+ .reviewers(toReviewerSet(proto.getReviewerList()))
+ .reviewersByEmail(toReviewerByEmailSet(proto.getReviewerByEmailList()))
+ .pendingReviewers(toReviewerSet(proto.getPendingReviewerList()))
+ .pendingReviewersByEmail(toReviewerByEmailSet(proto.getPendingReviewerByEmailList()))
+ .allPastReviewers(
+ proto
+ .getPastReviewerList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableList()))
+ .reviewerUpdates(toReviewerStatusUpdateList(proto.getReviewerUpdateList()))
+ .submitRecords(
+ proto
+ .getSubmitRecordList()
+ .stream()
+ .map(r -> GSON.fromJson(r, StoredSubmitRecord.class).toSubmitRecord())
+ .collect(toImmutableList()))
+ .changeMessages(
+ proto
+ .getChangeMessageList()
+ .stream()
+ .map(MESSAGE_CODEC::decode)
+ .collect(toImmutableList()))
+ .publishedComments(
+ proto
+ .getPublishedCommentList()
+ .stream()
+ .map(r -> GSON.fromJson(r, Comment.class))
+ .collect(toImmutableListMultimap(c -> new RevId(c.revId), c -> c)));
+ if (proto.getHasReadOnlyUntil()) {
+ b.readOnlyUntil(new Timestamp(proto.getReadOnlyUntil()));
+ }
+ return b.build();
+ }
+
+ private static ChangeColumns toChangeColumns(Change.Id changeId, ChangeColumnsProto proto) {
+ ChangeColumns.Builder b =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(proto.getChangeKey()))
+ .createdOn(new Timestamp(proto.getCreatedOn()))
+ .lastUpdatedOn(new Timestamp(proto.getLastUpdatedOn()))
+ .owner(new Account.Id(proto.getOwner()))
+ .branch(proto.getBranch());
+ if (proto.getHasCurrentPatchSetId()) {
+ b.currentPatchSetId(new PatchSet.Id(changeId, proto.getCurrentPatchSetId()));
+ }
+ b.subject(proto.getSubject());
+ if (proto.getHasTopic()) {
+ b.topic(proto.getTopic());
+ }
+ if (proto.getHasOriginalSubject()) {
+ b.originalSubject(proto.getOriginalSubject());
+ }
+ if (proto.getHasSubmissionId()) {
+ b.submissionId(proto.getSubmissionId());
+ }
+ if (proto.getHasAssignee()) {
+ b.assignee(new Account.Id(proto.getAssignee()));
+ }
+ if (proto.getHasStatus()) {
+ b.status(STATUS_CONVERTER.convert(proto.getStatus()));
+ }
+ b.isPrivate(proto.getIsPrivate())
+ .workInProgress(proto.getWorkInProgress())
+ .reviewStarted(proto.getReviewStarted());
+ if (proto.getHasRevertOf()) {
+ b.revertOf(new Change.Id(proto.getRevertOf()));
+ }
+ return b.build();
+ }
+
+ private static ReviewerSet toReviewerSet(List<ReviewerSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Account.Id, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ new Account.Id(e.getAccountId()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerSet.fromTable(b.build());
+ }
+
+ private static ReviewerByEmailSet toReviewerByEmailSet(
+ List<ReviewerByEmailSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Address, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerByEmailSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ Address.parse(e.getAddress()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerByEmailSet.fromTable(b.build());
+ }
+
+ private static ImmutableList<ReviewerStatusUpdate> toReviewerStatusUpdateList(
+ List<ReviewerStatusUpdateProto> protos) {
+ ImmutableList.Builder<ReviewerStatusUpdate> b = ImmutableList.builder();
+ for (ReviewerStatusUpdateProto proto : protos) {
+ b.add(
+ ReviewerStatusUpdate.create(
+ new Timestamp(proto.getDate()),
+ new Account.Id(proto.getUpdatedBy()),
+ new Account.Id(proto.getReviewer()),
+ REVIEWER_STATE_CONVERTER.convert(proto.getState())));
+ }
+ return b.build();
+ }
+ }
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictKey.java b/java/com/google/gerrit/server/query/change/ConflictKey.java
index 0101ffe..9daf886 100644
--- a/java/com/google/gerrit/server/query/change/ConflictKey.java
+++ b/java/com/google/gerrit/server/query/change/ConflictKey.java
@@ -14,62 +14,80 @@
package com.google.gerrit.server.query.change;
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
+import com.google.common.collect.Ordering;
import com.google.gerrit.extensions.client.SubmitType;
-import java.io.Serializable;
-import java.util.Objects;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ConflictKeyProto;
+import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
-public class ConflictKey implements Serializable {
- private static final long serialVersionUID = 2L;
-
- private final ObjectId commit;
- private final ObjectId otherCommit;
- private final SubmitType submitType;
- private final boolean contentMerge;
-
- public ConflictKey(
- ObjectId commit, ObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
- if (SubmitType.FAST_FORWARD_ONLY.equals(submitType) || commit.compareTo(otherCommit) < 0) {
- this.commit = commit;
- this.otherCommit = otherCommit;
- } else {
- this.commit = otherCommit;
- this.otherCommit = commit;
+@AutoValue
+public abstract class ConflictKey {
+ public static ConflictKey create(
+ AnyObjectId commit, AnyObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
+ ObjectId commitCopy = commit.copy();
+ ObjectId otherCommitCopy = otherCommit.copy();
+ if (submitType == SubmitType.FAST_FORWARD_ONLY) {
+ // The conflict check for FF-only is non-symmetrical, and we need to treat (X, Y) differently
+ // from (Y, X). Store the commits in the input order.
+ return new AutoValue_ConflictKey(commitCopy, otherCommitCopy, submitType, contentMerge);
}
- this.submitType = submitType;
- this.contentMerge = contentMerge;
+ // Otherwise, the check is symmetrical; sort commit/otherCommit before storing, so the actual
+ // key is independent of the order in which they are passed to this method.
+ return new AutoValue_ConflictKey(
+ Ordering.natural().min(commitCopy, otherCommitCopy),
+ Ordering.natural().max(commitCopy, otherCommitCopy),
+ submitType,
+ contentMerge);
}
- public ObjectId getCommit() {
- return commit;
+ @VisibleForTesting
+ static ConflictKey createWithoutNormalization(
+ AnyObjectId commit, AnyObjectId otherCommit, SubmitType submitType, boolean contentMerge) {
+ return new AutoValue_ConflictKey(commit.copy(), otherCommit.copy(), submitType, contentMerge);
}
- public ObjectId getOtherCommit() {
- return otherCommit;
- }
+ public abstract ObjectId commit();
- public SubmitType getSubmitType() {
- return submitType;
- }
+ public abstract ObjectId otherCommit();
- public boolean isContentMerge() {
- return contentMerge;
- }
+ public abstract SubmitType submitType();
- @Override
- public boolean equals(Object o) {
- if (!(o instanceof ConflictKey)) {
- return false;
+ public abstract boolean contentMerge();
+
+ public static enum Serializer implements CacheSerializer<ConflictKey> {
+ INSTANCE;
+
+ private static final Converter<String, SubmitType> SUBMIT_TYPE_CONVERTER =
+ Enums.stringConverter(SubmitType.class);
+
+ @Override
+ public byte[] serialize(ConflictKey object) {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return ProtoCacheSerializers.toByteArray(
+ ConflictKeyProto.newBuilder()
+ .setCommit(idConverter.toByteString(object.commit()))
+ .setOtherCommit(idConverter.toByteString(object.otherCommit()))
+ .setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.submitType()))
+ .setContentMerge(object.contentMerge())
+ .build());
}
- ConflictKey other = (ConflictKey) o;
- return commit.equals(other.commit)
- && otherCommit.equals(other.otherCommit)
- && submitType.equals(other.submitType)
- && contentMerge == other.contentMerge;
- }
- @Override
- public int hashCode() {
- return Objects.hash(commit, otherCommit, submitType, contentMerge);
+ @Override
+ public ConflictKey deserialize(byte[] in) {
+ ConflictKeyProto proto = ProtoCacheSerializers.parseUnchecked(ConflictKeyProto.parser(), in);
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ return create(
+ idConverter.fromByteString(proto.getCommit()),
+ idConverter.fromByteString(proto.getOtherCommit()),
+ SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
+ proto.getContentMerge());
+ }
}
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictsCache.java b/java/com/google/gerrit/server/query/change/ConflictsCache.java
index e8b2fef..c7ee79b 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsCache.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsCache.java
@@ -18,7 +18,7 @@
public interface ConflictsCache {
- void put(ConflictKey key, Boolean value);
+ void put(ConflictKey key, boolean value);
@Nullable
Boolean getIfPresent(ConflictKey key);
diff --git a/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java b/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
index 1185677..0b8c5ee 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsCacheImpl.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.query.change;
import com.google.common.cache.Cache;
+import com.google.gerrit.server.cache.BooleanCacheSerializer;
import com.google.gerrit.server.cache.CacheModule;
import com.google.inject.Inject;
import com.google.inject.Module;
@@ -29,7 +30,11 @@
return new CacheModule() {
@Override
protected void configure() {
- persist(NAME, ConflictKey.class, Boolean.class).maximumWeight(37400);
+ persist(NAME, ConflictKey.class, Boolean.class)
+ .version(1)
+ .keySerializer(ConflictKey.Serializer.INSTANCE)
+ .valueSerializer(BooleanCacheSerializer.INSTANCE)
+ .maximumWeight(37400);
bind(ConflictsCache.class).to(ConflictsCacheImpl.class);
}
};
@@ -43,7 +48,7 @@
}
@Override
- public void put(ConflictKey key, Boolean value) {
+ public void put(ConflictKey key, boolean value) {
conflictsCache.put(key, value);
}
diff --git a/java/com/google/gerrit/server/query/change/ConflictsPredicate.java b/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
index f870951..7dc7a0b 100644
--- a/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
+++ b/java/com/google/gerrit/server/query/change/ConflictsPredicate.java
@@ -115,19 +115,19 @@
ObjectId other = ObjectId.fromString(object.currentPatchSet().getRevision().get());
ConflictKey conflictsKey =
- new ConflictKey(
+ ConflictKey.create(
changeDataCache.getTestAgainst(),
other,
str.type,
projectState.is(BooleanProjectConfig.USE_CONTENT_MERGE));
- Boolean conflicts = args.conflictsCache.getIfPresent(conflictsKey);
- if (conflicts != null) {
- return conflicts;
+ Boolean maybeConflicts = args.conflictsCache.getIfPresent(conflictsKey);
+ if (maybeConflicts != null) {
+ return maybeConflicts;
}
try (Repository repo = args.repoManager.openRepository(otherChange.getProject());
CodeReviewRevWalk rw = CodeReviewCommit.newRevWalk(repo)) {
- conflicts =
+ boolean conflicts =
!args.submitDryRun.run(
str.type,
repo,
diff --git a/javatests/com/google/gerrit/server/BUILD b/javatests/com/google/gerrit/server/BUILD
index 1b11dd65..3113a8a 100644
--- a/javatests/com/google/gerrit/server/BUILD
+++ b/javatests/com/google/gerrit/server/BUILD
@@ -65,6 +65,7 @@
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/truth",
"//lib/truth:truth-java8-extension",
+ "//lib/truth:truth-proto-extension",
"//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
index 586c065..5e93a09 100644
--- a/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
+++ b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
@@ -1,6 +1,7 @@
package com.google.gerrit.server.auth.oauth;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
import com.google.common.collect.ImmutableMap;
diff --git a/javatests/com/google/gerrit/server/cache/BUILD b/javatests/com/google/gerrit/server/cache/BUILD
index 278330b..ab88169 100644
--- a/javatests/com/google/gerrit/server/cache/BUILD
+++ b/javatests/com/google/gerrit/server/cache/BUILD
@@ -5,12 +5,16 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/server",
+ "//java/com/google/gerrit/server/cache/testing",
"//lib:guava",
"//lib:gwtorm",
"//lib:junit",
"//lib:protobuf",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
+ "//lib/jgit/org.eclipse.jgit:jgit",
"//lib/truth",
+ "//lib/truth:truth-proto-extension",
+ "//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java b/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java
new file mode 100644
index 0000000..8bf9762
--- /dev/null
+++ b/javatests/com/google/gerrit/server/cache/ProtoCacheSerializersTest.java
@@ -0,0 +1,116 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.protobuf.ByteString;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public class ProtoCacheSerializersTest {
+ @Test
+ public void objectIdFromByteString() {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ assertThat(
+ idConverter.fromByteString(
+ bytes(
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa)))
+ .isEqualTo(ObjectId.fromString("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ assertThat(
+ idConverter.fromByteString(
+ bytes(
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb)))
+ .isEqualTo(ObjectId.fromString("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ }
+
+ @Test
+ public void objectIdFromByteStringWrongSize() {
+ try {
+ ObjectIdConverter.create().fromByteString(ByteString.copyFromUtf8("foo"));
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void objectIdToByteString() {
+ ObjectIdConverter idConverter = ObjectIdConverter.create();
+ assertThat(
+ idConverter.toByteString(
+ ObjectId.fromString("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")))
+ .isEqualTo(
+ bytes(
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
+ 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa));
+ assertThat(
+ idConverter.toByteString(
+ ObjectId.fromString("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb")))
+ .isEqualTo(
+ bytes(
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
+ 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb));
+ }
+
+ @Test
+ public void parseUncheckedWrongProtoType() {
+ ChangeNotesKeyProto proto =
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(ByteString.copyFromUtf8("foo"))
+ .build();
+ byte[] bytes = ProtoCacheSerializers.toByteArray(proto);
+ try {
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), bytes);
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void parseUncheckedInvalidData() {
+ byte[] bytes = new byte[] {0x00};
+ try {
+ ProtoCacheSerializers.parseUnchecked(ChangeNotesStateProto.parser(), bytes);
+ assert_().fail("expected IllegalArgumentException");
+ } catch (IllegalArgumentException e) {
+ // Expected.
+ }
+ }
+
+ @Test
+ public void parseUnchecked() {
+ ChangeNotesKeyProto proto =
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(ByteString.copyFromUtf8("foo"))
+ .build();
+ byte[] bytes = ProtoCacheSerializers.toByteArray(proto);
+ assertThat(ProtoCacheSerializers.parseUnchecked(ChangeNotesKeyProto.parser(), bytes))
+ .isEqualTo(proto);
+ }
+}
diff --git a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
index 5b77094..b0d7ae4 100644
--- a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
+++ b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.change;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
diff --git a/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
index 69fc531..c8e6f2b 100644
--- a/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
+++ b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.change;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
new file mode 100644
index 0000000..5a7d812
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
@@ -0,0 +1,60 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.Project;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public final class ChangeNotesCacheTest {
+ @Test
+ public void keySerializer() throws Exception {
+ ChangeNotesCache.Key key =
+ ChangeNotesCache.Key.create(
+ new Project.NameKey("project"),
+ new Change.Id(1234),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"));
+ byte[] serialized = ChangeNotesCache.Key.Serializer.INSTANCE.serialize(key);
+ assertThat(ChangeNotesKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .build());
+ assertThat(ChangeNotesCache.Key.Serializer.INSTANCE.deserialize(serialized)).isEqualTo(key);
+ }
+
+ @Test
+ public void keyMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesCache.Key.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "project", Project.NameKey.class,
+ "changeId", Change.Id.class,
+ "id", ObjectId.class));
+ }
+}
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
index d974877..b8f544a 100644
--- a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
@@ -442,17 +442,17 @@
// Change created in WIP remains in WIP.
RevCommit commit = writeCommit("Update WIP change\n" + "\n" + "Patch-set: 1\n", true);
ChangeNotesState state = newParser(commit).parseAll();
- assertThat(state.columns().hasReviewStarted()).isFalse();
+ assertThat(state.columns().reviewStarted()).isFalse();
// Moving change out of WIP starts review.
commit =
writeCommit("New ready change\n" + "\n" + "Patch-set: 1\n" + "Work-in-progress: false\n");
state = newParser(commit).parseAll();
- assertThat(state.columns().hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
// Change created not in WIP has always been in review started state.
state = assertParseSucceeds("New change that doesn't declare WIP\n" + "\n" + "Patch-set: 1\n");
- assertThat(state.columns().hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
}
@Test
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
new file mode 100644
index 0000000..3d65eae
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
@@ -0,0 +1,946 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.ProtoCacheSerializers.toByteString;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableListMultimap;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
+import com.google.gerrit.common.data.SubmitRecord;
+import com.google.gerrit.common.data.SubmitRequirement;
+import com.google.gerrit.reviewdb.client.Account;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
+import com.google.gerrit.reviewdb.client.Comment;
+import com.google.gerrit.reviewdb.client.LabelId;
+import com.google.gerrit.reviewdb.client.PatchSet;
+import com.google.gerrit.reviewdb.client.PatchSetApproval;
+import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.ReviewerByEmailSet;
+import com.google.gerrit.server.ReviewerSet;
+import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.ProtoCacheSerializers.ObjectIdConverter;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.mail.Address;
+import com.google.gerrit.server.notedb.ChangeNotesState.ChangeColumns;
+import com.google.gerrit.server.notedb.ChangeNotesState.Serializer;
+import com.google.gwtorm.client.KeyUtil;
+import com.google.gwtorm.server.StandardKeyEncoder;
+import com.google.inject.TypeLiteral;
+import com.google.protobuf.ByteString;
+import java.lang.reflect.Type;
+import java.sql.Timestamp;
+import java.util.List;
+import java.util.Map;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ChangeNotesStateTest {
+ static {
+ KeyUtil.setEncoderImpl(new StandardKeyEncoder());
+ }
+
+ private static final Change.Id ID = new Change.Id(123);
+ private static final ObjectId SHA =
+ ObjectId.fromString("1234567812345678123456781234567812345678");
+ private static final ByteString SHA_BYTES = ObjectIdConverter.create().toByteString(SHA);
+ private static final String CHANGE_KEY = "Iabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd";
+
+ private ChangeColumns cols;
+ private ChangeColumnsProto colsProto;
+
+ @Before
+ public void setUp() throws Exception {
+ cols =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(CHANGE_KEY))
+ .createdOn(new Timestamp(123456L))
+ .lastUpdatedOn(new Timestamp(234567L))
+ .owner(new Account.Id(1000))
+ .branch("refs/heads/master")
+ .subject("Test change")
+ .isPrivate(false)
+ .workInProgress(false)
+ .reviewStarted(true)
+ .build();
+ colsProto = toProto(newBuilder().build()).getColumns();
+ }
+
+ private ChangeNotesState.Builder newBuilder() {
+ return ChangeNotesState.Builder.empty(ID).metaId(SHA).columns(cols);
+ }
+
+ @Test
+ public void serializeChangeKey() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(
+ cols.toBuilder()
+ .changeKey(new Change.Key("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto.toBuilder().setChangeKey("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build());
+ }
+
+ @Test
+ public void serializeCreatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().createdOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCreatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeLastUpdatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().lastUpdatedOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setLastUpdatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeOwner() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().owner(new Account.Id(7777)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setOwner(7777))
+ .build());
+ }
+
+ @Test
+ public void serializeBranch() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().branch("refs/heads/bar").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setBranch("refs/heads/bar"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().subject("A different test change").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubject("A different test change"))
+ .build());
+ }
+
+ @Test
+ public void serializeCurrentPatchSetId() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().currentPatchSetId(new PatchSet.Id(ID, 2)).build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCurrentPatchSetId(2).setHasCurrentPatchSetId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNullTopic() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().topic(null).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .build());
+ }
+
+ @Test
+ public void serializeEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNonEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("topic").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("topic").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeOriginalSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().originalSubject("The first patch set").build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto
+ .toBuilder()
+ .setOriginalSubject("The first patch set")
+ .setHasOriginalSubject(true))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmissionId() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().submissionId("xyz").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubmissionId("xyz").setHasSubmissionId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeAssignee() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().assignee(new Account.Id(2000)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setAssignee(2000).setHasAssignee(true))
+ .build());
+ }
+
+ @Test
+ public void serializeStatus() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().status(Change.Status.MERGED).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setStatus("MERGED").setHasStatus(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsPrivate() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().isPrivate(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setIsPrivate(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsWorkInProgress() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().workInProgress(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setWorkInProgress(true))
+ .build());
+ }
+
+ @Test
+ public void serializeHasReviewStarted() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().reviewStarted(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setReviewStarted(true))
+ .build());
+ }
+
+ @Test
+ public void serializeRevertOf() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().revertOf(new Change.Id(999)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setRevertOf(999).setHasRevertOf(true))
+ .build());
+ }
+
+ @Test
+ public void serializePastAssignees() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pastAssignees(ImmutableSet.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastAssignee(2002)
+ .addPastAssignee(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeHashtags() throws Exception {
+ assertRoundTrip(
+ newBuilder().hashtags(ImmutableSet.of("tag2", "tag1")).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addHashtag("tag2")
+ .addHashtag("tag1")
+ .build());
+ }
+
+ @Test
+ public void serializePatchSets() throws Exception {
+ PatchSet ps1 = new PatchSet(new PatchSet.Id(ID, 1));
+ ps1.setUploader(new Account.Id(2000));
+ ps1.setRevision(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ ps1.setCreatedOn(cols.createdOn());
+ ByteString ps1Bytes = toByteString(ps1, PATCH_SET_CODEC);
+ assertThat(ps1Bytes.size()).isEqualTo(66);
+
+ PatchSet ps2 = new PatchSet(new PatchSet.Id(ID, 2));
+ ps2.setUploader(new Account.Id(3000));
+ ps2.setRevision(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ ps2.setCreatedOn(cols.lastUpdatedOn());
+ ByteString ps2Bytes = toByteString(ps2, PATCH_SET_CODEC);
+ assertThat(ps2Bytes.size()).isEqualTo(66);
+ assertThat(ps2Bytes).isNotEqualTo(ps1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .patchSets(ImmutableMap.of(ps2.getId(), ps2, ps1.getId(), ps1).entrySet())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPatchSet(ps2Bytes)
+ .addPatchSet(ps1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeApprovals() throws Exception {
+ PatchSetApproval a1 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2001), new LabelId("Code-Review")),
+ (short) 1,
+ new Timestamp(1212L));
+ ByteString a1Bytes = toByteString(a1, APPROVAL_CODEC);
+ assertThat(a1Bytes.size()).isEqualTo(43);
+
+ PatchSetApproval a2 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2002), new LabelId("Verified")),
+ (short) -1,
+ new Timestamp(3434L));
+ ByteString a2Bytes = toByteString(a2, APPROVAL_CODEC);
+ assertThat(a2Bytes.size()).isEqualTo(49);
+ assertThat(a2Bytes).isNotEqualTo(a1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .approvals(
+ ImmutableListMultimap.of(a2.getPatchSetId(), a2, a1.getPatchSetId(), a1).entries())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addApproval(a2Bytes)
+ .addApproval(a1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmailWithNullName() throws Exception {
+ ChangeNotesState actual =
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.of(
+ ReviewerStateInternal.CC,
+ new Address("emailonly@example.com"),
+ new Timestamp(1212L))))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("emailonly@example.com")
+ .setTimestamp(1212L))
+ .build());
+
+ // Address doesn't consider the name field in equals, so we have to check it manually.
+ // TODO(dborowitz): Fix Address#equals.
+ ImmutableSet<Address> ccs = actual.reviewersByEmail().byState(ReviewerStateInternal.CC);
+ assertThat(ccs).hasSize(1);
+ Address address = Iterables.getOnlyElement(ccs);
+ assertThat(address.getName()).isNull();
+ assertThat(address.getEmail()).isEqualTo("emailonly@example.com");
+ }
+
+ @Test
+ public void serializePendingReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializePendingReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeAllPastReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .allPastReviewers(ImmutableList.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastReviewer(2002)
+ .addPastReviewer(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewerUpdates() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewerUpdates(
+ ImmutableList.of(
+ ReviewerStatusUpdate.create(
+ new Timestamp(1212L),
+ new Account.Id(1000),
+ new Account.Id(2002),
+ ReviewerStateInternal.CC),
+ ReviewerStatusUpdate.create(
+ new Timestamp(3434L),
+ new Account.Id(1000),
+ new Account.Id(2001),
+ ReviewerStateInternal.REVIEWER)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(1212L)
+ .setUpdatedBy(1000)
+ .setReviewer(2002)
+ .setState("CC"))
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(3434L)
+ .setUpdatedBy(1000)
+ .setReviewer(2001)
+ .setState("REVIEWER"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmitRecords() throws Exception {
+ SubmitRecord sr1 = new SubmitRecord();
+ sr1.status = SubmitRecord.Status.OK;
+
+ SubmitRecord sr2 = new SubmitRecord();
+ sr2.status = SubmitRecord.Status.FORCED;
+
+ assertRoundTrip(
+ newBuilder().submitRecords(ImmutableList.of(sr2, sr1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addSubmitRecord("{\"status\":\"FORCED\"}")
+ .addSubmitRecord("{\"status\":\"OK\"}")
+ .build());
+ }
+
+ @Test
+ public void serializeChangeMessages() throws Exception {
+ ChangeMessage m1 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid1"),
+ new Account.Id(1000),
+ new Timestamp(1212L),
+ new PatchSet.Id(ID, 1));
+ ByteString m1Bytes = toByteString(m1, MESSAGE_CODEC);
+ assertThat(m1Bytes.size()).isEqualTo(35);
+
+ ChangeMessage m2 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid2"),
+ new Account.Id(2000),
+ new Timestamp(3434L),
+ new PatchSet.Id(ID, 2));
+ ByteString m2Bytes = toByteString(m2, MESSAGE_CODEC);
+ assertThat(m2Bytes.size()).isEqualTo(35);
+ assertThat(m2Bytes).isNotEqualTo(m1Bytes);
+
+ assertRoundTrip(
+ newBuilder().changeMessages(ImmutableList.of(m2, m1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addChangeMessage(m2Bytes)
+ .addChangeMessage(m1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializePublishedComments() throws Exception {
+ Comment c1 =
+ new Comment(
+ new Comment.Key("uuid1", "file1", 1),
+ new Account.Id(1001),
+ new Timestamp(1212L),
+ (short) 1,
+ "message 1",
+ "serverId",
+ false);
+ c1.setRevId(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ String c1Json = Serializer.GSON.toJson(c1);
+
+ Comment c2 =
+ new Comment(
+ new Comment.Key("uuid2", "file2", 2),
+ new Account.Id(1002),
+ new Timestamp(3434L),
+ (short) 2,
+ "message 2",
+ "serverId",
+ true);
+ c2.setRevId(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ String c2Json = Serializer.GSON.toJson(c2);
+
+ assertRoundTrip(
+ newBuilder()
+ .publishedComments(
+ ImmutableListMultimap.of(new RevId(c2.revId), c2, new RevId(c1.revId), c1))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPublishedComment(c2Json)
+ .addPublishedComment(c1Json)
+ .build());
+ }
+
+ @Test
+ public void serializeReadOnlyUntil() throws Exception {
+ assertRoundTrip(
+ newBuilder().readOnlyUntil(new Timestamp(1212L)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .setReadOnlyUntil(1212L)
+ .setHasReadOnlyUntil(true)
+ .build());
+ }
+
+ @Test
+ public void changeNotesStateMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesState.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("metaId", ObjectId.class)
+ .put("changeId", Change.Id.class)
+ .put("columns", ChangeColumns.class)
+ .put("pastAssignees", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType())
+ .put("hashtags", new TypeLiteral<ImmutableSet<String>>() {}.getType())
+ .put(
+ "patchSets",
+ new TypeLiteral<ImmutableList<Map.Entry<PatchSet.Id, PatchSet>>>() {}.getType())
+ .put(
+ "approvals",
+ new TypeLiteral<
+ ImmutableList<Map.Entry<PatchSet.Id, PatchSetApproval>>>() {}.getType())
+ .put("reviewers", ReviewerSet.class)
+ .put("reviewersByEmail", ReviewerByEmailSet.class)
+ .put("pendingReviewers", ReviewerSet.class)
+ .put("pendingReviewersByEmail", ReviewerByEmailSet.class)
+ .put("allPastReviewers", new TypeLiteral<ImmutableList<Account.Id>>() {}.getType())
+ .put(
+ "reviewerUpdates",
+ new TypeLiteral<ImmutableList<ReviewerStatusUpdate>>() {}.getType())
+ .put("submitRecords", new TypeLiteral<ImmutableList<SubmitRecord>>() {}.getType())
+ .put("changeMessages", new TypeLiteral<ImmutableList<ChangeMessage>>() {}.getType())
+ .put(
+ "publishedComments",
+ new TypeLiteral<ImmutableListMultimap<RevId, Comment>>() {}.getType())
+ .put("readOnlyUntil", Timestamp.class)
+ .build());
+ }
+
+ @Test
+ public void changeColumnsMethods() throws Exception {
+ assertThatSerializedClass(ChangeColumns.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("changeKey", Change.Key.class)
+ .put("createdOn", Timestamp.class)
+ .put("lastUpdatedOn", Timestamp.class)
+ .put("owner", Account.Id.class)
+ .put("branch", String.class)
+ .put("currentPatchSetId", PatchSet.Id.class)
+ .put("subject", String.class)
+ .put("topic", String.class)
+ .put("originalSubject", String.class)
+ .put("submissionId", String.class)
+ .put("assignee", Account.Id.class)
+ .put("status", Change.Status.class)
+ .put("isPrivate", boolean.class)
+ .put("workInProgress", boolean.class)
+ .put("reviewStarted", boolean.class)
+ .put("revertOf", Change.Id.class)
+ .put("toBuilder", ChangeNotesState.ChangeColumns.Builder.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetFields() throws Exception {
+ assertThatSerializedClass(PatchSet.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("id", PatchSet.Id.class)
+ .put("revision", RevId.class)
+ .put("uploader", Account.Id.class)
+ .put("createdOn", Timestamp.class)
+ .put("groups", String.class)
+ .put("pushCertificate", String.class)
+ .put("description", String.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetApprovalFields() throws Exception {
+ assertThatSerializedClass(PatchSetApproval.Key.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("patchSetId", PatchSet.Id.class)
+ .put("accountId", Account.Id.class)
+ .put("categoryId", LabelId.class)
+ .build());
+ assertThatSerializedClass(PatchSetApproval.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", PatchSetApproval.Key.class)
+ .put("value", short.class)
+ .put("granted", Timestamp.class)
+ .put("tag", String.class)
+ .put("realAccountId", Account.Id.class)
+ .put("postSubmit", boolean.class)
+ .build());
+ }
+
+ @Test
+ public void reviewerSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<
+ ReviewerStateInternal, Account.Id, Timestamp>>() {}.getType(),
+ "accounts", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerByEmailSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerByEmailSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<ReviewerStateInternal, Address, Timestamp>>() {}.getType(),
+ "users", new TypeLiteral<ImmutableSet<Address>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerStatusUpdateMethods() throws Exception {
+ assertThatSerializedClass(ReviewerStatusUpdate.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "date", Timestamp.class,
+ "updatedBy", Account.Id.class,
+ "reviewer", Account.Id.class,
+ "state", ReviewerStateInternal.class));
+ }
+
+ @Test
+ public void submitRecordFields() throws Exception {
+ assertThatSerializedClass(SubmitRecord.class)
+ .hasFields(
+ ImmutableMap.of(
+ "status",
+ SubmitRecord.Status.class,
+ "labels",
+ new TypeLiteral<List<SubmitRecord.Label>>() {}.getType(),
+ "requirements",
+ new TypeLiteral<List<SubmitRequirement>>() {}.getType(),
+ "errorMessage",
+ String.class));
+ assertThatSerializedClass(SubmitRecord.Label.class)
+ .hasFields(
+ ImmutableMap.of(
+ "label", String.class,
+ "status", SubmitRecord.Label.Status.class,
+ "appliedBy", Account.Id.class));
+ assertThatSerializedClass(SubmitRequirement.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "fallbackText", String.class,
+ "type", String.class,
+ "data", new TypeLiteral<ImmutableMap<String, String>>() {}.getType()));
+ }
+
+ @Test
+ public void changeMessageFields() throws Exception {
+ assertThatSerializedClass(ChangeMessage.Key.class)
+ .hasFields(ImmutableMap.of("changeId", Change.Id.class, "uuid", String.class));
+ assertThatSerializedClass(ChangeMessage.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", ChangeMessage.Key.class)
+ .put("author", Account.Id.class)
+ .put("writtenOn", Timestamp.class)
+ .put("message", String.class)
+ .put("patchset", PatchSet.Id.class)
+ .put("tag", String.class)
+ .put("realAuthor", Account.Id.class)
+ .build());
+ }
+
+ @Test
+ public void commentFields() throws Exception {
+ assertThatSerializedClass(Comment.Key.class)
+ .hasFields(
+ ImmutableMap.of(
+ "uuid", String.class, "filename", String.class, "patchSetId", int.class));
+ assertThatSerializedClass(Comment.Identity.class).hasFields(ImmutableMap.of("id", int.class));
+ assertThatSerializedClass(Comment.Range.class)
+ .hasFields(
+ ImmutableMap.of(
+ "startLine", int.class,
+ "startChar", int.class,
+ "endLine", int.class,
+ "endChar", int.class));
+ assertThatSerializedClass(Comment.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", Comment.Key.class)
+ .put("lineNbr", int.class)
+ .put("author", Comment.Identity.class)
+ .put("realAuthor", Comment.Identity.class)
+ .put("writtenOn", Timestamp.class)
+ .put("side", short.class)
+ .put("message", String.class)
+ .put("parentUuid", String.class)
+ .put("range", Comment.Range.class)
+ .put("tag", String.class)
+ .put("revId", String.class)
+ .put("serverId", String.class)
+ .put("unresolved", boolean.class)
+ .put("legacyFormat", boolean.class)
+ .build());
+ }
+
+ private static ChangeNotesStateProto toProto(ChangeNotesState state) throws Exception {
+ return ChangeNotesStateProto.parseFrom(Serializer.INSTANCE.serialize(state));
+ }
+
+ private static ChangeNotesState assertRoundTrip(
+ ChangeNotesState state, ChangeNotesStateProto expectedProto) throws Exception {
+ ChangeNotesStateProto actualProto = toProto(state);
+ assertThat(actualProto).isEqualTo(expectedProto);
+ ChangeNotesState actual = Serializer.INSTANCE.deserialize(Serializer.INSTANCE.serialize(state));
+ assertThat(actual).isEqualTo(state);
+ // It's possible that ChangeNotesState contains objects which implement equals without taking
+ // into account all fields. Return the actual deserialized instance so that callers can perform
+ // additional assertions if necessary.
+ return actual;
+ }
+}
diff --git a/javatests/com/google/gerrit/server/query/change/BUILD b/javatests/com/google/gerrit/server/query/change/BUILD
index 78ec176..09e3243 100644
--- a/javatests/com/google/gerrit/server/query/change/BUILD
+++ b/javatests/com/google/gerrit/server/query/change/BUILD
@@ -28,13 +28,12 @@
],
)
+LUCENE_QUERY_TEST = ["LuceneQueryChangesTest.java"]
+
junit_tests(
name = "lucene_query_test",
size = "large",
- srcs = glob(
- ["*.java"],
- exclude = ABSTRACT_QUERY_TEST,
- ),
+ srcs = LUCENE_QUERY_TEST,
visibility = ["//visibility:public"],
deps = [
":abstract_query_tests",
@@ -50,3 +49,26 @@
"//lib/truth",
],
)
+
+junit_tests(
+ name = "small_tests",
+ size = "small",
+ srcs = glob(
+ ["*.java"],
+ exclude = ABSTRACT_QUERY_TEST + LUCENE_QUERY_TEST,
+ ),
+ visibility = ["//visibility:public"],
+ deps = [
+ "//java/com/google/gerrit/extensions:api",
+ "//java/com/google/gerrit/reviewdb:server",
+ "//java/com/google/gerrit/server",
+ "//java/com/google/gerrit/server/cache/testing",
+ "//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
+ "//lib:gwtorm",
+ "//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-proto-extension",
+ "//proto:cache_java_proto",
+ ],
+)
diff --git a/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java b/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java
new file mode 100644
index 0000000..b87bbf7
--- /dev/null
+++ b/javatests/com/google/gerrit/server/query/change/ConflictKeyTest.java
@@ -0,0 +1,98 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.query.change;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.extensions.client.SubmitType.FAST_FORWARD_ONLY;
+import static com.google.gerrit.extensions.client.SubmitType.MERGE_IF_NECESSARY;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.extensions.client.SubmitType;
+import com.google.gerrit.server.cache.proto.Cache.ConflictKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public class ConflictKeyTest {
+ @Test
+ public void ffOnlyPreservesInputOrder() {
+ ObjectId id1 = ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee");
+ ObjectId id2 = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ ConflictKey id1First = ConflictKey.create(id1, id2, FAST_FORWARD_ONLY, true);
+ ConflictKey id2First = ConflictKey.create(id2, id1, FAST_FORWARD_ONLY, true);
+
+ assertThat(id1First)
+ .isEqualTo(ConflictKey.createWithoutNormalization(id1, id2, FAST_FORWARD_ONLY, true));
+ assertThat(id2First)
+ .isEqualTo(ConflictKey.createWithoutNormalization(id2, id1, FAST_FORWARD_ONLY, true));
+ assertThat(id1First).isNotEqualTo(id2First);
+ }
+
+ @Test
+ public void nonFfOnlyNormalizesInputOrder() {
+ ObjectId id1 = ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee");
+ ObjectId id2 = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ ConflictKey id1First = ConflictKey.create(id1, id2, MERGE_IF_NECESSARY, true);
+ ConflictKey id2First = ConflictKey.create(id2, id1, MERGE_IF_NECESSARY, true);
+ ConflictKey expected =
+ ConflictKey.createWithoutNormalization(id1, id2, MERGE_IF_NECESSARY, true);
+
+ assertThat(id1First).isEqualTo(expected);
+ assertThat(id2First).isEqualTo(expected);
+ }
+
+ @Test
+ public void serializer() throws Exception {
+ ConflictKey key =
+ ConflictKey.create(
+ ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee"),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"),
+ SubmitType.MERGE_IF_NECESSARY,
+ false);
+ byte[] serialized = ConflictKey.Serializer.INSTANCE.serialize(key);
+ assertThat(ConflictKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ ConflictKeyProto.newBuilder()
+ .setCommit(
+ bytes(
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee,
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee))
+ .setOtherCommit(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .setSubmitType("MERGE_IF_NECESSARY")
+ .setContentMerge(false)
+ .build());
+ assertThat(ConflictKey.Serializer.INSTANCE.deserialize(serialized)).isEqualTo(key);
+ }
+
+ /**
+ * See {@link com.google.gerrit.server.cache.testing.SerializedClassSubject} for background and
+ * what to do if this test fails.
+ */
+ @Test
+ public void methods() throws Exception {
+ assertThatSerializedClass(ConflictKey.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "commit", ObjectId.class,
+ "otherCommit", ObjectId.class,
+ "submitType", SubmitType.class,
+ "contentMerge", boolean.class));
+ }
+}
diff --git a/lib/truth/BUILD b/lib/truth/BUILD
index cb17269..82cd98a 100644
--- a/lib/truth/BUILD
+++ b/lib/truth/BUILD
@@ -19,3 +19,31 @@
"//lib:guava",
],
)
+
+java_library(
+ name = "truth-liteproto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:private"],
+ exports = ["@truth-liteproto-extension//jar"],
+ runtime_deps = [
+ ":truth",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
+
+java_library(
+ name = "truth-proto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = [
+ ":truth-liteproto-extension",
+ "@truth-proto-extension//jar",
+ ],
+ runtime_deps = [
+ ":truth",
+ ":truth-liteproto-extension",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
diff --git a/plugins/codemirror-editor b/plugins/codemirror-editor
index c97e280..ee50e45 160000
--- a/plugins/codemirror-editor
+++ b/plugins/codemirror-editor
@@ -1 +1 @@
-Subproject commit c97e2806532cff00fea6424cde0d440f9ea5016d
+Subproject commit ee50e45b449e282ed78917175daf8b359da8d943
diff --git a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
index 4d53631..04d8b6e 100644
--- a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
+++ b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
@@ -51,7 +51,6 @@
detached() {
this._handleHideTooltip();
- this.unlisten(window, 'scroll', '_handleWindowScroll');
},
_setupTooltipListeners() {
@@ -59,9 +58,6 @@
this._hasSetupTooltipListeners = true;
this.addEventListener('mouseenter', this._handleShowTooltip.bind(this));
- this.addEventListener('mouseleave', this._handleHideTooltip.bind(this));
- this.addEventListener('tap', this._handleHideTooltip.bind(this));
- this.listen(window, 'scroll', '_handleWindowScroll');
},
_handleShowTooltip(e) {
@@ -91,6 +87,9 @@
tooltip.style.visibility = null;
this._tooltip = tooltip;
+ this.listen(window, 'scroll', '_handleWindowScroll');
+ this.listen(this, 'mouseleave', '_handleHideTooltip');
+ this.listen(this, 'tap', '_handleHideTooltip');
},
_handleHideTooltip(e) {
@@ -100,6 +99,9 @@
return;
}
+ this.unlisten(window, 'scroll', '_handleWindowScroll');
+ this.unlisten(this, 'mouseleave', '_handleHideTooltip');
+ this.unlisten(this, 'tap', '_handleHideTooltip');
this.setAttribute('title', this._titleText);
if (this._tooltip && this._tooltip.parentNode) {
this._tooltip.parentNode.removeChild(this._tooltip);
diff --git a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
index feaadc7..8512a5d 100644
--- a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
+++ b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
@@ -209,8 +209,7 @@
_handleDeleteItemConfirm() {
this.$.overlay.close();
if (this.detailType === DETAIL_TYPES.BRANCHES) {
- return this.$.restAPI.deleteRepoBranches(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoBranches(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
@@ -219,8 +218,7 @@
}
});
} else if (this.detailType === DETAIL_TYPES.TAGS) {
- return this.$.restAPI.deleteRepoTags(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoTags(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
diff --git a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
index 3f967c8..cfdf88c 100644
--- a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
+++ b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
@@ -1188,7 +1188,7 @@
}
const patchNum = revisionAction ? this.latestPatchNum : null;
return this.$.restAPI.getChangeURLAndSend(this.changeNum, method,
- patchNum, actionEndpoint, payload, handleError, this)
+ patchNum, actionEndpoint, payload, handleError)
.then(response => {
cleanupFn.call(this);
return response;
diff --git a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
index 29ffec8..cb9f4c5 100644
--- a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
+++ b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
@@ -57,6 +57,8 @@
UNIFIED: 'UNIFIED_DIFF',
};
+ const CHANGE_DATA_TIMING_LABEL = 'ChangeDataLoaded';
+
Polymer({
is: 'gr-change-view',
@@ -624,6 +626,8 @@
this.$.fileList.collapseAllDiffs();
this._patchRange = patchRange;
+ // If the change has already been loaded and the parameter change is only
+ // in the patch range, then don't do a full reload.
if (this._initialLoadComplete && patchChanged) {
if (patchRange.patchNum == null) {
patchRange.patchNum = this.computeLatestPatchNum(this._allPatchSets);
@@ -637,7 +641,7 @@
this._changeNum = value.changeNum;
this.$.relatedChanges.clear();
- this._reload().then(() => {
+ this._reload(true).then(() => {
this._performPostLoadTasks();
});
},
@@ -651,7 +655,6 @@
},
_performPostLoadTasks() {
- this.$.relatedChanges.reload();
this._maybeShowReplyDialog();
this._maybeShowRevertDialog();
@@ -1199,43 +1202,102 @@
});
},
- _reload() {
+ /**
+ * Reload the change.
+ * @param {boolean=} opt_reloadRelatedChanges Reloads the related changes
+ * when true.
+ * @return {Promise} A promise that resolves when the core data has loaded.
+ * Some non-core data loading may still be in-flight when the core data
+ * promise resolves.
+ */
+ _reload(opt_reloadRelatedChanges) {
this._loading = true;
this._relatedChangesCollapsed = true;
- const detailCompletes = this._getChangeDetail().then(() => {
- this._loading = false;
- this._getProjectConfig();
- });
+ // Array to house all promises related to data requests.
+ const allDataPromises = [];
- this._reloadComments();
+ // Resolves when the change detail and the edit patch set (if available)
+ // are loaded.
+ const detailCompletes = this._getChangeDetail();
+ allDataPromises.push(detailCompletes);
- let reloadPromise;
+ // Resolves when the loading flag is set to false, meaning that some
+ // change content may start appearing.
+ const loadingFlagSet = detailCompletes
+ .then(() => { this._loading = false; });
+ // Resolves when the project config has loaded.
+ const projectConfigLoaded = detailCompletes
+ .then(() => this._getProjectConfig());
+ allDataPromises.push(projectConfigLoaded);
+
+ // Resolves when change comments have loaded (comments, drafts and robot
+ // comments).
+ const commentsLoaded = this._reloadComments();
+ allDataPromises.push(commentsLoaded);
+
+ let coreDataPromise;
+
+ // If the patch number is specified
if (this._patchRange.patchNum) {
- reloadPromise = Promise.all([
- this._reloadPatchNumDependentResources(),
- detailCompletes,
- ]).then(() => {
- return Promise.all([
- this._getMergeability(),
- this.$.actions.reload(),
- ]);
- });
+ // Because a specific patchset is specified, reload the resources that
+ // are keyed by patch number or patch range.
+ const patchResourcesLoaded = this._reloadPatchNumDependentResources();
+ allDataPromises.push(patchResourcesLoaded);
+
+ // Promise resolves when the change detail and patch dependent resources
+ // have loaded.
+ const detailAndPatchResourcesLoaded =
+ Promise.all([patchResourcesLoaded, loadingFlagSet]);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = detailAndPatchResourcesLoaded
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+ // Promise resolves when the change actions have loaded.
+ const actionsLoaded = detailAndPatchResourcesLoaded
+ .then(() => this.$.actions.reload());
+ allDataPromises.push(actionsLoaded);
+
+ // The core data is loaded when both mergeability and actions are known.
+ coreDataPromise = Promise.all([mergeabilityLoaded, actionsLoaded]);
} else {
- // The patch number is reliant on the change detail request.
- reloadPromise = detailCompletes.then(() => {
- this.$.fileList.reload();
- if (!this._latestCommitMessage) {
- this._getLatestCommitMessage();
- }
- return this._getMergeability();
+ // Resolves when the file list has loaded.
+ const fileListReload = loadingFlagSet
+ .then(() => this.$.fileList.reload());
+ allDataPromises.push(fileListReload);
+
+ const latestCommitMessageLoaded = loadingFlagSet.then(() => {
+ // If the latest commit message is known, there is nothing to do.
+ if (this._latestCommitMessage) { return Promise.resolve(); }
+ return this._getLatestCommitMessage();
});
+ allDataPromises.push(latestCommitMessageLoaded);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = loadingFlagSet
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+ // Core data is loaded when mergeability has been loaded.
+ coreDataPromise = mergeabilityLoaded;
}
- return reloadPromise.then(() => {
- this.$.reporting.changeDisplayed();
+ if (opt_reloadRelatedChanges) {
+ const relatedChangesLoaded = coreDataPromise
+ .then(() => this.$.relatedChanges.reload());
+ allDataPromises.push(relatedChangesLoaded);
+ }
+
+ this.$.reporting.time(CHANGE_DATA_TIMING_LABEL);
+ Promise.all(allDataPromises).then(() => {
+ this.$.reporting.timeEnd(CHANGE_DATA_TIMING_LABEL);
});
+
+ return coreDataPromise
+ .then(() => { this.$.reporting.changeDisplayed(); });
},
/**
diff --git a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
index cf79a31..b824f1c 100644
--- a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
+++ b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
@@ -30,6 +30,7 @@
padding: 4.5em 1em 1em 1em;
}
header {
+ background-color: var(--dialog-background-color);
border-bottom: 1px solid var(--border-color);
left: 0;
padding: 1em;
diff --git a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
index bbe2877..d1ae719 100644
--- a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
+++ b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
@@ -31,7 +31,7 @@
color: var(--header-text-color);
}
--gr-dropdown-item: {
- color: var(--header-text-color);
+ color: var(--primary-text-color);
}
}
gr-avatar {
diff --git a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
index 81c6d99..540df98 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
@@ -108,7 +108,6 @@
cursor: pointer;
}
.content {
- overflow: hidden;
/* Set min width since setting width on table cells still
allows them to shrink. Do not set max width because
CJK (Chinese-Japanese-Korean) glyphs have variable width */
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
index cd9f9dc..017cd5d 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
@@ -15,11 +15,11 @@
limitations under the License.
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<link rel="import" href="../gr-syntax-lib-loader/gr-syntax-lib-loader.html">
+<link rel="import" href="../../shared/gr-lib-loader/gr-lib-loader.html">
<dom-module id="gr-syntax-layer">
<template>
- <gr-syntax-lib-loader id="libLoader"></gr-syntax-lib-loader>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
</template>
<script src="../gr-diff/gr-diff-line.js"></script>
<script src="../gr-diff-highlight/gr-annotation.js"></script>
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
index f8db343..15a8a0a 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
@@ -442,7 +442,7 @@
},
_loadHLJS() {
- return this.$.libLoader.get().then(hljs => {
+ return this.$.libLoader.getHLJS().then(hljs => {
this._hljs = hljs;
});
},
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
index 74fc3bf..f2458fc 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
@@ -185,7 +185,7 @@
const mockHLJS = getMockHLJS();
const highlightSpy = sinon.spy(mockHLJS, 'highlight');
- sandbox.stub(element.$.libLoader, 'get',
+ sandbox.stub(element.$.libLoader, 'getHLJS',
() => { return Promise.resolve(mockHLJS); });
const processNextSpy = sandbox.spy(element, '_processNextLine');
const processPromise = element.process();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js b/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
deleted file mode 100644
index 6ec7ab2..0000000
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * @license
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-(function() {
- 'use strict';
-
- const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
- const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
-
- Polymer({
- is: 'gr-syntax-lib-loader',
-
- properties: {
- _state: {
- type: Object,
-
- // NOTE: intended singleton.
- value: {
- configured: false,
- loading: false,
- callbacks: [],
- },
- },
- },
-
- get() {
- return new Promise((resolve, reject) => {
- // If the lib is totally loaded, resolve immediately.
- if (this._getHighlightLib()) {
- resolve(this._getHighlightLib());
- return;
- }
-
- // If the library is not currently being loaded, then start loading it.
- if (!this._state.loading) {
- this._state.loading = true;
- this._loadHLJS().then(this._onLibLoaded.bind(this)).catch(reject);
- }
-
- this._state.callbacks.push(resolve);
- });
- },
-
- _onLibLoaded() {
- const lib = this._getHighlightLib();
- this._state.loading = false;
- for (const cb of this._state.callbacks) {
- cb(lib);
- }
- this._state.callbacks = [];
- },
-
- _getHighlightLib() {
- const lib = window.hljs;
- if (lib && !this._state.configured) {
- this._state.configured = true;
-
- lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
- }
- return lib;
- },
-
- _getLibRoot() {
- if (this._cachedLibRoot) { return this._cachedLibRoot; }
-
- const appLink = document.head
- .querySelector('link[rel=import][href$="gr-app.html"]');
-
- if (!appLink) { return null; }
-
- return this._cachedLibRoot = appLink
- .href
- .match(LIB_ROOT_PATTERN)[1];
- },
- _cachedLibRoot: null,
-
- _loadHLJS() {
- return new Promise((resolve, reject) => {
- const script = document.createElement('script');
- const src = this._getHLJSUrl();
-
- if (!src) {
- reject(new Error('Unable to load blank HLJS url.'));
- return;
- }
-
- script.src = src;
- script.onload = resolve;
- script.onerror = reject;
- Polymer.dom(document.head).appendChild(script);
- });
- },
-
- _getHLJSUrl() {
- const root = this._getLibRoot();
- if (!root) { return null; }
- return root + HLJS_PATH;
- },
- });
-})();
diff --git a/polygerrit-ui/app/elements/gr-app.html b/polygerrit-ui/app/elements/gr-app.html
index d7d50d1..6a2bfe0 100644
--- a/polygerrit-ui/app/elements/gr-app.html
+++ b/polygerrit-ui/app/elements/gr-app.html
@@ -19,6 +19,11 @@
if (localStorage.getItem('USE_SHADOW_DOM') === 'true') {
window.Polymer = {
dom: 'shadow',
+ passiveTouchGestures: true,
+ };
+ } else if (!window.Polymer) {
+ window.Polymer = {
+ passiveTouchGestures: true,
};
}
</script>
@@ -34,8 +39,8 @@
<link rel="import" href="../behaviors/base-url-behavior/base-url-behavior.html">
<link rel="import" href="../behaviors/keyboard-shortcut-behavior/keyboard-shortcut-behavior.html">
-<link rel="import" href="../styles/app-theme.html">
<link rel="import" href="../styles/shared-styles.html">
+<link rel="import" href="../styles/themes/app-theme.html">
<link rel="import" href="./admin/gr-admin-view/gr-admin-view.html">
<link rel="import" href="./change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="./change-list/gr-dashboard-view/gr-dashboard-view.html">
@@ -56,6 +61,7 @@
<link rel="import" href="./settings/gr-registration-dialog/gr-registration-dialog.html">
<link rel="import" href="./settings/gr-settings-view/gr-settings-view.html">
<link rel="import" href="./shared/gr-fixed-panel/gr-fixed-panel.html">
+<link rel="import" href="./shared/gr-lib-loader/gr-lib-loader.html">
<link rel="import" href="./shared/gr-rest-api-interface/gr-rest-api-interface.html">
<script src="../scripts/util.js"></script>
@@ -229,6 +235,7 @@
<gr-plugin-host id="plugins"
config="[[_serverConfig]]">
</gr-plugin-host>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
<gr-external-style id="externalStyle" name="app-theme"></gr-external-style>
</template>
<script src="gr-app.js" crossorigin="anonymous"></script>
diff --git a/polygerrit-ui/app/elements/gr-app.js b/polygerrit-ui/app/elements/gr-app.js
index b866088..53ffc60 100644
--- a/polygerrit-ui/app/elements/gr-app.js
+++ b/polygerrit-ui/app/elements/gr-app.js
@@ -127,6 +127,12 @@
this._version = version;
});
+ if (window.localStorage.getItem('dark-theme')) {
+ this.$.libLoader.getDarkTheme().then(module => {
+ Polymer.dom(this.root).appendChild(module);
+ });
+ }
+
// Note: this is evaluated here to ensure that it only happens after the
// router has been initialized. @see Issue 7837
this._settingsUrl = Gerrit.Nav.getUrlForSettings();
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
index 48b01f6..14e5e6f 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
@@ -16,7 +16,9 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
+
<link rel="import" href="../../../behaviors/docs-url-behavior/docs-url-behavior.html">
+<link rel="import" href="../../../bower_components/paper-toggle-button/paper-toggle-button.html">
<link rel="import" href="../../../styles/gr-form-styles.html">
<link rel="import" href="../../../styles/gr-menu-page-styles.html">
<link rel="import" href="../../../styles/gr-page-nav-styles.html">
@@ -52,12 +54,19 @@
#email {
margin-bottom: 1em;
}
- .filters p {
+ .filters p,
+ .darkToggle p {
margin-bottom: 1em;
}
.queryExample em {
color: violet;
}
+ .toggle {
+ align-items: center;
+ display: flex;
+ margin-bottom: 1rem;
+ margin-right: 1rem;
+ }
</style>
<style include="gr-form-styles"></style>
<style include="gr-menu-page-styles"></style>
@@ -95,6 +104,19 @@
</gr-page-nav>
<main class="gr-form-styles">
<h1>User Settings</h1>
+ <section class="darkToggle">
+ <div class="toggle">
+ <paper-toggle-button
+ checked="[[_isDark]]"
+ on-change="_handleToggleDark"></paper-toggle-button>
+ <div>Dark theme (alpha)</div>
+ </div>
+ <p>
+ Gerrit's dark theme is in early alpha, and almost definitely will
+ not play nicely with themes set by specific Gerrit hosts. Filing
+ feedback via the link in the app footer is strongly encouraged!
+ </p>
+ </section>
<h2
id="Profile"
class$="[[_computeHeaderClass(_accountInfoChanged)]]">Profile</h2>
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
index 215aaa1..213ab65 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
@@ -35,6 +35,8 @@
const ABSOLUTE_URL_PATTERN = /^https?:/;
const TRAILING_SLASH_PATTERN = /\/$/;
+ const RELOAD_MESSAGE = 'Reloading...';
+
Polymer({
is: 'gr-settings-view',
@@ -45,7 +47,7 @@
*/
/**
- * Fired with email confirmation text.
+ * Fired with email confirmation text, or when the page reloads.
*
* @event show-alert
*/
@@ -132,6 +134,11 @@
_loadingPromise: Object,
_showNumber: Boolean,
+
+ _isDark: {
+ type: Boolean,
+ value: false,
+ },
},
behaviors: [
@@ -149,6 +156,8 @@
attached() {
this.fire('title-change', {title: 'Settings'});
+ this._isDark = !!window.localStorage.getItem('dark-theme');
+
const promises = [
this.$.accountInfo.loadData(),
this.$.watchedProjectsEditor.loadData(),
@@ -410,5 +419,20 @@
return base + GERRIT_DOCS_FILTER_PATH;
},
+
+ _handleToggleDark() {
+ if (this._isDark) {
+ window.localStorage.removeItem('dark-theme');
+ } else {
+ window.localStorage.setItem('dark-theme', 'true');
+ }
+ this.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {message: RELOAD_MESSAGE},
+ bubbles: true,
+ }));
+ this.async(() => {
+ window.location.reload();
+ }, 1);
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
index 84b7f0a..5ac8773 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
@@ -93,7 +93,14 @@
}
this.plugin.restApi()
.send(this.action.method, this.action.__url, payload)
- .then(onSuccess);
+ .then(onSuccess)
+ .catch(error => {
+ document.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {
+ message: `Plugin network error: ${error}`,
+ },
+ }));
+ });
};
window.GrPluginActionContext = GrPluginActionContext;
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
index 7c18a99..bf6a046 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
@@ -128,5 +128,26 @@
assert.isTrue(sendStub.calledWith(
'METHOD', '/changes/1/revisions/2/foo~bar', payload));
});
+
+ test('call error', done => {
+ instance.action = {
+ method: 'METHOD',
+ __key: 'key',
+ __url: '/changes/1/revisions/2/foo~bar',
+ };
+ const sendStub = sandbox.stub().returns(Promise.reject('boom'));
+ sandbox.stub(plugin, 'restApi').returns({
+ send: sendStub,
+ });
+ const errorStub = sandbox.stub();
+ document.addEventListener('network-error', errorStub);
+ instance.call();
+ flush(() => {
+ assert.isTrue(errorStub.calledOnce);
+ assert.equal(errorStub.args[0][0].detail.message,
+ 'Plugin network error: boom');
+ done();
+ });
+ });
});
</script>
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
similarity index 88%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
index f5b71be..f70aff4 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
@@ -16,6 +16,6 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<dom-module id="gr-syntax-lib-loader">
- <script src="gr-syntax-lib-loader.js"></script>
+<dom-module id="gr-lib-loader">
+ <script src="gr-lib-loader.js"></script>
</dom-module>
diff --git a/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
new file mode 100644
index 0000000..28ff45d
--- /dev/null
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
@@ -0,0 +1,164 @@
+/**
+ * @license
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+ 'use strict';
+
+ const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
+ const DARK_THEME_PATH = 'styles/themes/dark-theme.html';
+ const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
+
+ Polymer({
+ is: 'gr-lib-loader',
+
+ properties: {
+ _hljsState: {
+ type: Object,
+
+ // NOTE: intended singleton.
+ value: {
+ configured: false,
+ loading: false,
+ callbacks: [],
+ },
+ },
+ },
+
+ /**
+ * Get the HLJS library. Returns a promise that resolves with a reference to
+ * the library after it's been loaded. The promise resolves immediately if
+ * it's already been loaded.
+ * @return {!Promise<Object>}
+ */
+ getHLJS() {
+ return new Promise((resolve, reject) => {
+ // If the lib is totally loaded, resolve immediately.
+ if (this._getHighlightLib()) {
+ resolve(this._getHighlightLib());
+ return;
+ }
+
+ // If the library is not currently being loaded, then start loading it.
+ if (!this._hljsState.loading) {
+ this._hljsState.loading = true;
+ this._loadScript(this._getHLJSUrl())
+ .then(this._onHLJSLibLoaded.bind(this)).catch(reject);
+ }
+
+ this._hljsState.callbacks.push(resolve);
+ });
+ },
+
+ /**
+ * Loads the dark theme document. Returns a promise that resolves with a
+ * custom-style DOM element.
+ * @return {!Promise<Element>}
+ */
+ getDarkTheme() {
+ return new Promise((resolve, reject) => {
+ this.importHref(this._getLibRoot() + DARK_THEME_PATH, () => {
+ const module = document.createElement('style', 'custom-style');
+ module.setAttribute('include', 'dark-theme');
+ resolve(module);
+ });
+ });
+ },
+
+ /**
+ * Execute callbacks awaiting the HLJS lib load.
+ */
+ _onHLJSLibLoaded() {
+ const lib = this._getHighlightLib();
+ this._hljsState.loading = false;
+ for (const cb of this._hljsState.callbacks) {
+ cb(lib);
+ }
+ this._hljsState.callbacks = [];
+ },
+
+ /**
+ * Get the HLJS library, assuming it has been loaded. Configure the library
+ * if it hasn't already been configured.
+ * @return {!Object}
+ */
+ _getHighlightLib() {
+ const lib = window.hljs;
+ if (lib && !this._hljsState.configured) {
+ this._hljsState.configured = true;
+
+ lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
+ }
+ return lib;
+ },
+
+ /**
+ * Get the resource path used to load the application. If the application
+ * was loaded through a CDN, then this will be the path to CDN resources.
+ * @return {string}
+ */
+ _getLibRoot() {
+ // TODO(wyatta): Remove the remainder of this method logic once the
+ // STATIC_RESOURCE_PATH variable is being provided generally.
+ if (window.STATIC_RESOURCE_PATH) { return window.STATIC_RESOURCE_PATH; }
+
+ if (this._cachedLibRoot) { return this._cachedLibRoot; }
+
+ const appLink = document.head
+ .querySelector('link[rel=import][href$="gr-app.html"]');
+
+ if (!appLink) { throw new Error('Could not find application link'); }
+
+ this._cachedLibRoot = appLink
+ .href
+ .match(LIB_ROOT_PATTERN)[1];
+
+ if (!this._cachedLibRoot) {
+ throw new Error('Could not extract lib root');
+ }
+
+ return this._cachedLibRoot;
+ },
+ _cachedLibRoot: null,
+
+ /**
+ * Load and execute a JS file from the lib root.
+ * @param {string} src The path to the JS file without the lib root.
+ * @return {Promise} a promise that resolves when the script's onload
+ * executes.
+ */
+ _loadScript(src) {
+ return new Promise((resolve, reject) => {
+ const script = document.createElement('script');
+
+ if (!src) {
+ reject(new Error('Unable to load blank script url.'));
+ return;
+ }
+
+ script.src = src;
+ script.onload = resolve;
+ script.onerror = reject;
+ Polymer.dom(document.head).appendChild(script);
+ });
+ },
+
+ _getHLJSUrl() {
+ const root = this._getLibRoot();
+ if (!root) { return null; }
+ return root + HLJS_PATH;
+ },
+ });
+})();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
similarity index 77%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
index a260a97..cf9a41c 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
@@ -17,64 +17,67 @@
-->
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
-<title>gr-syntax-lib-loader</title>
+<title>gr-lib-loader</title>
<script src="../../../bower_components/webcomponentsjs/webcomponents-lite.min.js"></script>
<script src="../../../bower_components/web-component-tester/browser.js"></script>
<link rel="import" href="../../../test/common-test-setup.html"/>
-<link rel="import" href="gr-syntax-lib-loader.html">
+<link rel="import" href="gr-lib-loader.html">
<script>void(0);</script>
<test-fixture id="basic">
<template>
- <gr-syntax-lib-loader></gr-syntax-lib-loader>
+ <gr-lib-loader></gr-lib-loader>
</template>
</test-fixture>
<script>
- suite('gr-syntax-lib-loader tests', () => {
+ suite('gr-lib-loader tests', () => {
+ let sandbox;
let element;
let resolveLoad;
let loadStub;
setup(() => {
+ sandbox = sinon.sandbox.create();
element = fixture('basic');
- loadStub = sinon.stub(element, '_loadHLJS', () =>
+ loadStub = sandbox.stub(element, '_loadScript', () =>
new Promise(resolve => resolveLoad = resolve)
);
// Assert preconditions:
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
});
teardown(() => {
if (window.hljs) {
delete window.hljs;
}
- loadStub.restore();
+ sandbox.restore();
// Because the element state is a singleton, clean it up.
- element._state.configured = false;
- element._state.loading = false;
- element._state.callbacks = [];
+ element._hljsState.configured = false;
+ element._hljsState.loading = false;
+ element._hljsState.callbacks = [];
});
test('only load once', done => {
+ sandbox.stub(element, '_getHLJSUrl').returns('');
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
// It should now be in the loading state.
assert.isTrue(loadStub.called);
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
const secondCallHandler = sinon.stub();
- element.get().then(secondCallHandler);
+ element.getHLJS().then(secondCallHandler);
// No change in state.
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
assert.isFalse(secondCallHandler.called);
@@ -82,7 +85,7 @@
resolveLoad();
flush(() => {
// The state should be loaded and both handlers called.
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
assert.isTrue(firstCallHandler.called);
assert.isTrue(secondCallHandler.called);
done();
@@ -105,7 +108,7 @@
test('returns hljs', done => {
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
flush(() => {
assert.isTrue(firstCallHandler.called);
assert.isTrue(firstCallHandler.calledWith(hljsStub));
@@ -114,7 +117,7 @@
});
test('configures hljs', done => {
- element.get().then(() => {
+ element.getHLJS().then(() => {
assert.isTrue(window.hljs.configure.calledOnce);
done();
});
@@ -123,15 +126,10 @@
suite('_getHLJSUrl', () => {
suite('checking _getLibRoot', () => {
- let libRootStub;
let root;
setup(() => {
- libRootStub = sinon.stub(element, '_getLibRoot', () => root);
- });
-
- teardown(() => {
- libRootStub.restore();
+ sandbox.stub(element, '_getLibRoot', () => root);
});
test('with no root', () => {
diff --git a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
index 9a5851b..c081b30 100644
--- a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
+++ b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
@@ -293,47 +293,42 @@
});
},
- saveRepoConfig(repo, config, opt_errFn, opt_ctx) {
+ saveRepoConfig(repo, config, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
return this.send('PUT', `/projects/${encodeName}/config`, config,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
- runRepoGC(repo, opt_errFn, opt_ctx) {
+ runRepoGC(repo, opt_errFn) {
if (!repo) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
- return this.send('POST', `/projects/${encodeName}/gc`, '',
- opt_errFn, opt_ctx);
+ return this.send('POST', `/projects/${encodeName}/gc`, '', opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepo(config, opt_errFn, opt_ctx) {
+ createRepo(config, opt_errFn) {
if (!config.name) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createGroup(config, opt_errFn, opt_ctx) {
+ createGroup(config, opt_errFn) {
if (!config.name) { return ''; }
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn);
},
getGroupConfig(group, opt_errFn) {
@@ -347,34 +342,30 @@
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoBranches(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoBranches(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/branches/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/branches/${encodeRef}`, '', opt_errFn);
},
/**
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoTags(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoTags(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/tags/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/tags/${encodeRef}`, '', opt_errFn);
},
/**
@@ -382,9 +373,8 @@
* @param {string} branch
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoBranch(name, branch, revision, opt_errFn, opt_ctx) {
+ createRepoBranch(name, branch, revision, opt_errFn) {
if (!name || !branch || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
@@ -392,7 +382,7 @@
const encodeBranch = encodeURIComponent(branch);
return this.send('PUT',
`/projects/${encodeName}/branches/${encodeBranch}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -400,16 +390,15 @@
* @param {string} tag
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoTag(name, tag, revision, opt_errFn, opt_ctx) {
+ createRepoTag(name, tag, revision, opt_errFn) {
if (!name || !tag || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(name);
const encodeTag = encodeURIComponent(tag);
return this.send('PUT', `/projects/${encodeName}/tags/${encodeTag}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -562,41 +551,37 @@
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- savePreferences(prefs, opt_errFn, opt_ctx) {
+ savePreferences(prefs, opt_errFn) {
// Note (Issue 5142): normalize the download scheme with lower case before
// saving.
if (prefs.download_scheme) {
prefs.download_scheme = prefs.download_scheme.toLowerCase();
}
- return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn,
- opt_ctx);
+ return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveDiffPreferences(prefs, opt_errFn, opt_ctx) {
+ saveDiffPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.diff'] = undefined;
return this.send('PUT', '/accounts/self/preferences.diff', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveEditPreferences(prefs, opt_errFn, opt_ctx) {
+ saveEditPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.edit'] = undefined;
return this.send('PUT', '/accounts/self/preferences.edit', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
getAccount() {
@@ -636,46 +621,43 @@
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- addAccountEmail(email, opt_errFn, opt_ctx) {
+ addAccountEmail(email, opt_errFn) {
return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteAccountEmail(email, opt_errFn, opt_ctx) {
+ deleteAccountEmail(email, opt_errFn) {
return this.send('DELETE', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setPreferredAccountEmail(email, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email) + '/preferred', null,
- opt_errFn, opt_ctx).then(() => {
- // If result of getAccountEmails is in cache, update it in the cache
- // so we don't have to invalidate it.
- const cachedEmails = this._cache['/accounts/self/emails'];
- if (cachedEmails) {
- const emails = cachedEmails.map(entry => {
- if (entry.email === email) {
- return {email, preferred: true};
- } else {
- return {email};
- }
- });
- this._cache['/accounts/self/emails'] = emails;
+ setPreferredAccountEmail(email, opt_errFn) {
+ const encodedEmail = encodeURIComponent(email);
+ const url = `/accounts/self/emails/${encodedEmail}/preferred`;
+ return this.send('PUT', url, null, opt_errFn).then(() => {
+ // If result of getAccountEmails is in cache, update it in the cache
+ // so we don't have to invalidate it.
+ const cachedEmails = this._cache['/accounts/self/emails'];
+ if (cachedEmails) {
+ const emails = cachedEmails.map(entry => {
+ if (entry.email === email) {
+ return {email, preferred: true};
+ } else {
+ return {email};
}
});
+ this._cache['/accounts/self/emails'] = emails;
+ }
+ });
},
/**
@@ -695,35 +677,31 @@
/**
* @param {string} name
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountName(name, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/name', {name}, opt_errFn, opt_ctx)
- .then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({name: newName})));
+ setAccountName(name, opt_errFn) {
+ return this.send('PUT', '/accounts/self/name', {name}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({name: newName}));
},
/**
* @param {string} username
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountUsername(username, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/username', {username}, opt_errFn,
- opt_ctx).then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({username: newName})));
+ setAccountUsername(username, opt_errFn) {
+ return this.send('PUT', '/accounts/self/username', {username}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({username: newName}));
},
/**
* @param {string} status
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountStatus(status, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/status', {status},
- opt_errFn, opt_ctx).then(response => this.getResponseObject(response)
- .then(newStatus => this._updateCachedAccount(
- {status: newStatus})));
+ setAccountStatus(status, opt_errFn) {
+ return this.send('PUT', '/accounts/self/status', {status}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newStatus => this._updateCachedAccount({status: newStatus}));
},
getAccountStatus(userId) {
@@ -832,24 +810,20 @@
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveWatchedProjects(projects, opt_errFn, opt_ctx) {
- return this.send('POST', '/accounts/self/watched.projects', projects,
- opt_errFn, opt_ctx)
- .then(response => {
- return this.getResponseObject(response);
- });
+ saveWatchedProjects(projects, opt_errFn) {
+ const url = '/accounts/self/watched.projects';
+ return this.send('POST', url, projects, opt_errFn)
+ .then(response => this.getResponseObject(response));
},
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteWatchedProjects(projects, opt_errFn, opt_ctx) {
+ deleteWatchedProjects(projects, opt_errFn) {
return this.send('POST', '/accounts/self/watched.projects:delete',
- projects, opt_errFn, opt_ctx);
+ projects, opt_errFn);
},
/**
@@ -1287,15 +1261,13 @@
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedGroups(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedGroups(inputVal, opt_n, opt_errFn) {
const params = {s: inputVal};
if (opt_n) { params.n = opt_n; }
return this._fetchJSON({
url: '/groups/',
errFn: opt_errFn,
- cancelCondition: opt_ctx,
params,
});
},
@@ -1304,9 +1276,8 @@
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedProjects(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedProjects(inputVal, opt_n, opt_errFn) {
const params = {
m: inputVal,
n: MAX_PROJECT_RESULTS,
@@ -1316,7 +1287,6 @@
return this._fetchJSON({
url: '/projects/',
errFn: opt_errFn,
- cancelCondition: opt_ctx,
params,
});
},
@@ -1325,9 +1295,8 @@
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedAccounts(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedAccounts(inputVal, opt_n, opt_errFn) {
if (!inputVal) {
return Promise.resolve([]);
}
@@ -1336,7 +1305,6 @@
return this._fetchJSON({
url: '/accounts/',
errFn: opt_errFn,
- cancelCondition: opt_ctx,
params,
});
},
@@ -1441,13 +1409,12 @@
* @param {string} path
* @param {boolean} reviewed
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn, opt_ctx) {
+ saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn) {
const method = reviewed ? 'PUT' : 'DELETE';
- const e = `/files/${encodeURIComponent(path)}/reviewed`;
- return this.getChangeURLAndSend(changeNum, method, patchNum, e, null,
- opt_errFn, opt_ctx);
+ const endpoint = `/files/${encodeURIComponent(path)}/reviewed`;
+ return this.getChangeURLAndSend(changeNum, method, patchNum, endpoint,
+ null, opt_errFn);
},
/**
@@ -1455,15 +1422,14 @@
* @param {number|string} patchNum
* @param {!Object} review
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveChangeReview(changeNum, patchNum, review, opt_errFn, opt_ctx) {
+ saveChangeReview(changeNum, patchNum, review, opt_errFn) {
const promises = [
this.awaitPendingDiffDrafts(),
this.getChangeActionURL(changeNum, patchNum, '/review'),
];
return Promise.all(promises).then(([, url]) => {
- return this.send('POST', url, review, opt_errFn, opt_ctx);
+ return this.send('POST', url, review, opt_errFn);
});
},
@@ -1542,7 +1508,7 @@
const e = `/files/${encodeURIComponent(path)}/content`;
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', patchNum, e, null,
- opt_errFn, null, null, headers);
+ opt_errFn, null, headers);
},
/**
@@ -1554,7 +1520,7 @@
const e = '/edit/' + encodeURIComponent(path);
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', null, e, null, null,
- null, null, headers);
+ null, headers);
},
rebaseChangeEdit(changeNum) {
@@ -1583,7 +1549,7 @@
saveChangeEdit(changeNum, path, contents) {
const e = '/edit/' + encodeURIComponent(path);
return this.getChangeURLAndSend(changeNum, 'PUT', null, e, contents, null,
- null, 'text/plain');
+ 'text/plain');
},
// Deprecated, prefer to use putChangeCommitMessage instead.
@@ -1617,12 +1583,10 @@
* number at least.
* @param {?function(?Response, string=)=} opt_errFn
* passed as null sometimes.
- * @param {?=} opt_ctx
* @param {?string=} opt_contentType
* @param {Object=} opt_headers
*/
- send(method, url, opt_body, opt_errFn, opt_ctx, opt_contentType,
- opt_headers) {
+ send(method, url, opt_body, opt_errFn, opt_contentType, opt_headers) {
const options = {method};
if (opt_body) {
options.headers = new Headers();
@@ -1646,7 +1610,7 @@
return this._auth.fetch(url, options).then(response => {
if (!response.ok) {
if (opt_errFn) {
- return opt_errFn.call(opt_ctx || null, response);
+ return opt_errFn.call(null, response);
}
this.fire('server-error', {response});
}
@@ -1654,7 +1618,7 @@
}).catch(err => {
this.fire('network-error', {error: err});
if (opt_errFn) {
- return opt_errFn.call(opt_ctx, null, err);
+ return opt_errFn.call(null, null, err);
} else {
throw err;
}
@@ -2032,7 +1996,7 @@
},
addAccountSSHKey(key) {
- return this.send('POST', '/accounts/self/sshkeys', key, null, null,
+ return this.send('POST', '/accounts/self/sshkeys', key, null,
'plain/text')
.then(response => {
if (response.status < 200 && response.status >= 300) {
@@ -2220,17 +2184,15 @@
* @param {?Object|number|string=} opt_payload gets passed as null, string,
* Object, or number.
* @param {?function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
* @param {?=} opt_contentType
* @param {Object=} opt_headers
* @return {!Promise<!Object>}
*/
getChangeURLAndSend(changeNum, method, patchNum, endpoint, opt_payload,
- opt_errFn, opt_ctx, opt_contentType, opt_headers) {
- return this._changeBaseURL(changeNum, patchNum).then(url => {
- return this.send(method, url + endpoint, opt_payload, opt_errFn,
- opt_ctx, opt_contentType, opt_headers);
- });
+ opt_errFn, opt_contentType, opt_headers) {
+ return this._changeBaseURL(changeNum, patchNum).then(url =>
+ this.send(method, url + endpoint, opt_payload, opt_errFn,
+ opt_contentType, opt_headers));
},
/**
diff --git a/polygerrit-ui/app/embed/embed.html b/polygerrit-ui/app/embed/embed.html
index f3c727e..9fb5c23 100644
--- a/polygerrit-ui/app/embed/embed.html
+++ b/polygerrit-ui/app/embed/embed.html
@@ -21,4 +21,4 @@
<link rel="import" href="../elements/change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="../elements/change-list/gr-change-list/gr-change-list.html">
<link rel="import" href="../elements/change-list/gr-dashboard-view/gr-dashboard-view.html">
-<link rel="import" href="../styles/app-theme.html">
+<link rel="import" href="../styles/themes/app-theme.html">
diff --git a/polygerrit-ui/app/rules.bzl b/polygerrit-ui/app/rules.bzl
index b60aa22..199a947 100644
--- a/polygerrit-ui/app/rules.bzl
+++ b/polygerrit-ui/app/rules.bzl
@@ -62,6 +62,15 @@
)
native.filegroup(
+ name = name + "_theme_sources",
+ srcs = native.glob(
+ ["styles/themes/*.html"],
+ # app-theme.html already included via an import in gr-app.html.
+ exclude = ["styles/themes/app-theme.html"],
+ ),
+ )
+
+ native.filegroup(
name = name + "_top_sources",
srcs = [
"favicon.ico",
@@ -73,6 +82,7 @@
srcs = [
name + "_app_sources",
name + "_css_sources",
+ name + "_theme_sources",
name + "_top_sources",
"//lib/fonts:robotofonts",
"//lib/js:highlightjs_files",
@@ -82,11 +92,12 @@
],
outs = outs,
cmd = " && ".join([
- "mkdir -p $$TMP/polygerrit_ui/{styles,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
+ "mkdir -p $$TMP/polygerrit_ui/{styles/themes,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
"for f in $(locations " + name + "_app_sources); do ext=$${f##*.}; cp -p $$f $$TMP/polygerrit_ui/elements/" + appName + ".$$ext; done",
"cp $(locations //lib/fonts:robotofonts) $$TMP/polygerrit_ui/fonts/",
"for f in $(locations " + name + "_top_sources); do cp $$f $$TMP/polygerrit_ui/; done",
"for f in $(locations "+ name + "_css_sources); do cp $$f $$TMP/polygerrit_ui/styles; done",
+ "for f in $(locations "+ name + "_theme_sources); do cp $$f $$TMP/polygerrit_ui/styles/themes; done",
"for f in $(locations //lib/js:highlightjs_files); do cp $$f $$TMP/polygerrit_ui/bower_components/highlightjs/ ; done",
"unzip -qd $$TMP/polygerrit_ui/bower_components $(location @webcomponentsjs//:zipfile) webcomponentsjs/webcomponents-lite.js",
"cd $$TMP",
diff --git a/polygerrit-ui/app/styles/app-theme.html b/polygerrit-ui/app/styles/themes/app-theme.html
similarity index 98%
rename from polygerrit-ui/app/styles/app-theme.html
rename to polygerrit-ui/app/styles/themes/app-theme.html
index 69262c9..4500e10 100644
--- a/polygerrit-ui/app/styles/app-theme.html
+++ b/polygerrit-ui/app/styles/themes/app-theme.html
@@ -42,7 +42,7 @@
--table-header-background-color: #fafafa;
--table-subheader-background-color: #eaeaea;
- --chip-background-color: var(--header-background-color);
+ --chip-background-color: #eee;
--dropdown-background-color: #fff;
diff --git a/polygerrit-ui/app/styles/themes/dark-theme.html b/polygerrit-ui/app/styles/themes/dark-theme.html
new file mode 100644
index 0000000..1f473da
--- /dev/null
+++ b/polygerrit-ui/app/styles/themes/dark-theme.html
@@ -0,0 +1,83 @@
+<dom-module id="dark-theme">
+ <style is="custom-style">
+ html {
+ --primary-text-color: #e2e2e2;
+ --view-background-color: #212121;
+ --border-color: #555555;
+ --table-header-background-color: #353637;
+ --table-subheader-background-color: rgb(23, 27, 31);
+ --header-background-color: #5487E5;
+ --header-text-color: var(--primary-text-color);
+ --deemphasized-text-color: #9a9a9a;
+ --footer-background-color: var(--table-header-background-color);
+ --expanded-background-color: #26282b;
+ --link-color: #5487E5;
+ --primary-button-background-color: var(--link-color);
+ --primary-button-text-color: var(--primary-text-color);
+ --secondary-button-background-color: var(--primary-text-color);
+ --secondary-button-text-color: var(--deemphasized-text-color);
+ --default-button-text-color: var(--link-color);
+ --default-button-background-color: var(--table-subheader-background-color);
+ --dropdown-background-color: var(--table-header-background-color);
+ --dialog-background-color: var(--view-background-color);
+ --chip-background-color: var(--table-header-background-color);
+
+ --select-background-color: var(--table-subheader-background-color);
+
+ --assignee-highlight-color: rgb(58, 54, 28);
+
+ --diff-selection-background-color: #3A71D8;
+ --light-remove-highlight-color: rgb(53, 27, 27);
+ --light-add-highlight-color: rgb(24, 45, 24);
+ --light-rebased-remove-highlight-color: rgb(60, 37, 8);
+ --light-rebased-add-highlight-color: rgb(72, 113, 101);
+ --dark-remove-highlight-color: rgba(255, 0, 0, 0.15);
+ --dark-add-highlight-color: rgba(0, 255, 0, 0.15);
+ --dark-rebased-remove-highlight-color: rgba(255, 139, 6, 0.15);
+ --dark-rebased-add-highlight-color: rgba(11, 255, 155, 0.15);
+ --diff-context-control-color: var(--table-header-background-color);
+ --diff-context-control-border-color: var(--border-color);
+ --diff-highlight-range-color: rgba(0, 100, 200, 0.5);
+ --diff-highlight-range-hover-color: rgba(0, 150, 255, 0.5);
+ --comment-text-color: var(--primary-text-color);
+ --comment-background-color: #0B162B;
+ --unresolved-comment-background-color: rgb(56, 90, 154);
+
+ --vote-color-approved: rgb(127, 182, 107);
+ --vote-color-recommended: rgb(63, 103, 50);
+ --vote-color-rejected: #ac2d3e;
+ --vote-color-disliked: #bf6874;
+ --vote-color-neutral: #597280;
+
+ --edit-mode-background-color: rgb(92, 10, 54);
+ --emphasis-color: #383f4a;
+
+ --tooltip-background-color: #111;
+
+ --syntax-default-color: var(--primary-text-color);
+ --syntax-meta-color: #6D7EEE;
+ --syntax-keyword-color: #CD4CF0;
+ --syntax-number-color: #00998A;
+ --syntax-selector-class-color: #FFCB68;
+ --syntax-variable-color: #F77669;
+ --syntax-template-variable-color: #F77669;
+ --syntax-comment-color: var(--deemphasized-text-color);
+ --syntax-string-color: #C3E88D;
+ --syntax-selector-id-color: #F77669;
+ --syntax-built_in-color: rgb(247, 195, 105);
+ --syntax-tag-color: #F77669;
+ --syntax-link-color: #C792EA;
+ --syntax-meta-keyword-color: #EEFFF7;
+ --syntax-type-color: #DD5F5F;
+ --syntax-title-color: #75A5FF;
+ --syntax-attr-color: #80CBBF;
+ --syntax-literal-color: #EEFFF7;
+ --syntax-selector-pseudo-color: #C792EA;
+ --syntax-regexp-color: #F77669;
+ --syntax-selector-attr-color: #80CBBF;
+ --syntax-template-tag-color: #C792EA;
+
+ background-color: var(--view-background-color);
+ }
+ </style>
+</dom-module>
\ No newline at end of file
diff --git a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
index 3a5cd83b..579e783 100644
--- a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
+++ b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
@@ -1,5 +1,6 @@
-import os, re, json
-from shutil import copyfile, rmtree
+import json
+import os
+import re
polymerRegex = r"Polymer\({"
polymerCompiledRegex = re.compile(polymerRegex)
@@ -10,103 +11,119 @@
regexBehavior = r"<script>(.+)<\/script>"
behaviorCompiledRegex = re.compile(regexBehavior, re.DOTALL)
+
def _open(filename, mode="r"):
- try:
- return open(filename, mode, encoding="utf-8")
- except TypeError:
- return open(filename, mode)
+ try:
+ return open(filename, mode, encoding="utf-8")
+ except TypeError:
+ return open(filename, mode)
-def replaceBehaviorLikeHTML (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- match = behaviorCompiledRegex.search(file_str)
- if (match):
- with _open("polygerrit-ui/temp/behaviors/" + fileOut.replace("html", "js") , "w+") as f:
- f.write(match.group(1))
-def replaceBehaviorLikeJS (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- with _open("polygerrit-ui/temp/behaviors/" + fileOut , "w+") as f:
- f.write(file_str)
+def replaceBehaviorLikeHTML(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ match = behaviorCompiledRegex.search(file_str)
+ if match:
+ with _open("polygerrit-ui/temp/behaviors/" +
+ fileOut.replace("html", "js"), "w+") as f:
+ f.write(match.group(1))
+
+
+def replaceBehaviorLikeJS(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ with _open("polygerrit-ui/temp/behaviors/" + fileOut, "w+") as f:
+ f.write(file_str)
+
def generateStubBehavior(behaviorName):
- with _open("polygerrit-ui/temp/behaviors/" + behaviorName + ".js", "w+") as f:
- f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
+ with _open("polygerrit-ui/temp/behaviors/" +
+ behaviorName + ".js", "w+") as f:
+ f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
-def replacePolymerElement (fileIn, fileOut, root):
- with _open(fileIn) as f:
- key = fileOut.split('.')[0]
- # Removed self invoked function
- file_str = f.read()
- file_str_no_fn = fnCompiledRegex.search(file_str)
- if file_str_no_fn:
- package = root.replace("/", ".") + "." + fileOut
+def replacePolymerElement(fileIn, fileOut, root):
+ with _open(fileIn) as f:
+ key = fileOut.split('.')[0]
+ # Removed self invoked function
+ file_str = f.read()
+ file_str_no_fn = fnCompiledRegex.search(file_str)
- with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
- mainFileContents = re.sub(polymerCompiledRegex, "exports = Polymer({", file_str_no_fn.group(1)).replace("'use strict';", "")
- f.write("/** \n" \
- "* @fileoverview \n" \
- "* @suppress {missingProperties} \n" \
- "*/ \n\n" \
- "goog.module('polygerrit." + package + "')\n\n" + mainFileContents)
+ if file_str_no_fn:
+ package = root.replace("/", ".") + "." + fileOut
- # Add package and javascript to files object.
- elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
- elements[key]["package"] = package
+ with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
+ mainFileContents = re.sub(
+ polymerCompiledRegex,
+ "exports = Polymer({",
+ file_str_no_fn.group(1)).replace("'use strict';", "")
+ f.write("/** \n"
+ "* @fileoverview \n"
+ "* @suppress {missingProperties} \n"
+ "*/ \n\n"
+ "goog.module('polygerrit." + package + "')\n\n" +
+ mainFileContents)
+
+ # Add package and javascript to files object.
+ elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
+ elements[key]["package"] = package
+
def writeTempFile(file, root):
- # This is included in an extern because it is directly on the window object.
- # (for now at least).
- if "gr-reporting" in file:
- return
- key = file.split('.')[0]
- if not key in elements:
- # gr-app doesn't have an additional level
- elements[key] = {"directory": 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]}
- if file.endswith(".html") and not file.endswith("_test.html"):
- # gr-navigation is treated like a behavior rather than a standard element
- # because of the way it added to the Gerrit object.
- if file.endswith("gr-navigation.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- else:
- elements[key]["html"] = os.path.join(root, file)
- if file.endswith(".js"):
- replacePolymerElement(os.path.join(root, file), file, root)
+ # This is included in an extern because it is directly on the window object
+ # (for now at least).
+ if "gr-reporting" in file:
+ return
+ key = file.split('.')[0]
+ if key not in elements:
+ # gr-app doesn't have an additional level
+ elements[key] = {
+ "directory":
+ 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]
+ }
+ if file.endswith(".html") and not file.endswith("_test.html"):
+ # gr-navigation is treated like a behavior rather than a standard
+ # element because of the way it added to the Gerrit object.
+ if file.endswith("gr-navigation.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ else:
+ elements[key]["html"] = os.path.join(root, file)
+ if file.endswith(".js"):
+ replacePolymerElement(os.path.join(root, file), file, root)
if __name__ == "__main__":
- # Create temp directory.
- if not os.path.exists("polygerrit-ui/temp"):
- os.makedirs("polygerrit-ui/temp")
+ # Create temp directory.
+ if not os.path.exists("polygerrit-ui/temp"):
+ os.makedirs("polygerrit-ui/temp")
- # Within temp directory create behavior directory.
- if not os.path.exists("polygerrit-ui/temp/behaviors"):
- os.makedirs("polygerrit-ui/temp/behaviors")
+ # Within temp directory create behavior directory.
+ if not os.path.exists("polygerrit-ui/temp/behaviors"):
+ os.makedirs("polygerrit-ui/temp/behaviors")
- elements = {}
+ elements = {}
- # Go through every file in app/elements, and re-write accordingly to temp
- # directory, and also added to elements object, which is used to generate a
- # map of html files, package names, and javascript files.
- for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
- for file in files:
- writeTempFile(file, root)
+ # Go through every file in app/elements, and re-write accordingly to temp
+ # directory, and also added to elements object, which is used to generate a
+ # map of html files, package names, and javascript files.
+ for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
+ for file in files:
+ writeTempFile(file, root)
- # Special case for polymer behaviors we are using.
- replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
- generateStubBehavior("Polymer.IronOverlayBehavior")
- generateStubBehavior("Polymer.IronFitBehavior")
+ # Special case for polymer behaviors we are using.
+ replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
+ generateStubBehavior("Polymer.IronOverlayBehavior")
+ generateStubBehavior("Polymer.IronFitBehavior")
- #TODO figure out something to do with iron-overlay-behavior. it is hard-coded reformatted.
+ # TODO figure out something to do with iron-overlay-behavior.
+ # it is hard-coded reformatted.
- with _open("polygerrit-ui/temp/map.json", "w+") as f:
- f.write(json.dumps(elements))
+ with _open("polygerrit-ui/temp/map.json", "w+") as f:
+ f.write(json.dumps(elements))
- for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
- for file in files:
- if file.endswith("behavior.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- elif file.endswith("behavior.js"):
- replaceBehaviorLikeJS(os.path.join(root, file), file)
+ for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
+ for file in files:
+ if file.endswith("behavior.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ elif file.endswith("behavior.js"):
+ replaceBehaviorLikeJS(os.path.join(root, file), file)
diff --git a/polygerrit-ui/app/test/index.html b/polygerrit-ui/app/test/index.html
index 6a562fc..5a5dbcd 100644
--- a/polygerrit-ui/app/test/index.html
+++ b/polygerrit-ui/app/test/index.html
@@ -112,7 +112,6 @@
'diff/gr-ranged-comment-layer/gr-ranged-comment-layer_test.html',
'diff/gr-selection-action-box/gr-selection-action-box_test.html',
'diff/gr-syntax-layer/gr-syntax-layer_test.html',
- 'diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html',
'edit/gr-default-editor/gr-default-editor_test.html',
'edit/gr-edit-controls/gr-edit-controls_test.html',
'edit/gr-edit-file-controls/gr-edit-file-controls_test.html',
@@ -165,6 +164,7 @@
'shared/gr-js-api-interface/gr-plugin-endpoints_test.html',
'shared/gr-js-api-interface/gr-plugin-rest-api_test.html',
'shared/gr-fixed-panel/gr-fixed-panel_test.html',
+ 'shared/gr-lib-loader/gr-lib-loader_test.html',
'shared/gr-limited-text/gr-limited-text_test.html',
'shared/gr-linked-chip/gr-linked-chip_test.html',
'shared/gr-linked-text/gr-linked-text_test.html',
diff --git a/proto/cache.proto b/proto/cache.proto
index 634b595..a826f8c 100644
--- a/proto/cache.proto
+++ b/proto/cache.proto
@@ -45,3 +45,151 @@
int64 expires_at = 4;
string provider_id = 5;
}
+
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesCache.Key.
+// Next ID: 4
+message ChangeNotesKeyProto {
+ string project = 1;
+ int32 change_id = 2;
+ bytes id = 3;
+}
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesState.
+//
+// Note on embedded protos: this is just for storing in a cache, so some formats
+// were chosen for ease of coding the initial implementation. In particular, where
+// there already exists another serialization mechanism in Gerrit for
+// serializing a particular field, we use that rather than defining a new proto
+// type. This includes ReviewDb types that can be serialized to proto using
+// ProtobufCodec as well as NoteDb and indexed types that are serialized using
+// JSON. We can always revisit this decision later, particularly when we
+// eliminate the ReviewDb types; it just requires bumping the cache version.
+//
+// Note on nullability: there are a lot of nullable fields in ChangeNotesState
+// and its dependencies. It's likely we could make some of them non-nullable,
+// but each one of those would be a potentially significant amount of cleanup,
+// and there's no guarantee we'd be able to eliminate all of them. (For a less
+// complex class, it's likely the cleanup would be more feasible.)
+//
+// Instead, we just take the tedious yet simple approach of having a "has_foo"
+// field for each nullable field "foo", indicating whether or not foo is null.
+//
+// Next ID: 19
+message ChangeNotesStateProto {
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ bytes meta_id = 1;
+
+ int32 change_id = 2;
+
+ // Next ID: 24
+ message ChangeColumnsProto {
+ string change_key = 1;
+
+ int64 created_on = 2;
+
+ int64 last_updated_on = 3;
+
+ int32 owner = 4;
+
+ string branch = 5;
+
+ int32 current_patch_set_id = 6;
+ bool has_current_patch_set_id = 7;
+
+ string subject = 8;
+
+ string topic = 9;
+ bool has_topic = 10;
+
+ string original_subject = 11;
+ bool has_original_subject = 12;
+
+ string submission_id = 13;
+ bool has_submission_id = 14;
+
+ int32 assignee = 15;
+ bool has_assignee = 16;
+
+ string status = 17;
+ bool has_status = 18;
+
+ bool is_private = 19;
+
+ bool work_in_progress = 20;
+
+ bool review_started = 21;
+
+ int32 revert_of = 22;
+ bool has_revert_of = 23;
+ }
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ ChangeColumnsProto columns = 3;
+
+ repeated int32 past_assignee = 4;
+
+ repeated string hashtag = 5;
+
+ // Raw PatchSet proto as produced by ProtobufCodec.
+ repeated bytes patch_set = 6;
+
+ // Raw PatchSetApproval proto as produced by ProtobufCodec.
+ repeated bytes approval = 7;
+
+ // Next ID: 4
+ message ReviewerSetEntryProto {
+ string state = 1;
+ int32 account_id = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerSetEntryProto reviewer = 8;
+
+ // Next ID: 4
+ message ReviewerByEmailSetEntryProto {
+ string state = 1;
+ string address = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerByEmailSetEntryProto reviewer_by_email = 9;
+
+ repeated ReviewerSetEntryProto pending_reviewer = 10;
+
+ repeated ReviewerByEmailSetEntryProto pending_reviewer_by_email = 11;
+
+ repeated int32 past_reviewer = 12;
+
+ // Next ID: 5
+ message ReviewerStatusUpdateProto {
+ int64 date = 1;
+ int32 updated_by = 2;
+ int32 reviewer = 3;
+ string state = 4;
+ }
+ repeated ReviewerStatusUpdateProto reviewer_update = 13;
+
+ // JSON produced from
+ // com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord.
+ repeated string submit_record = 14;
+
+ // Raw ChangeMessage proto as produced by ProtobufCodec.
+ repeated bytes change_message = 15;
+
+ // JSON produced from com.google.gerrit.reviewdb.client.Comment.
+ repeated string published_comment = 16;
+
+ int64 read_only_until = 17;
+ bool has_read_only_until = 18;
+}
+
+
+// Serialized form of com.google.gerrit.server.query.change.ConflictKey
+message ConflictKeyProto {
+ bytes commit = 1;
+ bytes other_commit = 2;
+ string submit_type = 3;
+ bool content_merge = 4;
+}
diff --git a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
index 699dd0e..c51e9dc 100644
--- a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
+++ b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
@@ -33,6 +33,7 @@
window.CLOSURE_NO_DEPS = true;
{if $canonicalPath != ''}window.CANONICAL_PATH = '{$canonicalPath}';{/if}
{if $versionInfo}window.VERSION_INFO = '{$versionInfo}';{/if}
+ {if $staticResourcePath != ''}window.STATIC_RESOURCE_PATH = '{$staticResourcePath}';{/if}
</script>{\n}
{if $faviconPath}
diff --git a/resources/com/google/gerrit/pgm/Startup.py b/resources/com/google/gerrit/pgm/Startup.py
index 469d5df..ec18f42 100644
--- a/resources/com/google/gerrit/pgm/Startup.py
+++ b/resources/com/google/gerrit/pgm/Startup.py
@@ -19,14 +19,16 @@
from __future__ import print_function
import sys
+
def print_help():
- for (n, v) in vars(sys.modules['__main__']).items():
- if not n.startswith("__") and not n in ['help', 'reload'] \
- and str(type(v)) != "<type 'javapackage'>" \
- and not str(v).startswith("<module"):
- print("\"%s\" is \"%s\"" % (n, v))
- print()
- print("Welcome to the Gerrit Inspector")
- print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+ for (n, v) in vars(sys.modules['__main__']).items():
+ if not n.startswith("__") and n not in ['help', 'reload'] \
+ and str(type(v)) != "<type 'javapackage'>" \
+ and not str(v).startswith("<module"):
+ print("\"%s\" is \"%s\"" % (n, v))
+ print()
+ print("Welcome to the Gerrit Inspector")
+ print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+
print_help()
diff --git a/tools/bzl/license-map.py b/tools/bzl/license-map.py
index 74a84cc..476ccb9 100644
--- a/tools/bzl/license-map.py
+++ b/tools/bzl/license-map.py
@@ -25,35 +25,34 @@
handled_rules = []
for xml in args.xmls:
- tree = ET.parse(xml)
- root = tree.getroot()
+ tree = ET.parse(xml)
+ root = tree.getroot()
- for child in root:
- rule_name = child.attrib["name"]
- if rule_name in handled_rules:
- # already handled in other xml files
- continue
+ for child in root:
+ rule_name = child.attrib["name"]
+ if rule_name in handled_rules:
+ # already handled in other xml files
+ continue
- handled_rules.append(rule_name)
- for c in child.getchildren():
- if c.tag != "rule-input":
- continue
+ handled_rules.append(rule_name)
+ for c in child.getchildren():
+ if c.tag != "rule-input":
+ continue
- license_name = c.attrib["name"]
- if LICENSE_PREFIX in license_name:
- entries[rule_name].append(license_name)
- graph[license_name].append(rule_name)
+ license_name = c.attrib["name"]
+ if LICENSE_PREFIX in license_name:
+ entries[rule_name].append(license_name)
+ graph[license_name].append(rule_name)
if len(graph[DO_NOT_DISTRIBUTE]):
- print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
- for target in graph[DO_NOT_DISTRIBUTE]:
- print(target, file=stderr)
- exit(1)
+ print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
+ for target in graph[DO_NOT_DISTRIBUTE]:
+ print(target, file=stderr)
+ exit(1)
if args.asciidoctor:
- print(
-# We don't want any blank line before "= Gerrit Code Review - Licenses"
-"""= Gerrit Code Review - Licenses
+ # We don't want any blank line before "= Gerrit Code Review - Licenses"
+ print("""= Gerrit Code Review - Licenses
Gerrit open source software is licensed under the <<Apache2_0,Apache
License 2.0>>. Executable distributions also include other software
@@ -93,40 +92,39 @@
""")
for n in sorted(graph.keys()):
- if len(graph[n]) == 0:
- continue
+ if len(graph[n]) == 0:
+ continue
- name = n[len(LICENSE_PREFIX):]
- safename = name.replace(".", "_")
- print()
- print("[[%s]]" % safename)
- print(name)
- print()
- for d in sorted(graph[n]):
- if d.startswith("//lib:") or d.startswith("//lib/"):
- p = d[len("//lib:"):]
- else:
- p = d[d.index(":")+1:].lower()
- if "__" in p:
- p = p[:p.index("__")]
- print("* " + p)
- print()
- print("[[%s_license]]" % safename)
- print("----")
- filename = n[2:].replace(":", "/")
- try:
- with open(filename, errors='ignore') as fd:
- copyfileobj(fd, stdout)
- except TypeError:
- with open(filename) as fd:
- copyfileobj(fd, stdout)
- print()
- print("----")
- print()
+ name = n[len(LICENSE_PREFIX):]
+ safename = name.replace(".", "_")
+ print()
+ print("[[%s]]" % safename)
+ print(name)
+ print()
+ for d in sorted(graph[n]):
+ if d.startswith("//lib:") or d.startswith("//lib/"):
+ p = d[len("//lib:"):]
+ else:
+ p = d[d.index(":")+1:].lower()
+ if "__" in p:
+ p = p[:p.index("__")]
+ print("* " + p)
+ print()
+ print("[[%s_license]]" % safename)
+ print("----")
+ filename = n[2:].replace(":", "/")
+ try:
+ with open(filename, errors='ignore') as fd:
+ copyfileobj(fd, stdout)
+ except TypeError:
+ with open(filename) as fd:
+ copyfileobj(fd, stdout)
+ print()
+ print("----")
+ print()
if args.asciidoctor:
- print(
-"""
+ print("""
GERRIT
------
Part of link:index.html[Gerrit Code Review]
diff --git a/tools/download_file.py b/tools/download_file.py
index 26671f0..29398e6 100755
--- a/tools/download_file.py
+++ b/tools/download_file.py
@@ -30,49 +30,50 @@
def safe_mkdirs(d):
- if path.isdir(d):
- return
- try:
- makedirs(d)
- except OSError as err:
- if not path.isdir(d):
- raise err
+ if path.isdir(d):
+ return
+ try:
+ makedirs(d)
+ except OSError as err:
+ if not path.isdir(d):
+ raise err
def download_properties(root_dir):
- """ Get the download properties.
+ """ Get the download properties.
- First tries to find the properties file in the given root directory,
- and if not found there, tries in the Gerrit settings folder in the
- user's home directory.
+ First tries to find the properties file in the given root directory,
+ and if not found there, tries in the Gerrit settings folder in the
+ user's home directory.
- Returns a set of download properties, which may be empty.
+ Returns a set of download properties, which may be empty.
- """
- p = {}
- local_prop = path.join(root_dir, LOCAL_PROPERTIES)
- if not path.isfile(local_prop):
- local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
- if path.isfile(local_prop):
- try:
- with open(local_prop) as fd:
- for line in fd:
- if line.startswith('download.'):
- d = [e.strip() for e in line.split('=', 1)]
- name, url = d[0], d[1]
- p[name[len('download.'):]] = url
- except OSError:
- pass
- return p
+ """
+ p = {}
+ local_prop = path.join(root_dir, LOCAL_PROPERTIES)
+ if not path.isfile(local_prop):
+ local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
+ if path.isfile(local_prop):
+ try:
+ with open(local_prop) as fd:
+ for line in fd:
+ if line.startswith('download.'):
+ d = [e.strip() for e in line.split('=', 1)]
+ name, url = d[0], d[1]
+ p[name[len('download.'):]] = url
+ except OSError:
+ pass
+ return p
def cache_entry(args):
- if args.v:
- h = args.v
- else:
- h = sha1(args.u.encode('utf-8')).hexdigest()
- name = '%s-%s' % (path.basename(args.o), h)
- return path.join(CACHE_DIR, name)
+ if args.v:
+ h = args.v
+ else:
+ h = sha1(args.u.encode('utf-8')).hexdigest()
+ name = '%s-%s' % (path.basename(args.o), h)
+ return path.join(CACHE_DIR, name)
+
opts = OptionParser()
opts.add_option('-o', help='local output file')
@@ -85,89 +86,90 @@
root_dir = args.o
while root_dir and path.dirname(root_dir) != root_dir:
- root_dir, n = path.split(root_dir)
- if n == 'WORKSPACE':
- break
+ root_dir, n = path.split(root_dir)
+ if n == 'WORKSPACE':
+ break
redirects = download_properties(root_dir)
cache_ent = cache_entry(args)
src_url = resolve_url(args.u, redirects)
if not path.exists(cache_ent):
- try:
- safe_mkdirs(path.dirname(cache_ent))
- except OSError as err:
- print('error creating directory %s: %s' %
- (path.dirname(cache_ent), err), file=stderr)
- exit(1)
+ try:
+ safe_mkdirs(path.dirname(cache_ent))
+ except OSError as err:
+ print('error creating directory %s: %s' %
+ (path.dirname(cache_ent), err), file=stderr)
+ exit(1)
- print('Download %s' % src_url, file=stderr)
- try:
- check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
- except OSError as err:
- print('could not invoke curl: %s\nis curl installed?' % err, file=stderr)
- exit(1)
- except CalledProcessError as err:
- print('error using curl: %s' % err, file=stderr)
- exit(1)
+ print('Download %s' % src_url, file=stderr)
+ try:
+ check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
+ except OSError as err:
+ print('could not invoke curl: %s\nis curl installed?' % err,
+ file=stderr)
+ exit(1)
+ except CalledProcessError as err:
+ print('error using curl: %s' % err, file=stderr)
+ exit(1)
if args.v:
- have = hash_file(sha1(), cache_ent).hexdigest()
- if args.v != have:
- print((
- '%s:\n' +
- 'expected %s\n' +
- 'received %s\n') % (src_url, args.v, have), file=stderr)
- try:
- remove(cache_ent)
- except OSError as err:
- if path.exists(cache_ent):
- print('error removing %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ have = hash_file(sha1(), cache_ent).hexdigest()
+ if args.v != have:
+ print((
+ '%s:\n' +
+ 'expected %s\n' +
+ 'received %s\n') % (src_url, args.v, have), file=stderr)
+ try:
+ remove(cache_ent)
+ except OSError as err:
+ if path.exists(cache_ent):
+ print('error removing %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
exclude = []
if args.x:
- exclude += args.x
+ exclude += args.x
if args.exclude_java_sources:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if n.endswith('.java'):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if n.endswith('.java'):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
if args.unsign:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if (n.endswith('.RSA')
- or n.endswith('.SF')
- or n.endswith('.LIST')):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if (n.endswith('.RSA')
+ or n.endswith('.SF')
+ or n.endswith('.LIST')):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
safe_mkdirs(path.dirname(args.o))
if exclude:
- try:
- shutil.copyfile(cache_ent, args.o)
- except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
- try:
- check_call(['zip', '-d', args.o] + exclude)
- except CalledProcessError as err:
- print('error removing files from zip: %s' % err, file=stderr)
- exit(1)
-else:
- try:
- link(cache_ent, args.o)
- except OSError as err:
try:
- shutil.copyfile(cache_ent, args.o)
+ shutil.copyfile(cache_ent, args.o)
except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
+ try:
+ check_call(['zip', '-d', args.o] + exclude)
+ except CalledProcessError as err:
+ print('error removing files from zip: %s' % err, file=stderr)
+ exit(1)
+else:
+ try:
+ link(cache_ent, args.o)
+ except OSError as err:
+ try:
+ shutil.copyfile(cache_ent, args.o)
+ except (shutil.Error, IOError) as err:
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
diff --git a/tools/eclipse/project.py b/tools/eclipse/project.py
index a6b0964..b99c04e 100755
--- a/tools/eclipse/project.py
+++ b/tools/eclipse/project.py
@@ -30,20 +30,20 @@
GWT = '//gerrit-gwtui:ui_module'
AUTO = '//lib/auto:auto-value'
JRE = '/'.join([
- 'org.eclipse.jdt.launching.JRE_CONTAINER',
- 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
- 'JavaSE-1.8',
+ 'org.eclipse.jdt.launching.JRE_CONTAINER',
+ 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
+ 'JavaSE-1.8',
])
# Map of targets to corresponding classpath collector rules
cp_targets = {
- AUTO: '//tools/eclipse:autovalue_classpath_collect',
- GWT: '//tools/eclipse:gwt_classpath_collect',
- MAIN: '//tools/eclipse:main_classpath_collect',
+ AUTO: '//tools/eclipse:autovalue_classpath_collect',
+ GWT: '//tools/eclipse:gwt_classpath_collect',
+ MAIN: '//tools/eclipse:main_classpath_collect',
}
ROOT = path.abspath(__file__)
while not path.exists(path.join(ROOT, 'WORKSPACE')):
- ROOT = path.dirname(ROOT)
+ ROOT = path.dirname(ROOT)
opts = OptionParser()
opts.add_option('--plugins', help='create eclipse projects for plugins',
@@ -56,38 +56,43 @@
batch_option = '--batch' if args.batch else None
+
def _build_bazel_cmd(*args):
- cmd = ['bazel']
- if batch_option:
- cmd.append('--batch')
- for arg in args:
- cmd.append(arg)
- return cmd
+ cmd = ['bazel']
+ if batch_option:
+ cmd.append('--batch')
+ for arg in args:
+ cmd.append(arg)
+ return cmd
+
def retrieve_ext_location():
- return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+ return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+
def gen_bazel_path():
- bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
- with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
- fd.write("bazel=%s\n" % bazel)
- fd.write("PATH=%s\n" % environ["PATH"])
+ bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
+ with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
+ fd.write("bazel=%s\n" % bazel)
+ fd.write("PATH=%s\n" % environ["PATH"])
+
def _query_classpath(target):
- deps = []
- t = cp_targets[target]
- try:
- check_call(_build_bazel_cmd('build', t))
- except CalledProcessError:
- exit(1)
- name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
- deps = [line.rstrip('\n') for line in open(name)]
- return deps
+ deps = []
+ t = cp_targets[target]
+ try:
+ check_call(_build_bazel_cmd('build', t))
+ except CalledProcessError:
+ exit(1)
+ name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
+ deps = [line.rstrip('\n') for line in open(name)]
+ return deps
+
def gen_project(name='gerrit', root=ROOT):
- p = path.join(root, '.project')
- with open(p, 'w') as fd:
- print("""\
+ p = path.join(root, '.project')
+ with open(p, 'w') as fd:
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>%(name)s</name>
@@ -102,16 +107,17 @@
</projectDescription>\
""" % {"name": name}, file=fd)
+
def gen_plugin_classpath(root):
- p = path.join(root, '.classpath')
- with open(p, 'w') as fd:
- if path.exists(path.join(root, 'src', 'test', 'java')):
- testpath = """
+ p = path.join(root, '.classpath')
+ with open(p, 'w') as fd:
+ if path.exists(path.join(root, 'src', 'test', 'java')):
+ testpath = """
<classpathentry excluding="**/BUILD" kind="src" path="src/test/java"\
out="eclipse-out/test"/>"""
- else:
- testpath = ""
- print("""\
+ else:
+ testpath = ""
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="**/BUILD" kind="src" path="src/main/java"/>%(testpath)s
@@ -120,186 +126,193 @@
<classpathentry kind="output" path="eclipse-out/classes"/>
</classpath>""" % {"testpath": testpath}, file=fd)
+
def gen_classpath(ext):
- def make_classpath():
- impl = minidom.getDOMImplementation()
- return impl.createDocument(None, 'classpath', None)
+ def make_classpath():
+ impl = minidom.getDOMImplementation()
+ return impl.createDocument(None, 'classpath', None)
- def classpathentry(kind, path, src=None, out=None, exported=None):
- e = doc.createElement('classpathentry')
- e.setAttribute('kind', kind)
- # TODO(davido): Remove this and other exclude BUILD files hack
- # when this Bazel bug is fixed:
- # https://github.com/bazelbuild/bazel/issues/1083
- if kind == 'src':
- e.setAttribute('excluding', '**/BUILD')
- e.setAttribute('path', path)
- if src:
- e.setAttribute('sourcepath', src)
- if out:
- e.setAttribute('output', out)
- if exported:
- e.setAttribute('exported', 'true')
- doc.documentElement.appendChild(e)
+ def classpathentry(kind, path, src=None, out=None, exported=None):
+ e = doc.createElement('classpathentry')
+ e.setAttribute('kind', kind)
+ # TODO(davido): Remove this and other exclude BUILD files hack
+ # when this Bazel bug is fixed:
+ # https://github.com/bazelbuild/bazel/issues/1083
+ if kind == 'src':
+ e.setAttribute('excluding', '**/BUILD')
+ e.setAttribute('path', path)
+ if src:
+ e.setAttribute('sourcepath', src)
+ if out:
+ e.setAttribute('output', out)
+ if exported:
+ e.setAttribute('exported', 'true')
+ doc.documentElement.appendChild(e)
- doc = make_classpath()
- src = set()
- lib = set()
- proto = set()
- gwt_src = set()
- gwt_lib = set()
- plugins = set()
+ doc = make_classpath()
+ src = set()
+ lib = set()
+ proto = set()
+ gwt_src = set()
+ gwt_lib = set()
+ plugins = set()
- # Classpath entries are absolute for cross-cell support
- java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
- srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
- for p in _query_classpath(MAIN):
- if p.endswith('-src.jar'):
- # gwt_module() depends on -src.jar for Java to JavaScript compiles.
- if p.startswith("external"):
- p = path.join(ext, p)
- gwt_lib.add(p)
- continue
-
- m = java_library.match(p)
- if m:
- src.add(m.group(1))
- # Exceptions: both source and lib
- if p.endswith('libquery_parser.jar') or \
- p.endswith('libgerrit-prolog-common.jar'):
- lib.add(p)
- # JGit dependency from external repository
- if 'gerrit-' not in p and 'jgit' in p:
- lib.add(p)
- # Assume any jars in /proto/ are from java_proto_library rules
- if '/bin/proto/' in p:
- proto.add(p)
- else:
- # Don't mess up with Bazel internal test runner dependencies.
- # When we use Eclipse we rely on it for running the tests
- if p.endswith("external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
- continue
- if p.startswith("external"):
- p = path.join(ext, p)
- lib.add(p)
-
- for p in _query_classpath(GWT):
- m = java_library.match(p)
- if m:
- gwt_src.add(m.group(1))
-
- classpathentry('src', 'java')
- classpathentry('src', 'javatests', out='eclipse-out/test')
- classpathentry('src', 'resources')
- for s in sorted(src):
- out = None
-
- if s.startswith('lib/'):
- out = 'eclipse-out/lib'
- elif s.startswith('plugins/'):
- if args.plugins:
- plugins.add(s)
- continue
- out = 'eclipse-out/' + s
-
- p = path.join(s, 'java')
- if path.exists(p):
- classpathentry('src', p, out=out)
- continue
-
- for env in ['main', 'test']:
- o = None
- if out:
- o = out + '/' + env
- elif env == 'test':
- o = 'eclipse-out/test'
-
- for srctype in ['java', 'resources']:
- p = path.join(s, 'src', env, srctype)
- if path.exists(p):
- classpathentry('src', p, out=o)
-
- for libs in [lib, gwt_lib]:
- for j in sorted(libs):
- s = None
- m = srcs.match(j)
- if m:
- prefix = m.group(1)
- suffix = m.group(2)
- p = path.join(prefix, "jar", "%s-src.jar" % suffix)
- if path.exists(p):
- s = p
- if args.plugins:
- classpathentry('lib', j, s, exported=True)
- else:
- # Filter out the source JARs that we pull through transitive closure of
- # GWT plugin API (we add source directories themself). Exception is
- # libEdit-src.jar, that is needed for GWT SDM to work.
- m = java_library.match(j)
- if m:
- if m.group(1).startswith("gerrit-") and \
- j.endswith("-src.jar") and \
- not j.endswith("libEdit-src.jar"):
+ # Classpath entries are absolute for cross-cell support
+ java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
+ srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
+ for p in _query_classpath(MAIN):
+ if p.endswith('-src.jar'):
+ # gwt_module() depends on -src.jar for Java to JavaScript compiles.
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ gwt_lib.add(p)
continue
- classpathentry('lib', j, s)
- for p in sorted(proto):
- s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
- s = s.replace('.jar', '-src.jar')
- classpathentry('lib', p, s)
+ m = java_library.match(p)
+ if m:
+ src.add(m.group(1))
+ # Exceptions: both source and lib
+ if p.endswith('libquery_parser.jar') or \
+ p.endswith('libgerrit-prolog-common.jar'):
+ lib.add(p)
+ # JGit dependency from external repository
+ if 'gerrit-' not in p and 'jgit' in p:
+ lib.add(p)
+ # Assume any jars in /proto/ are from java_proto_library rules
+ if '/bin/proto/' in p:
+ proto.add(p)
+ else:
+ # Don't mess up with Bazel internal test runner dependencies.
+ # When we use Eclipse we rely on it for running the tests
+ if p.endswith(
+ "external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
+ continue
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ lib.add(p)
- for s in sorted(gwt_src):
- p = path.join(ROOT, s, 'src', 'main', 'java')
- if path.exists(p):
- classpathentry('lib', p, out='eclipse-out/gwtsrc')
+ for p in _query_classpath(GWT):
+ m = java_library.match(p)
+ if m:
+ gwt_src.add(m.group(1))
- classpathentry('con', JRE)
- classpathentry('output', 'eclipse-out/classes')
+ classpathentry('src', 'java')
+ classpathentry('src', 'javatests', out='eclipse-out/test')
+ classpathentry('src', 'resources')
+ for s in sorted(src):
+ out = None
- p = path.join(ROOT, '.classpath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ if s.startswith('lib/'):
+ out = 'eclipse-out/lib'
+ elif s.startswith('plugins/'):
+ if args.plugins:
+ plugins.add(s)
+ continue
+ out = 'eclipse-out/' + s
- if args.plugins:
- for plugin in plugins:
- plugindir = path.join(ROOT, plugin)
- try:
- gen_project(plugin.replace('plugins/', ""), plugindir)
- gen_plugin_classpath(plugindir)
- except (IOError, OSError) as err:
- print('error generating project for %s: %s' % (plugin, err),
- file=sys.stderr)
+ p = path.join(s, 'java')
+ if path.exists(p):
+ classpathentry('src', p, out=out)
+ continue
+
+ for env in ['main', 'test']:
+ o = None
+ if out:
+ o = out + '/' + env
+ elif env == 'test':
+ o = 'eclipse-out/test'
+
+ for srctype in ['java', 'resources']:
+ p = path.join(s, 'src', env, srctype)
+ if path.exists(p):
+ classpathentry('src', p, out=o)
+
+ for libs in [lib, gwt_lib]:
+ for j in sorted(libs):
+ s = None
+ m = srcs.match(j)
+ if m:
+ prefix = m.group(1)
+ suffix = m.group(2)
+ p = path.join(prefix, "jar", "%s-src.jar" % suffix)
+ if path.exists(p):
+ s = p
+ if args.plugins:
+ classpathentry('lib', j, s, exported=True)
+ else:
+ # Filter out the source JARs that we pull through transitive
+ # closure of GWT plugin API (we add source directories
+ # themselves). Exception is libEdit-src.jar, that is needed
+ # for GWT SDM to work.
+ m = java_library.match(j)
+ if m:
+ if m.group(1).startswith("gerrit-") and \
+ j.endswith("-src.jar") and \
+ not j.endswith("libEdit-src.jar"):
+ continue
+ classpathentry('lib', j, s)
+
+ for p in sorted(proto):
+ s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
+ s = s.replace('.jar', '-src.jar')
+ classpathentry('lib', p, s)
+
+ for s in sorted(gwt_src):
+ p = path.join(ROOT, s, 'src', 'main', 'java')
+ if path.exists(p):
+ classpathentry('lib', p, out='eclipse-out/gwtsrc')
+
+ classpathentry('con', JRE)
+ classpathentry('output', 'eclipse-out/classes')
+
+ p = path.join(ROOT, '.classpath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
+ if args.plugins:
+ for plugin in plugins:
+ plugindir = path.join(ROOT, plugin)
+ try:
+ gen_project(plugin.replace('plugins/', ""), plugindir)
+ gen_plugin_classpath(plugindir)
+ except (IOError, OSError) as err:
+ print('error generating project for %s: %s' % (plugin, err),
+ file=sys.stderr)
+
def gen_factorypath(ext):
- doc = minidom.getDOMImplementation().createDocument(None, 'factorypath', None)
- for jar in _query_classpath(AUTO):
- e = doc.createElement('factorypathentry')
- e.setAttribute('kind', 'EXTJAR')
- e.setAttribute('id', path.join(ext, jar))
- e.setAttribute('enabled', 'true')
- e.setAttribute('runInBatchMode', 'false')
- doc.documentElement.appendChild(e)
+ doc = minidom.getDOMImplementation().createDocument(None, 'factorypath',
+ None)
+ for jar in _query_classpath(AUTO):
+ e = doc.createElement('factorypathentry')
+ e.setAttribute('kind', 'EXTJAR')
+ e.setAttribute('id', path.join(ext, jar))
+ e.setAttribute('enabled', 'true')
+ e.setAttribute('runInBatchMode', 'false')
+ doc.documentElement.appendChild(e)
- p = path.join(ROOT, '.factorypath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ p = path.join(ROOT, '.factorypath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
try:
- ext_location = retrieve_ext_location().decode("utf-8")
- gen_project(args.project_name)
- gen_classpath(ext_location)
- gen_factorypath(ext_location)
- gen_bazel_path()
+ ext_location = retrieve_ext_location().decode("utf-8")
+ gen_project(args.project_name)
+ gen_classpath(ext_location)
+ gen_factorypath(ext_location)
+ gen_bazel_path()
- # TODO(davido): Remove this when GWT gone
- gwt_working_dir = ".gwt_work_dir"
- if not path.isdir(gwt_working_dir):
- makedirs(path.join(ROOT, gwt_working_dir))
+ # TODO(davido): Remove this when GWT gone
+ gwt_working_dir = ".gwt_work_dir"
+ if not path.isdir(gwt_working_dir):
+ makedirs(path.join(ROOT, gwt_working_dir))
- try:
- check_call(_build_bazel_cmd('build', MAIN, GWT, '//java/org/eclipse/jgit:libEdit-src.jar'))
- except CalledProcessError:
- exit(1)
+ try:
+ check_call(_build_bazel_cmd('build', MAIN, GWT,
+ '//java/org/eclipse/jgit:libEdit-src.jar'))
+ except CalledProcessError:
+ exit(1)
except KeyboardInterrupt:
- print('Interrupted by user', file=sys.stderr)
- exit(1)
+ print('Interrupted by user', file=sys.stderr)
+ exit(1)
diff --git a/tools/js/bower2bazel.py b/tools/js/bower2bazel.py
index 171ab55..7b24524 100755
--- a/tools/js/bower2bazel.py
+++ b/tools/js/bower2bazel.py
@@ -13,9 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Suggested call sequence:
+"""
+Suggested call sequence:
-python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl -b lib/js/bower_components.bzl
+python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl \
+ -b lib/js/bower_components.bzl
"""
from __future__ import print_function
@@ -31,139 +33,147 @@
import glob
import bowerutil
-# list of licenses for packages that don't specify one in their bower.json file.
+# list of licenses for packages that don't specify one in their bower.json file
package_licenses = {
- "codemirror-minified": "codemirror-minified",
- "es6-promise": "es6-promise",
- "fetch": "fetch",
- "font-roboto": "polymer",
- "iron-a11y-announcer": "polymer",
- "iron-a11y-keys-behavior": "polymer",
- "iron-autogrow-textarea": "polymer",
- "iron-behaviors": "polymer",
- "iron-dropdown": "polymer",
- "iron-fit-behavior": "polymer",
- "iron-flex-layout": "polymer",
- "iron-form-element-behavior": "polymer",
- "iron-icon": "polymer",
- "iron-iconset-svg": "polymer",
- "iron-input": "polymer",
- "iron-menu-behavior": "polymer",
- "iron-meta": "polymer",
- "iron-overlay-behavior": "polymer",
- "iron-resizable-behavior": "polymer",
- "iron-selector": "polymer",
- "iron-validatable-behavior": "polymer",
- "moment": "moment",
- "neon-animation": "polymer",
- "page": "page.js",
- "paper-button": "polymer",
- "paper-icon-button": "polymer",
- "paper-input": "polymer",
- "paper-item": "polymer",
- "paper-listbox": "polymer",
- "paper-toggle-button": "polymer",
- "paper-styles": "polymer",
- "paper-tabs": "polymer",
- "polymer": "polymer",
- "polymer-resin": "polymer",
- "promise-polyfill": "promise-polyfill",
- "web-animations-js": "Apache2.0",
- "webcomponentsjs": "polymer",
- "paper-material": "polymer",
- "paper-styles": "polymer",
- "paper-behaviors": "polymer",
- "paper-ripple": "polymer",
- "iron-checked-element-behavior": "polymer",
- "font-roboto": "polymer",
+ "codemirror-minified": "codemirror-minified",
+ "es6-promise": "es6-promise",
+ "fetch": "fetch",
+ "font-roboto": "polymer",
+ "iron-a11y-announcer": "polymer",
+ "iron-a11y-keys-behavior": "polymer",
+ "iron-autogrow-textarea": "polymer",
+ "iron-behaviors": "polymer",
+ "iron-dropdown": "polymer",
+ "iron-fit-behavior": "polymer",
+ "iron-flex-layout": "polymer",
+ "iron-form-element-behavior": "polymer",
+ "iron-icon": "polymer",
+ "iron-iconset-svg": "polymer",
+ "iron-input": "polymer",
+ "iron-menu-behavior": "polymer",
+ "iron-meta": "polymer",
+ "iron-overlay-behavior": "polymer",
+ "iron-resizable-behavior": "polymer",
+ "iron-selector": "polymer",
+ "iron-validatable-behavior": "polymer",
+ "moment": "moment",
+ "neon-animation": "polymer",
+ "page": "page.js",
+ "paper-button": "polymer",
+ "paper-icon-button": "polymer",
+ "paper-input": "polymer",
+ "paper-item": "polymer",
+ "paper-listbox": "polymer",
+ "paper-toggle-button": "polymer",
+ "paper-styles": "polymer",
+ "paper-tabs": "polymer",
+ "polymer": "polymer",
+ "polymer-resin": "polymer",
+ "promise-polyfill": "promise-polyfill",
+ "web-animations-js": "Apache2.0",
+ "webcomponentsjs": "polymer",
+ "paper-material": "polymer",
+ "paper-styles": "polymer",
+ "paper-behaviors": "polymer",
+ "paper-ripple": "polymer",
+ "iron-checked-element-behavior": "polymer",
+ "font-roboto": "polymer",
}
def build_bower_json(version_targets, seeds):
- """Generate bower JSON file, return its path.
+ """Generate bower JSON file, return its path.
- Args:
- version_targets: bazel target names of the versions.json file.
- seeds: an iterable of bower package names of the seed packages, ie.
- the packages whose versions we control manually.
- """
- bower_json = collections.OrderedDict()
- bower_json['name'] = 'bower2bazel-output'
- bower_json['version'] = '0.0.0'
- bower_json['description'] = 'Auto-generated bower.json for dependency management'
- bower_json['private'] = True
- bower_json['dependencies'] = {}
+ Args:
+ version_targets: bazel target names of the versions.json file.
+        seeds: an iterable of bower package names of the seed packages, i.e.
+ the packages whose versions we control manually.
+ """
+ bower_json = collections.OrderedDict()
+ bower_json['name'] = 'bower2bazel-output'
+ bower_json['version'] = '0.0.0'
+ bower_json['description'] = 'Auto-generated bower.json for dependency ' + \
+ 'management'
+ bower_json['private'] = True
+ bower_json['dependencies'] = {}
- seeds = set(seeds)
- for v in version_targets:
- path = os.path.join("bazel-out/*-fastbuild/bin", v.lstrip("/").replace(":", "/"))
- fs = glob.glob(path)
- assert len(fs) == 1, '%s: file not found or multiple files found: %s' % (path, fs)
- with open(fs[0]) as f:
- j = json.load(f)
- if "" in j:
- # drop dummy entries.
- del j[""]
+ seeds = set(seeds)
+ for v in version_targets:
+ path = os.path.join("bazel-out/*-fastbuild/bin",
+ v.lstrip("/").replace(":", "/"))
+ fs = glob.glob(path)
+ err_msg = '%s: file not found or multiple files found: %s' % (path, fs)
+ assert len(fs) == 1, err_msg
+ with open(fs[0]) as f:
+ j = json.load(f)
+ if "" in j:
+ # drop dummy entries.
+ del j[""]
- trimmed = {}
- for k, v in j.items():
- if k in seeds:
- trimmed[k] = v
+ trimmed = {}
+ for k, v in j.items():
+ if k in seeds:
+ trimmed[k] = v
- bower_json['dependencies'].update(trimmed)
+ bower_json['dependencies'].update(trimmed)
- tmpdir = tempfile.mkdtemp()
- ret = os.path.join(tmpdir, 'bower.json')
- with open(ret, 'w') as f:
- json.dump(bower_json, f, indent=2)
- return ret
+ tmpdir = tempfile.mkdtemp()
+ ret = os.path.join(tmpdir, 'bower.json')
+ with open(ret, 'w') as f:
+ json.dump(bower_json, f, indent=2)
+ return ret
+
def decode(input):
- try:
- return input.decode("utf-8")
- except TypeError:
- return input
+ try:
+ return input.decode("utf-8")
+ except TypeError:
+ return input
+
def bower_command(args):
- base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
- exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
- fs = sorted(glob.glob(exp))
- assert len(fs) == 1, "bower tarball not found or have multiple versions %s" % fs
- return ["python", os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
+ base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
+ exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
+ fs = sorted(glob.glob(exp))
+ err_msg = "bower tarball not found or have multiple versions %s" % fs
+ assert len(fs) == 1, err_msg
+ return ["python",
+ os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-w', help='.bzl output for WORKSPACE')
- opts.add_option('-b', help='.bzl output for //lib:BUILD')
- opts, args = opts.parse_args()
+ opts = optparse.OptionParser()
+ opts.add_option('-w', help='.bzl output for WORKSPACE')
+ opts.add_option('-b', help='.bzl output for //lib:BUILD')
+ opts, args = opts.parse_args()
- target_str = subprocess.check_output([
- "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
- seed_str = subprocess.check_output([
- "bazel", "query", "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
- targets = [s for s in decode(target_str).split('\n') if s]
- seeds = [s for s in decode(seed_str).split('\n') if s]
- prefix = "//lib/js:"
- non_seeds = [s for s in seeds if not s.startswith(prefix)]
- assert not non_seeds, non_seeds
- seeds = set([s[len(prefix):] for s in seeds])
+ target_str = subprocess.check_output([
+ "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
+ seed_str = subprocess.check_output(
+ ["bazel", "query",
+ "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
+ targets = [s for s in decode(target_str).split('\n') if s]
+ seeds = [s for s in decode(seed_str).split('\n') if s]
+ prefix = "//lib/js:"
+ non_seeds = [s for s in seeds if not s.startswith(prefix)]
+ assert not non_seeds, non_seeds
+ seeds = set([s[len(prefix):] for s in seeds])
- version_targets = [t + "-versions.json" for t in targets]
- subprocess.check_call(['bazel', 'build'] + version_targets)
- bower_json_path = build_bower_json(version_targets, seeds)
- dir = os.path.dirname(bower_json_path)
- cmd = bower_command(["install"])
+ version_targets = [t + "-versions.json" for t in targets]
+ subprocess.check_call(['bazel', 'build'] + version_targets)
+ bower_json_path = build_bower_json(version_targets, seeds)
+ dir = os.path.dirname(bower_json_path)
+ cmd = bower_command(["install"])
- build_out = sys.stdout
- if opts.b:
- build_out = open(opts.b + ".tmp", 'w')
+ build_out = sys.stdout
+ if opts.b:
+ build_out = open(opts.b + ".tmp", 'w')
- ws_out = sys.stdout
- if opts.b:
- ws_out = open(opts.w + ".tmp", 'w')
+ ws_out = sys.stdout
+ if opts.b:
+ ws_out = open(opts.w + ".tmp", 'w')
- header = """# DO NOT EDIT
+ header = """# DO NOT EDIT
# generated with the following command:
#
# %s
@@ -171,30 +181,30 @@
""" % ' '.join(sys.argv)
- ws_out.write(header)
- build_out.write(header)
+ ws_out.write(header)
+ build_out.write(header)
- oldwd = os.getcwd()
- os.chdir(dir)
- subprocess.check_call(cmd)
+ oldwd = os.getcwd()
+ os.chdir(dir)
+ subprocess.check_call(cmd)
- interpret_bower_json(seeds, ws_out, build_out)
- ws_out.close()
- build_out.close()
+ interpret_bower_json(seeds, ws_out, build_out)
+ ws_out.close()
+ build_out.close()
- os.chdir(oldwd)
- os.rename(opts.w + ".tmp", opts.w)
- os.rename(opts.b + ".tmp", opts.b)
+ os.chdir(oldwd)
+ os.rename(opts.w + ".tmp", opts.w)
+ os.rename(opts.b + ".tmp", opts.b)
def dump_workspace(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
- out.write('def load_bower_archives():\n')
+ out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
+ out.write('def load_bower_archives():\n')
- for d in data:
- if d["name"] in seeds:
- continue
- out.write(""" bower_archive(
+ for d in data:
+ if d["name"] in seeds:
+ continue
+ out.write(""" bower_archive(
name = "%(name)s",
package = "%(normalized-name)s",
version = "%(version)s",
@@ -203,48 +213,49 @@
def dump_build(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
- out.write('def define_bower_components():\n')
- for d in data:
- out.write(" bower_component(\n")
- out.write(" name = \"%s\",\n" % d["name"])
- out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
- deps = sorted(d.get("dependencies", {}).keys())
- if deps:
- if len(deps) == 1:
- out.write(" deps = [ \":%s\" ],\n" % deps[0])
- else:
- out.write(" deps = [\n")
- for dep in deps:
- out.write(" \":%s\",\n" % dep)
- out.write(" ],\n")
- if d["name"] in seeds:
- out.write(" seed = True,\n")
- out.write(" )\n")
- # done
+ out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
+ out.write('def define_bower_components():\n')
+ for d in data:
+ out.write(" bower_component(\n")
+ out.write(" name = \"%s\",\n" % d["name"])
+ out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
+ deps = sorted(d.get("dependencies", {}).keys())
+ if deps:
+ if len(deps) == 1:
+ out.write(" deps = [ \":%s\" ],\n" % deps[0])
+ else:
+ out.write(" deps = [\n")
+ for dep in deps:
+ out.write(" \":%s\",\n" % dep)
+ out.write(" ],\n")
+ if d["name"] in seeds:
+ out.write(" seed = True,\n")
+ out.write(" )\n")
+ # done
def interpret_bower_json(seeds, ws_out, build_out):
- out = subprocess.check_output(["find", "bower_components/", "-name", ".bower.json"])
+ out = subprocess.check_output(["find", "bower_components/", "-name",
+ ".bower.json"])
- data = []
- for f in sorted(decode(out).split('\n')):
- if not f:
- continue
- pkg = json.load(open(f))
- pkg_name = pkg["name"]
+ data = []
+ for f in sorted(decode(out).split('\n')):
+ if not f:
+ continue
+ pkg = json.load(open(f))
+ pkg_name = pkg["name"]
- pkg["bazel-sha1"] = bowerutil.hash_bower_component(
- hashlib.sha1(), os.path.dirname(f)).hexdigest()
- license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
+ pkg["bazel-sha1"] = bowerutil.hash_bower_component(
+ hashlib.sha1(), os.path.dirname(f)).hexdigest()
+ license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
- pkg["bazel-license"] = license
- pkg["normalized-name"] = pkg["_originalSource"]
- data.append(pkg)
+ pkg["bazel-license"] = license
+ pkg["normalized-name"] = pkg["_originalSource"]
+ data.append(pkg)
- dump_workspace(data, seeds, ws_out)
- dump_build(data, seeds, build_out)
+ dump_workspace(data, seeds, ws_out)
+ dump_build(data, seeds, build_out)
if __name__ == '__main__':
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/tools/js/bowerutil.py b/tools/js/bowerutil.py
index c2e11cd..9fb82af 100644
--- a/tools/js/bowerutil.py
+++ b/tools/js/bowerutil.py
@@ -16,31 +16,31 @@
def hash_bower_component(hash_obj, path):
- """Hash the contents of a bower component directory.
+ """Hash the contents of a bower component directory.
- This is a stable hash of a directory downloaded with `bower install`, minus
- the .bower.json file, which is autogenerated each time by bower. Used in lieu
- of hashing a zipfile of the contents, since zipfiles are difficult to hash in
- a stable manner.
+ This is a stable hash of a directory downloaded with `bower install`, minus
+ the .bower.json file, which is autogenerated each time by bower. Used in
+ lieu of hashing a zipfile of the contents, since zipfiles are difficult to
+ hash in a stable manner.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the directory to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the directory to hash.
- Returns:
- The passed-in hash_obj.
- """
- if not os.path.isdir(path):
- raise ValueError('Not a directory: %s' % path)
+ Returns:
+ The passed-in hash_obj.
+ """
+ if not os.path.isdir(path):
+ raise ValueError('Not a directory: %s' % path)
- path = os.path.abspath(path)
- for root, dirs, files in os.walk(path):
- dirs.sort()
- for f in sorted(files):
- if f == '.bower.json':
- continue
- p = os.path.join(root, f)
- hash_obj.update(p[len(path)+1:].encode("utf-8"))
- hash_obj.update(open(p, "rb").read())
+ path = os.path.abspath(path)
+ for root, dirs, files in os.walk(path):
+ dirs.sort()
+ for f in sorted(files):
+ if f == '.bower.json':
+ continue
+ p = os.path.join(root, f)
+ hash_obj.update(p[len(path)+1:].encode("utf-8"))
+ hash_obj.update(open(p, "rb").read())
- return hash_obj
+ return hash_obj
diff --git a/tools/js/download_bower.py b/tools/js/download_bower.py
index 3db39d5..c9a5df6 100755
--- a/tools/js/download_bower.py
+++ b/tools/js/download_bower.py
@@ -30,99 +30,105 @@
def bower_cmd(bower, *args):
- cmd = bower.split(' ')
- cmd.extend(args)
- return cmd
+ cmd = bower.split(' ')
+ cmd.extend(args)
+ return cmd
def bower_info(bower, name, package, version):
- cmd = bower_cmd(bower, '-l=error', '-j',
- 'info', '%s#%s' % (package, version))
- try:
- p = subprocess.Popen(cmd , stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except:
- sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
- raise
- out, err = p.communicate()
- if p.returncode:
- sys.stderr.write(err)
- raise OSError('Command failed: %s' % ' '.join(cmd))
+ cmd = bower_cmd(bower, '-l=error', '-j',
+ 'info', '%s#%s' % (package, version))
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except:
+ sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
+ raise
+ out, err = p.communicate()
+ if p.returncode:
+ sys.stderr.write(err)
+ raise OSError('Command failed: %s' % ' '.join(cmd))
- try:
- info = json.loads(out)
- except ValueError:
- raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
- info_name = info.get('name')
- if info_name != name:
- raise ValueError('expected package name %s, got: %s' % (name, info_name))
- return info
+ try:
+ info = json.loads(out)
+ except ValueError:
+ raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
+ info_name = info.get('name')
+ if info_name != name:
+ raise ValueError(
+ 'expected package name %s, got: %s' % (name, info_name))
+ return info
def ignore_deps(info):
- # Tell bower to ignore dependencies so we just download this component. This
- # is just an optimization, since we only pick out the component we need, but
- # it's important when downloading sizable dependency trees.
- #
- # As of 1.6.5 I don't think ignoredDependencies can be specified on the
- # command line with --config, so we have to create .bowerrc.
- deps = info.get('dependencies')
- if deps:
- with open(os.path.join('.bowerrc'), 'w') as f:
- json.dump({'ignoredDependencies': list(deps.keys())}, f)
+ # Tell bower to ignore dependencies so we just download this component.
+ # This is just an optimization, since we only pick out the component we
+ # need, but it's important when downloading sizable dependency trees.
+ #
+ # As of 1.6.5 I don't think ignoredDependencies can be specified on the
+ # command line with --config, so we have to create .bowerrc.
+ deps = info.get('dependencies')
+ if deps:
+ with open(os.path.join('.bowerrc'), 'w') as f:
+ json.dump({'ignoredDependencies': list(deps.keys())}, f)
def cache_entry(name, package, version, sha1):
- if not sha1:
- sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
- return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
+ if not sha1:
+ sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
+ return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-n', help='short name of component')
- opts.add_option('-b', help='bower command')
- opts.add_option('-p', help='full package name of component')
- opts.add_option('-v', help='version number')
- opts.add_option('-s', help='expected content sha1')
- opts.add_option('-o', help='output file location')
- opts, args_ = opts.parse_args(args)
+ opts = optparse.OptionParser()
+ opts.add_option('-n', help='short name of component')
+ opts.add_option('-b', help='bower command')
+ opts.add_option('-p', help='full package name of component')
+ opts.add_option('-v', help='version number')
+ opts.add_option('-s', help='expected content sha1')
+ opts.add_option('-o', help='output file location')
+ opts, args_ = opts.parse_args(args)
- assert opts.p
- assert opts.v
- assert opts.n
+ assert opts.p
+ assert opts.v
+ assert opts.n
- cwd = os.getcwd()
- outzip = os.path.join(cwd, opts.o)
- cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
+ cwd = os.getcwd()
+ outzip = os.path.join(cwd, opts.o)
+ cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
- if not os.path.exists(cached):
- info = bower_info(opts.b, opts.n, opts.p, opts.v)
- ignore_deps(info)
- subprocess.check_call(
- bower_cmd(opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
- bc = os.path.join(cwd, 'bower_components')
- subprocess.check_call(
- ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
- cwd=bc)
+ if not os.path.exists(cached):
+ info = bower_info(opts.b, opts.n, opts.p, opts.v)
+ ignore_deps(info)
+ subprocess.check_call(
+ bower_cmd(
+ opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
+ bc = os.path.join(cwd, 'bower_components')
+ subprocess.check_call(
+ ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
+ cwd=bc)
- if opts.s:
- path = os.path.join(bc, opts.n)
- sha1 = bowerutil.hash_bower_component(hashlib.sha1(), path).hexdigest()
- if opts.s != sha1:
- print((
- '%s#%s:\n'
- 'expected %s\n'
- 'received %s\n') % (opts.p, opts.v, opts.s, sha1), file=sys.stderr)
- try:
- os.remove(cached)
- except OSError as err:
- if path.exists(cached):
- print('error removing %s: %s' % (cached, err), file=sys.stderr)
- return 1
+ if opts.s:
+ path = os.path.join(bc, opts.n)
+ sha1 = bowerutil.hash_bower_component(
+ hashlib.sha1(), path).hexdigest()
+ if opts.s != sha1:
+ print((
+ '%s#%s:\n'
+ 'expected %s\n'
+ 'received %s\n') % (opts.p, opts.v, opts.s, sha1),
+ file=sys.stderr)
+ try:
+ os.remove(cached)
+ except OSError as err:
+ if path.exists(cached):
+ print('error removing %s: %s' % (cached, err),
+ file=sys.stderr)
+ return 1
- shutil.copyfile(cached, outzip)
- return 0
+ shutil.copyfile(cached, outzip)
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/npm_pack.py b/tools/js/npm_pack.py
index de45083..d817701 100755
--- a/tools/js/npm_pack.py
+++ b/tools/js/npm_pack.py
@@ -32,49 +32,49 @@
def is_bundled(tar):
- # No entries for directories, so scan for a matching prefix.
- for entry in tar.getmembers():
- if entry.name.startswith('package/node_modules/'):
- return True
- return False
+ # No entries for directories, so scan for a matching prefix.
+ for entry in tar.getmembers():
+ if entry.name.startswith('package/node_modules/'):
+ return True
+ return False
def bundle_dependencies():
- with open('package.json') as f:
- package = json.load(f)
- package['bundledDependencies'] = list(package['dependencies'].keys())
- with open('package.json', 'w') as f:
- json.dump(package, f)
+ with open('package.json') as f:
+ package = json.load(f)
+ package['bundledDependencies'] = list(package['dependencies'].keys())
+ with open('package.json', 'w') as f:
+ json.dump(package, f)
def main(args):
- if len(args) != 2:
- print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
- return 1
+ if len(args) != 2:
+ print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, version = args
- filename = '%s-%s.tgz' % (name, version)
- url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
+ name, version = args
+ filename = '%s-%s.tgz' % (name, version)
+ url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
- tmpdir = tempfile.mkdtemp();
- tgz = os.path.join(tmpdir, filename)
- atexit.register(lambda: shutil.rmtree(tmpdir))
+ tmpdir = tempfile.mkdtemp()
+ tgz = os.path.join(tmpdir, filename)
+ atexit.register(lambda: shutil.rmtree(tmpdir))
- subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
- with tarfile.open(tgz, 'r:gz') as tar:
- if is_bundled(tar):
- print('%s already has bundled node_modules' % filename)
- return 1
- tar.extractall(path=tmpdir)
+ subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
+ with tarfile.open(tgz, 'r:gz') as tar:
+ if is_bundled(tar):
+ print('%s already has bundled node_modules' % filename)
+ return 1
+ tar.extractall(path=tmpdir)
- oldpwd = os.getcwd()
- os.chdir(os.path.join(tmpdir, 'package'))
- bundle_dependencies()
- subprocess.check_call(['npm', 'install'])
- subprocess.check_call(['npm', 'pack'])
- shutil.copy(filename, os.path.join(oldpwd, filename))
- return 0
+ oldpwd = os.getcwd()
+ os.chdir(os.path.join(tmpdir, 'package'))
+ bundle_dependencies()
+ subprocess.check_call(['npm', 'install'])
+ subprocess.check_call(['npm', 'pack'])
+ shutil.copy(filename, os.path.join(oldpwd, filename))
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/run_npm_binary.py b/tools/js/run_npm_binary.py
index d769b98..dfcdaca 100644
--- a/tools/js/run_npm_binary.py
+++ b/tools/js/run_npm_binary.py
@@ -27,65 +27,68 @@
def extract(path, outdir, bin):
- if os.path.exists(os.path.join(outdir, bin)):
- return # Another process finished extracting, ignore.
+ if os.path.exists(os.path.join(outdir, bin)):
+ return # Another process finished extracting, ignore.
- # Use a temp directory adjacent to outdir so shutil.move can use the same
- # device atomically.
- tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def cleanup():
- try:
- shutil.rmtree(tmpdir)
- except OSError:
- pass # Too late now
- atexit.register(cleanup)
+ # Use a temp directory adjacent to outdir so shutil.move can use the same
+ # device atomically.
+ tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def extract_one(mem):
- dest = os.path.join(outdir, mem.name)
- tar.extract(mem, path=tmpdir)
- try:
- os.makedirs(os.path.dirname(dest))
- except OSError:
- pass # Either exists, or will fail on the next line.
- shutil.move(os.path.join(tmpdir, mem.name), dest)
+ def cleanup():
+ try:
+ shutil.rmtree(tmpdir)
+ except OSError:
+ pass # Too late now
+ atexit.register(cleanup)
- with tarfile.open(path, 'r:gz') as tar:
- for mem in tar.getmembers():
- if mem.name != bin:
- extract_one(mem)
- # Extract bin last so other processes only short circuit when extraction is
- # finished.
- extract_one(tar.getmember(bin))
+ def extract_one(mem):
+ dest = os.path.join(outdir, mem.name)
+ tar.extract(mem, path=tmpdir)
+ try:
+ os.makedirs(os.path.dirname(dest))
+ except OSError:
+ pass # Either exists, or will fail on the next line.
+ shutil.move(os.path.join(tmpdir, mem.name), dest)
+
+ with tarfile.open(path, 'r:gz') as tar:
+ for mem in tar.getmembers():
+ if mem.name != bin:
+ extract_one(mem)
+ # Extract bin last so other processes only short circuit when
+ # extraction is finished.
+ extract_one(tar.getmember(bin))
+
def main(args):
- path = args[0]
- suffix = '.npm_binary.tgz'
- tgz = os.path.basename(path)
+ path = args[0]
+ suffix = '.npm_binary.tgz'
+ tgz = os.path.basename(path)
- parts = tgz[:-len(suffix)].split('@')
+ parts = tgz[:-len(suffix)].split('@')
- if not tgz.endswith(suffix) or len(parts) != 2:
- print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
- return 1
+ if not tgz.endswith(suffix) or len(parts) != 2:
+ print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, _ = parts
+ name, _ = parts
- # Avoid importing from gerrit because we don't want to depend on the right CWD.
- sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
- outdir = '%s-%s' % (path[:-len(suffix)], sha1)
- rel_bin = os.path.join('package', 'bin', name)
- bin = os.path.join(outdir, rel_bin)
- if not os.path.isfile(bin):
- extract(path, outdir, rel_bin)
+ # Avoid importing from gerrit because we don't want to depend on the right
+ # working directory
+ sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
+ outdir = '%s-%s' % (path[:-len(suffix)], sha1)
+ rel_bin = os.path.join('package', 'bin', name)
+ bin = os.path.join(outdir, rel_bin)
+ if not os.path.isfile(bin):
+ extract(path, outdir, rel_bin)
- nodejs = spawn.find_executable('nodejs')
- if nodejs:
- # Debian installs Node.js as 'nodejs', due to a conflict with another
- # package.
- subprocess.check_call([nodejs, bin] + args[1:])
- else:
- subprocess.check_call([bin] + args[1:])
+ nodejs = spawn.find_executable('nodejs')
+ if nodejs:
+ # Debian installs Node.js as 'nodejs', due to a conflict with another
+ # package.
+ subprocess.check_call([nodejs, bin] + args[1:])
+ else:
+ subprocess.check_call([bin] + args[1:])
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/maven/mvn.py b/tools/maven/mvn.py
index 50c4ac6..d47d027 100755
--- a/tools/maven/mvn.py
+++ b/tools/maven/mvn.py
@@ -29,56 +29,57 @@
args, ctx = opts.parse_args()
if not args.v:
- print('version is empty', file=stderr)
- exit(1)
+ print('version is empty', file=stderr)
+ exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
- root = path.dirname(root)
+ root = path.dirname(root)
if 'install' == args.a:
- cmd = [
- 'mvn',
- 'install:install-file',
- '-Dversion=%s' % args.v,
- ]
+ cmd = [
+ 'mvn',
+ 'install:install-file',
+ '-Dversion=%s' % args.v,
+ ]
elif 'deploy' == args.a:
- cmd = [
- 'mvn',
- 'gpg:sign-and-deploy-file',
- '-DrepositoryId=%s' % args.repository,
- '-Durl=%s' % args.url,
- ]
+ cmd = [
+ 'mvn',
+ 'gpg:sign-and-deploy-file',
+ '-DrepositoryId=%s' % args.repository,
+ '-Durl=%s' % args.url,
+ ]
else:
- print("unknown action -a %s" % args.a, file=stderr)
- exit(1)
+ print("unknown action -a %s" % args.a, file=stderr)
+ exit(1)
for spec in args.s:
- artifact, packaging_type, src = spec.split(':')
- exe = cmd + [
- '-DpomFile=%s' % path.join(root, 'tools', 'maven', '%s_pom.xml' % artifact),
- '-Dpackaging=%s' % packaging_type,
- '-Dfile=%s' % src,
- ]
- try:
- if environ.get('VERBOSE'):
- print(' '.join(exe), file=stderr)
- check_output(exe)
- except Exception as e:
- print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
- file=stderr)
- if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
- print('Command output\n%s' % e.output, file=stderr)
- exit(1)
+ artifact, packaging_type, src = spec.split(':')
+ exe = cmd + [
+ '-DpomFile=%s' % path.join(root, 'tools', 'maven',
+ '%s_pom.xml' % artifact),
+ '-Dpackaging=%s' % packaging_type,
+ '-Dfile=%s' % src,
+ ]
+ try:
+ if environ.get('VERBOSE'):
+ print(' '.join(exe), file=stderr)
+ check_output(exe)
+ except Exception as e:
+ print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
+ file=stderr)
+ if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
+ print('Command output\n%s' % e.output, file=stderr)
+ exit(1)
out = stderr
if args.o:
- out = open(args.o, 'w')
+ out = open(args.o, 'w')
with out as fd:
- if args.repository:
- print('Repository: %s' % args.repository, file=fd)
- if args.url:
- print('URL: %s' % args.url, file=fd)
- print('Version: %s' % args.v, file=fd)
+ if args.repository:
+ print('Repository: %s' % args.repository, file=fd)
+ if args.url:
+ print('URL: %s' % args.url, file=fd)
+ print('Version: %s' % args.v, file=fd)
diff --git a/tools/merge_jars.py b/tools/merge_jars.py
index 97a87c4..6b46069 100755
--- a/tools/merge_jars.py
+++ b/tools/merge_jars.py
@@ -17,11 +17,10 @@
import collections
import sys
import zipfile
-import io
if len(sys.argv) < 3:
- print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
- exit(1)
+ print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
+ exit(1)
outfile = sys.argv[1]
infiles = sys.argv[2:]
@@ -29,22 +28,22 @@
SERVICES = 'META-INF/services/'
try:
- with zipfile.ZipFile(outfile, 'w') as outzip:
- services = collections.defaultdict(lambda: '')
- for infile in infiles:
- with zipfile.ZipFile(infile) as inzip:
- for info in inzip.infolist():
- n = info.filename
- if n in seen:
- continue
- elif n.startswith(SERVICES):
- # Concatenate all provider configuration files.
- services[n] += inzip.read(n).decode("UTF-8")
- continue
- outzip.writestr(info, inzip.read(n))
- seen.add(n)
+ with zipfile.ZipFile(outfile, 'w') as outzip:
+ services = collections.defaultdict(lambda: '')
+ for infile in infiles:
+ with zipfile.ZipFile(infile) as inzip:
+ for info in inzip.infolist():
+ n = info.filename
+ if n in seen:
+ continue
+ elif n.startswith(SERVICES):
+ # Concatenate all provider configuration files.
+ services[n] += inzip.read(n).decode("UTF-8")
+ continue
+ outzip.writestr(info, inzip.read(n))
+ seen.add(n)
- for n, v in list(services.items()):
- outzip.writestr(n, v)
+ for n, v in list(services.items()):
+ outzip.writestr(n, v)
except Exception as err:
- exit('Failed to merge jars: %s' % err)
+ exit('Failed to merge jars: %s' % err)
diff --git a/tools/release-announcement.py b/tools/release-announcement.py
index f700185..a25a340 100755
--- a/tools/release-announcement.py
+++ b/tools/release-announcement.py
@@ -101,9 +101,9 @@
summary = summary + "."
data = {
- "version": Version(options.version),
- "previous": options.previous,
- "summary": summary
+ "version": Version(options.version),
+ "previous": options.previous,
+ "summary": summary
}
war = os.path.join(
diff --git a/tools/util.py b/tools/util.py
index e8182ed..45d0541 100644
--- a/tools/util.py
+++ b/tools/util.py
@@ -15,57 +15,59 @@
from os import path
REPO_ROOTS = {
- 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
- 'GERRIT_API': 'https://gerrit-api.commondatastorage.googleapis.com/release',
- 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
- 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
- 'MAVEN_SNAPSHOT': 'https://oss.sonatype.org/content/repositories/snapshots',
+ 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
+ 'GERRIT_API':
+ 'https://gerrit-api.commondatastorage.googleapis.com/release',
+ 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
+ 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
+ 'MAVEN_SNAPSHOT':
+ 'https://oss.sonatype.org/content/repositories/snapshots',
}
def resolve_url(url, redirects):
- """ Resolve URL of a Maven artifact.
+ """ Resolve URL of a Maven artifact.
- prefix:path is passed as URL. prefix identifies known or custom
- repositories that can be rewritten in redirects set, passed as
- second arguments.
+ prefix:path is passed as URL. prefix identifies known or custom
+ repositories that can be rewritten in redirects set, passed as
+    second argument.
- A special case is supported, when prefix neither exists in
- REPO_ROOTS, no in redirects set: the url is returned as is.
- This enables plugins to pass custom maven_repository URL as is
- directly to maven_jar().
+ A special case is supported, when prefix neither exists in
+    REPO_ROOTS, nor in redirects set: the URL is returned as is.
+ This enables plugins to pass custom maven_repository URL as is
+ directly to maven_jar().
- Returns a resolved path for Maven artifact.
- """
- s = url.find(':')
- if s < 0:
- return url
- scheme, rest = url[:s], url[s+1:]
- if scheme in redirects:
- root = redirects[scheme]
- elif scheme in REPO_ROOTS:
- root = REPO_ROOTS[scheme]
- else:
- return url
- root = root.rstrip('/')
- rest = rest.lstrip('/')
- return '/'.join([root, rest])
+    Returns a resolved path for the Maven artifact.
+ """
+ s = url.find(':')
+ if s < 0:
+ return url
+ scheme, rest = url[:s], url[s+1:]
+ if scheme in redirects:
+ root = redirects[scheme]
+ elif scheme in REPO_ROOTS:
+ root = REPO_ROOTS[scheme]
+ else:
+ return url
+ root = root.rstrip('/')
+ rest = rest.lstrip('/')
+ return '/'.join([root, rest])
def hash_file(hash_obj, path):
- """Hash the contents of a file.
+ """Hash the contents of a file.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the file to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the file to hash.
- Returns:
- The passed-in hash_obj.
- """
- with open(path, 'rb') as f:
- while True:
- b = f.read(8192)
- if not b:
- break
- hash_obj.update(b)
- return hash_obj
+ Returns:
+ The passed-in hash_obj.
+ """
+ with open(path, 'rb') as f:
+ while True:
+ b = f.read(8192)
+ if not b:
+ break
+ hash_obj.update(b)
+ return hash_obj
diff --git a/tools/util_test.py b/tools/util_test.py
index 30647ba..fa67696 100644
--- a/tools/util_test.py
+++ b/tools/util_test.py
@@ -16,28 +16,32 @@
import unittest
from util import resolve_url
+
class TestResolveUrl(unittest.TestCase):
- """ run to test:
- python -m unittest -v util_test
- """
+ """ run to test:
+ python -m unittest -v util_test
+ """
- def testKnown(self):
- url = resolve_url('GERRIT:foo.jar', {})
- self.assertEqual(url, 'http://gerrit-maven.storage.googleapis.com/foo.jar')
+ def testKnown(self):
+ url = resolve_url('GERRIT:foo.jar', {})
+ self.assertEqual(url,
+ 'http://gerrit-maven.storage.googleapis.com/foo.jar')
- def testKnownRedirect(self):
- url = resolve_url('MAVEN_CENTRAL:foo.jar',
- {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
- self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
+ def testKnownRedirect(self):
+ url = resolve_url('MAVEN_CENTRAL:foo.jar',
+ {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
+ self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
- def testCustom(self):
- url = resolve_url('http://maven.example.com/release/foo.jar', {})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustom(self):
+ url = resolve_url('http://maven.example.com/release/foo.jar', {})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
- def testCustomRedirect(self):
- url = resolve_url('MAVEN_EXAMPLE:foo.jar',
- {'MAVEN_EXAMPLE': 'http://maven.example.com/release'})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustomRedirect(self):
+ url = resolve_url('MAVEN_EXAMPLE:foo.jar',
+ {'MAVEN_EXAMPLE':
+ 'http://maven.example.com/release'})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+
if __name__ == '__main__':
- unittest.main()
+ unittest.main()
diff --git a/tools/version.py b/tools/version.py
index 72b0134..4aafcb0 100755
--- a/tools/version.py
+++ b/tools/version.py
@@ -23,24 +23,24 @@
opts, args = parser.parse_args()
if not len(args):
- parser.error('not enough arguments')
+ parser.error('not enough arguments')
elif len(args) > 1:
- parser.error('too many arguments')
+ parser.error('too many arguments')
DEST_PATTERN = r'\g<1>%s\g<3>' % args[0]
def replace_in_file(filename, src_pattern):
- try:
- f = open(filename, "r")
- s = f.read()
- f.close()
- s = re.sub(src_pattern, DEST_PATTERN, s)
- f = open(filename, "w")
- f.write(s)
- f.close()
- except IOError as err:
- print('error updating %s: %s' % (filename, err), file=sys.stderr)
+ try:
+ f = open(filename, "r")
+ s = f.read()
+ f.close()
+ s = re.sub(src_pattern, DEST_PATTERN, s)
+ f = open(filename, "w")
+ f.write(s)
+ f.close()
+ except IOError as err:
+ print('error updating %s: %s' % (filename, err), file=sys.stderr)
src_pattern = re.compile(r'^(\s*<version>)([-.\w]+)(</version>\s*)$',
@@ -48,8 +48,8 @@
for project in ['gerrit-acceptance-framework', 'gerrit-extension-api',
'gerrit-plugin-api', 'gerrit-plugin-gwtui',
'gerrit-war']:
- pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
- replace_in_file(pom, src_pattern)
+ pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
+ replace_in_file(pom, src_pattern)
src_pattern = re.compile(r'^(GERRIT_VERSION = ")([-.\w]+)(")$', re.MULTILINE)
replace_in_file('version.bzl', src_pattern)