Merge "Measure and report time spent loading change data"
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
index d542a0b..40e022d 100644
--- a/.settings/org.eclipse.jdt.core.prefs
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -17,6 +17,7 @@
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.doc.comment.support=enabled
+org.eclipse.jdt.core.compiler.problem.APILeak=warning
org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
@@ -91,6 +92,7 @@
org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
+org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning
org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
diff --git a/Documentation/config-gerrit.txt b/Documentation/config-gerrit.txt
index ce7adc2..7ed0e17 100644
--- a/Documentation/config-gerrit.txt
+++ b/Documentation/config-gerrit.txt
@@ -773,10 +773,11 @@
+
Default is 128 MiB per cache, except:
+
+* `"change_notes"`: disk storage is disabled by default
* `"diff_summary"`: default is `1g` (1 GiB of disk space)
+
-If 0, disk storage for the cache is disabled.
+If 0 or negative, disk storage for the cache is disabled.
==== [[cache_names]]Standard Caches
diff --git a/Documentation/replace_macros.py b/Documentation/replace_macros.py
index c76d133..6f90697 100755
--- a/Documentation/replace_macros.py
+++ b/Documentation/replace_macros.py
@@ -183,7 +183,8 @@
element.insertBefore(a, element.firstChild);
// remove the link icon when the mouse is moved away,
- // but keep it shown if the mouse is over the element, the link or the icon
+ // but keep it shown if the mouse is over the element, the link or
+ // the icon
hide = function(evt) {
if (document.elementFromPoint(evt.clientX, evt.clientY) != element
&& document.elementFromPoint(evt.clientX, evt.clientY) != a
@@ -229,54 +230,54 @@
options, _ = opts.parse_args()
try:
- try:
- out_file = open(options.out, 'w', errors='ignore')
- src_file = open(options.src, 'r', errors='ignore')
- except TypeError:
- out_file = open(options.out, 'w')
- src_file = open(options.src, 'r')
- last_line = ''
- ignore_next_line = False
- last_title = ''
- for line in src_file:
- if PAT_GERRIT.match(last_line):
- # Case of "GERRIT\n------" at the footer
- out_file.write(GERRIT_UPLINK)
- last_line = ''
- elif PAT_SEARCHBOX.match(last_line):
- # Case of 'SEARCHBOX\n---------'
- if options.searchbox:
- out_file.write(SEARCH_BOX)
- last_line = ''
- elif PAT_INCLUDE.match(line):
- # Case of 'include::<filename>'
- match = PAT_INCLUDE.match(line)
- out_file.write(last_line)
- last_line = match.group(1) + options.suffix + match.group(2) + '\n'
- elif PAT_STARS.match(line):
- if PAT_TITLE.match(last_line):
- # Case of the title in '.<title>\n****\nget::<url>\n****'
- match = PAT_TITLE.match(last_line)
- last_title = GET_TITLE % match.group(1)
- else:
- out_file.write(last_line)
- last_title = ''
- elif PAT_GET.match(line):
- # Case of '****\nget::<url>\n****' in rest api
- url = PAT_GET.match(line).group(1)
- out_file.write(GET_MACRO.format(url) % last_title)
- ignore_next_line = True
- elif ignore_next_line:
- # Handle the trailing '****' of the 'get::' case
- last_line = ''
- ignore_next_line = False
- else:
- out_file.write(last_line)
- last_line = line
- out_file.write(last_line)
- out_file.write(LINK_SCRIPT)
- out_file.close()
+ try:
+ out_file = open(options.out, 'w', errors='ignore')
+ src_file = open(options.src, 'r', errors='ignore')
+ except TypeError:
+ out_file = open(options.out, 'w')
+ src_file = open(options.src, 'r')
+ last_line = ''
+ ignore_next_line = False
+ last_title = ''
+ for line in src_file:
+ if PAT_GERRIT.match(last_line):
+ # Case of "GERRIT\n------" at the footer
+ out_file.write(GERRIT_UPLINK)
+ last_line = ''
+ elif PAT_SEARCHBOX.match(last_line):
+ # Case of 'SEARCHBOX\n---------'
+ if options.searchbox:
+ out_file.write(SEARCH_BOX)
+ last_line = ''
+ elif PAT_INCLUDE.match(line):
+ # Case of 'include::<filename>'
+ match = PAT_INCLUDE.match(line)
+ out_file.write(last_line)
+ last_line = match.group(1) + options.suffix + match.group(2) + '\n'
+ elif PAT_STARS.match(line):
+ if PAT_TITLE.match(last_line):
+ # Case of the title in '.<title>\n****\nget::<url>\n****'
+ match = PAT_TITLE.match(last_line)
+ last_title = GET_TITLE % match.group(1)
+ else:
+ out_file.write(last_line)
+ last_title = ''
+ elif PAT_GET.match(line):
+ # Case of '****\nget::<url>\n****' in rest api
+ url = PAT_GET.match(line).group(1)
+ out_file.write(GET_MACRO.format(url) % last_title)
+ ignore_next_line = True
+ elif ignore_next_line:
+ # Handle the trailing '****' of the 'get::' case
+ last_line = ''
+ ignore_next_line = False
+ else:
+ out_file.write(last_line)
+ last_line = line
+ out_file.write(last_line)
+ out_file.write(LINK_SCRIPT)
+ out_file.close()
except IOError as err:
- sys.stderr.write(
- "error while expanding %s to %s: %s" % (options.src, options.out, err))
- exit(1)
+ sys.stderr.write(
+ "error while expanding %s to %s: %s" % (options.src, options.out, err))
+ exit(1)
diff --git a/WORKSPACE b/WORKSPACE
index d482577..94138e4 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -680,8 +680,8 @@
maven_jar(
name = "junit",
- artifact = "junit:junit:4.11",
- sha1 = "4e031bb61df09069aeb2bffb4019e7a5034a4ee0",
+ artifact = "junit:junit:4.12",
+ sha1 = "2973d150c0dc1fefe998f834810d68f278ea58ec",
)
maven_jar(
@@ -697,18 +697,30 @@
sha1 = "4785a3c21320980282f9f33d0d1264a69040538f",
)
-TRUTH_VERS = "0.39"
+TRUTH_VERS = "0.40"
maven_jar(
name = "truth",
artifact = "com.google.truth:truth:" + TRUTH_VERS,
- sha1 = "bd1bf5706ff34eb7ff80fef8b0c4320f112ef899",
+ sha1 = "0d74e716afec045cc4a178dbbfde2a8314ae5574",
)
maven_jar(
name = "truth-java8-extension",
artifact = "com.google.truth.extensions:truth-java8-extension:" + TRUTH_VERS,
- sha1 = "1499bc88cda9d674afb30da9813b44bcd4512d0d",
+ sha1 = "636e49d675bc28e0b3ae0edd077d6acbbb159166",
+)
+
+maven_jar(
+ name = "truth-liteproto-extension",
+ artifact = "com.google.truth.extensions:truth-liteproto-extension:" + TRUTH_VERS,
+ sha1 = "21210ac07e5cfbe83f04733f806224a6c0ae4d2d",
+)
+
+maven_jar(
+ name = "truth-proto-extension",
+ artifact = "com.google.truth.extensions:truth-proto-extension:" + TRUTH_VERS,
+ sha1 = "5a2b504143a5fec2b6be8bce292b3b7577a81789",
)
# When bumping the easymock version number, make sure to also move powermock to a compatible version
diff --git a/contrib/check-valid-commit.py b/contrib/check-valid-commit.py
index d26fa58..763ae3e 100755
--- a/contrib/check-valid-commit.py
+++ b/contrib/check-valid-commit.py
@@ -10,13 +10,16 @@
SSH_USER = 'bot'
SSH_HOST = 'localhost'
SSH_PORT = 29418
-SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER, SSH_HOST, SSH_PORT)
+SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER,
+ SSH_HOST,
+ SSH_PORT)
FAILURE_SCORE = '--code-review=-2'
FAILURE_MESSAGE = 'This commit message does not match the standard.' \
+ ' Please correct the commit message and upload a replacement patch.'
PASS_SCORE = '--code-review=0'
PASS_MESSAGE = ''
+
def main():
change = None
project = None
@@ -25,8 +28,9 @@
patchset = None
try:
- opts, _args = getopt.getopt(sys.argv[1:], '', \
- ['change=', 'project=', 'branch=', 'commit=', 'patchset='])
+ opts, _args = getopt.getopt(sys.argv[1:], '',
+ ['change=', 'project=', 'branch=',
+ 'commit=', 'patchset='])
except getopt.GetoptError as err:
print('Error: %s' % (err))
usage()
@@ -48,8 +52,7 @@
usage()
sys.exit(-1)
- if change == None or project == None or branch == None \
- or commit == None or patchset == None:
+ if any(p is None for p in [change, project, branch, commit, patchset]):
usage()
sys.exit(-1)
@@ -57,16 +60,16 @@
status, output = subprocess.getstatusoutput(command)
if status != 0:
- print('Error running \'%s\'. status: %s, output:\n\n%s' % \
- (command, status, output))
+ print('Error running \'%s\'. status: %s, output:\n\n%s' %
+ (command, status, output))
sys.exit(-1)
commitMessage = output[(output.find('\n\n')+2):]
commitLines = commitMessage.split('\n')
if len(commitLines) > 1 and len(commitLines[1]) != 0:
- fail(commit, 'Invalid commit summary. The summary must be ' \
- + 'one line followed by a blank line.')
+ fail(commit, 'Invalid commit summary. The summary must be '
+ + 'one line followed by a blank line.')
i = 0
for line in commitLines:
@@ -76,23 +79,27 @@
passes(commit)
+
def usage():
print('Usage:\n')
- print(sys.argv[0] + ' --change <change id> --project <project name> ' \
- + '--branch <branch> --commit <sha1> --patchset <patchset id>')
+ print(sys.argv[0] + ' --change <change id> --project <project name> '
+ + '--branch <branch> --commit <sha1> --patchset <patchset id>')
-def fail( commit, message ):
+
+def fail(commit, message):
command = SSH_COMMAND + FAILURE_SCORE + ' -m \\\"' \
- + _shell_escape( FAILURE_MESSAGE + '\n\n' + message) \
+ + _shell_escape(FAILURE_MESSAGE + '\n\n' + message) \
+ '\\\" ' + commit
subprocess.getstatusoutput(command)
sys.exit(1)
-def passes( commit ):
+
+def passes(commit):
command = SSH_COMMAND + PASS_SCORE + ' -m \\\"' \
+ _shell_escape(PASS_MESSAGE) + ' \\\" ' + commit
subprocess.getstatusoutput(command)
+
def _shell_escape(x):
s = ''
for c in x:
@@ -102,6 +109,6 @@
s = s + c
return s
+
if __name__ == '__main__':
main()
-
diff --git a/contrib/populate-fixture-data.py b/contrib/populate-fixture-data.py
index 93ac34f..07a0f01 100755
--- a/contrib/populate-fixture-data.py
+++ b/contrib/populate-fixture-data.py
@@ -47,228 +47,235 @@
# Random names from US Census Data
FIRST_NAMES = [
- "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime", "Elaine",
- "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla", "Katrina",
- "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally", "Emma", "Susan",
- "Amanda", "Alyssa", "Patty", "Angie", "Dominique", "Cynthia", "Jennifer",
- "Theresa", "Desiree", "Kaylee", "Maureen", "Jeanne", "Kellie", "Valerie",
- "Nina", "Judy", "Diamond", "Anita", "Rebekah", "Stefanie", "Kendra", "Erin",
- "Tammie", "Tracey", "Bridget", "Krystal", "Jasmin", "Sonia", "Meghan",
- "Rebecca", "Jeanette", "Meredith", "Beverly", "Natasha", "Chloe", "Selena",
- "Teresa", "Sheena", "Cassandra", "Rhonda", "Tami", "Jodi", "Shelly", "Angela",
- "Kimberly", "Terry", "Joanna", "Isabella", "Lindsey", "Loretta", "Dana",
- "Veronica", "Carolyn", "Laura", "Karen", "Dawn", "Alejandra", "Cassie",
- "Lorraine", "Yolanda", "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri",
- "Doris", "Sandra", "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael",
- "Donna", "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
- "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney", "Jacqueline",
- "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn", "Nancy",
- "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara", "Chelsey", "Cassidy",
- "Jean", "Chelsea", "Jenny", "Diana", "Haley", "Kristine", "Kristina", "Erika",
- "Jenna", "Alison", "Deanna", "Abigail", "Melissa", "Sierra", "Linda",
- "Monica", "Tasha", "Traci", "Yvonne", "Tracy", "Marie", "Maria", "Michaela",
- "Stacie", "April", "Morgan", "Cathy", "Darlene", "Cristina", "Emily"
- "Ian", "Russell", "Phillip", "Jay", "Barry", "Brad", "Frederick", "Fernando",
- "Timothy", "Ricardo", "Bernard", "Daniel", "Ruben", "Alexis", "Kyle", "Malik",
- "Norman", "Kent", "Melvin", "Stephen", "Daryl", "Kurt", "Greg", "Alex",
- "Mario", "Riley", "Marvin", "Dan", "Steven", "Roberto", "Lucas", "Leroy",
- "Preston", "Drew", "Fred", "Casey", "Wesley", "Elijah", "Reginald", "Joel",
- "Christopher", "Jacob", "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott",
- "Terrence", "Jim", "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus",
- "Peter", "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
- "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
- "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
- "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
- "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
- "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
- "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin", "Cristian",
- "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac", "Maurice",
- "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius", "Jerome", "Jason",
- "Harold", "Kerry", "Clarence", "Gregg", "Shane", "Eduardo", "Micheal",
- "Howard", "Vernon", "Rodney", "Anthony", "Levi", "Larry", "Franklin", "Jimmy",
- "Jonathon", "Carl",
+ "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime",
+ "Elaine", "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla",
+ "Katrina", "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally",
+ "Emma", "Susan", "Amanda", "Alyssa", "Patty", "Angie", "Dominique",
+ "Cynthia", "Jennifer", "Theresa", "Desiree", "Kaylee", "Maureen",
+ "Jeanne", "Kellie", "Valerie", "Nina", "Judy", "Diamond", "Anita",
+ "Rebekah", "Stefanie", "Kendra", "Erin", "Tammie", "Tracey", "Bridget",
+ "Krystal", "Jasmin", "Sonia", "Meghan", "Rebecca", "Jeanette", "Meredith",
+ "Beverly", "Natasha", "Chloe", "Selena", "Teresa", "Sheena", "Cassandra",
+ "Rhonda", "Tami", "Jodi", "Shelly", "Angela", "Kimberly", "Terry",
+ "Joanna", "Isabella", "Lindsey", "Loretta", "Dana", "Veronica", "Carolyn",
+ "Laura", "Karen", "Dawn", "Alejandra", "Cassie", "Lorraine", "Yolanda",
+ "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri", "Doris", "Sandra",
+ "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael", "Donna",
+ "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
+ "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney",
+ "Jacqueline", "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn",
+ "Nancy", "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara",
+ "Chelsey", "Cassidy", "Jean", "Chelsea", "Jenny", "Diana", "Haley",
+ "Kristine", "Kristina", "Erika", "Jenna", "Alison", "Deanna", "Abigail",
+ "Melissa", "Sierra", "Linda", "Monica", "Tasha", "Traci", "Yvonne",
+ "Tracy", "Marie", "Maria", "Michaela", "Stacie", "April", "Morgan",
+    "Cathy", "Darlene", "Cristina", "Emily", "Ian", "Russell", "Phillip",
+ "Barry", "Brad", "Frederick", "Fernando", "Timothy", "Ricardo", "Bernard",
+ "Daniel", "Ruben", "Alexis", "Kyle", "Malik", "Norman", "Kent", "Melvin",
+ "Stephen", "Daryl", "Kurt", "Greg", "Alex", "Mario", "Riley", "Marvin",
+ "Dan", "Steven", "Roberto", "Lucas", "Leroy", "Preston", "Drew", "Fred",
+ "Casey", "Wesley", "Elijah", "Reginald", "Joel", "Christopher", "Jacob",
+ "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott", "Terrence", "Jim",
+ "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus", "Peter",
+ "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
+ "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
+ "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
+ "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
+ "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
+ "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
+ "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin",
+ "Cristian", "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac",
+ "Maurice", "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius",
+ "Jerome", "Jason", "Harold", "Kerry", "Clarence", "Gregg", "Shane",
+ "Eduardo", "Micheal", "Howard", "Vernon", "Rodney", "Anthony", "Levi",
+ "Larry", "Franklin", "Jimmy", "Jonathon", "Carl",
]
LAST_NAMES = [
- "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey", "Farley",
- "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings", "Braun", "Rangel",
- "Casey", "Dougherty", "Hancock", "Wolf", "Henry", "Thomas", "Bentley",
- "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul", "Hess", "Chase",
- "Mckay", "Bender", "Colins", "Montoya", "Townsend", "Potts", "Ayala", "Avery",
- "Sherman", "Tapia", "Hamilton", "Ferguson", "Huang", "Hooper", "Zamora",
- "Logan", "Lloyd", "Quinn", "Monroe", "Brock", "Ibarra", "Fowler", "Weiss",
- "Montgomery", "Diaz", "Dixon", "Olson", "Robertson", "Arias", "Benjamin",
- "Abbott", "Stein", "Schroeder", "Beck", "Velasquez", "Barber", "Nichols",
- "Ortiz", "Burns", "Moody", "Stokes", "Wilcox", "Rush", "Michael", "Kidd",
- "Rowland", "Mclean", "Saunders", "Chung", "Newton", "Potter", "Hickman",
- "Ray", "Larson", "Figueroa", "Duncan", "Sparks", "Rose", "Hodge", "Huynh",
- "Joseph", "Morales", "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn",
- "Wise", "Knight", "Frederick", "Heath", "Pollard", "Vega", "Mcclain",
- "Buckley", "Conrad", "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns",
- "Mcknight", "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson",
- "Oneill", "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence",
- "Brandt", "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
- "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
- "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
- "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
- "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
- "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy", "Cameron",
- "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish", "Herman", "Hines",
- "Sutton", "Gallegos", "Stephenson", "Lozano", "Franklin", "Howe", "Bauer",
- "Love", "Ali", "Ellison", "Lester", "Guzman", "Jarvis", "Espinoza",
- "Fletcher", "Burton", "Woodard", "Peterson", "Barajas", "Richard", "Bryan",
- "Goodman", "Cline", "Rowe", "Faulkner", "Crawford", "Mueller", "Patterson",
- "Hull", "Walton", "Wu", "Flores", "York", "Dickson", "Barnes", "Fisher",
- "Strong", "Juarez", "Fitzgerald", "Schmitt", "Blevins", "Villa", "Sullivan",
- "Velazquez", "Horton", "Meadows", "Riley", "Barrera", "Neal", "Mendez",
- "Mcdonald", "Floyd", "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston",
- "Davies", "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
- "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
- "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
- "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
- "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
- "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
- "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
- "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
- "Knox", "Hanna", "Fields",
+ "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey",
+ "Farley", "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings",
+ "Braun", "Rangel", "Casey", "Dougherty", "Hancock", "Wolf", "Henry",
+ "Thomas", "Bentley", "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul",
+ "Hess", "Chase", "Mckay", "Bender", "Colins", "Montoya", "Townsend",
+ "Potts", "Ayala", "Avery", "Sherman", "Tapia", "Hamilton", "Ferguson",
+ "Huang", "Hooper", "Zamora", "Logan", "Lloyd", "Quinn", "Monroe", "Brock",
+ "Ibarra", "Fowler", "Weiss", "Montgomery", "Diaz", "Dixon", "Olson",
+ "Robertson", "Arias", "Benjamin", "Abbott", "Stein", "Schroeder", "Beck",
+ "Velasquez", "Barber", "Nichols", "Ortiz", "Burns", "Moody", "Stokes",
+ "Wilcox", "Rush", "Michael", "Kidd", "Rowland", "Mclean", "Saunders",
+ "Chung", "Newton", "Potter", "Hickman", "Ray", "Larson", "Figueroa",
+ "Duncan", "Sparks", "Rose", "Hodge", "Huynh", "Joseph", "Morales",
+ "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn", "Wise", "Knight",
+ "Frederick", "Heath", "Pollard", "Vega", "Mcclain", "Buckley", "Conrad",
+ "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns", "Mcknight",
+ "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson", "Oneill",
+ "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence", "Brandt",
+ "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
+ "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
+ "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
+ "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
+ "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
+ "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy",
+ "Cameron", "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish",
+ "Herman", "Hines", "Sutton", "Gallegos", "Stephenson", "Lozano",
+ "Franklin", "Howe", "Bauer", "Love", "Ali", "Ellison", "Lester", "Guzman",
+ "Jarvis", "Espinoza", "Fletcher", "Burton", "Woodard", "Peterson",
+ "Barajas", "Richard", "Bryan", "Goodman", "Cline", "Rowe", "Faulkner",
+ "Crawford", "Mueller", "Patterson", "Hull", "Walton", "Wu", "Flores",
+ "York", "Dickson", "Barnes", "Fisher", "Strong", "Juarez", "Fitzgerald",
+ "Schmitt", "Blevins", "Villa", "Sullivan", "Velazquez", "Horton",
+ "Meadows", "Riley", "Barrera", "Neal", "Mendez", "Mcdonald", "Floyd",
+ "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston", "Davies",
+ "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
+ "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
+ "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
+ "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
+ "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
+ "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
+ "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
+ "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
+ "Knox", "Hanna", "Fields",
]
def clean(json_string):
- # Strip JSON XSS Tag
- json_string = json_string.strip()
- if json_string.startswith(")]}'"):
- return json_string[5:]
- return json_string
+ # Strip JSON XSS Tag
+ json_string = json_string.strip()
+ if json_string.startswith(")]}'"):
+ return json_string[5:]
+ return json_string
def basic_auth(user):
- return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
+ return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
def fetch_admin_group():
- global GROUP_ADMIN
- # Get admin group
- r = json.loads(clean(requests.get(BASE_URL + "groups/" + "?suggest=ad&p=All-Projects",
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH).text))
- admin_group_name = r.keys()[0]
- GROUP_ADMIN = r[admin_group_name]
- GROUP_ADMIN["name"] = admin_group_name
+ global GROUP_ADMIN
+ # Get admin group
+ r = json.loads(clean(requests.get(
+ BASE_URL + "groups/?suggest=ad&p=All-Projects",
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH).text))
+    admin_group_name = list(r.keys())[0]
+ GROUP_ADMIN = r[admin_group_name]
+ GROUP_ADMIN["name"] = admin_group_name
def generate_random_text():
- return " ".join([random.choice("lorem ipsum "
- "doleret delendam "
- "\n esse".split(" ")) for _ in xrange(1, 100)])
+ return " ".join([random.choice("lorem ipsum "
+ "doleret delendam "
+ "\n esse".split(" ")) for _ in range(1,
+ 100)])
def set_up():
- global TMP_PATH
- TMP_PATH = tempfile.mkdtemp()
- atexit.register(clean_up)
- os.makedirs(TMP_PATH + "/ssh")
- os.makedirs(TMP_PATH + "/repos")
- fetch_admin_group()
+ global TMP_PATH
+ TMP_PATH = tempfile.mkdtemp()
+ atexit.register(clean_up)
+ os.makedirs(TMP_PATH + "/ssh")
+ os.makedirs(TMP_PATH + "/repos")
+ fetch_admin_group()
def get_random_users(num_users):
- users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
- num_users)
- names = []
- for u in users:
- names.append({"firstname": u[0],
- "lastname": u[1],
- "name": u[0] + " " + u[1],
- "username": u[0] + u[1],
- "email": u[0] + "." + u[1] + "@gerritcodereview.com",
- "http_password": "secret",
- "groups": []})
- return names
+ users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
+ num_users)
+ names = []
+ for u in users:
+ names.append({"firstname": u[0],
+ "lastname": u[1],
+ "name": u[0] + " " + u[1],
+ "username": u[0] + u[1],
+ "email": u[0] + "." + u[1] + "@gerritcodereview.com",
+ "http_password": "secret",
+ "groups": []})
+ return names
def generate_ssh_keys(gerrit_users):
- for user in gerrit_users:
- key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
- subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
- with open(key_file + ".pub", "r") as f:
- user["ssh_key"] = f.read()
+ for user in gerrit_users:
+ key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
+ subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
+ with open(key_file + ".pub", "r") as f:
+ user["ssh_key"] = f.read()
def create_gerrit_groups():
- groups = [
- {"name": "iOS-Maintainers", "description": "iOS Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Android-Maintainers", "description": "Android Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Backend-Maintainers", "description": "Backend Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Script-Maintainers", "description": "Script Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Security-Team", "description": "Sec Team",
- "visible_to_all": False, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]}]
- for g in groups:
- requests.put(BASE_URL + "groups/" + g["name"],
- json.dumps(g),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [g["name"] for g in groups]
+ groups = [
+ {"name": "iOS-Maintainers", "description": "iOS Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Android-Maintainers", "description": "Android Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Backend-Maintainers", "description": "Backend Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Script-Maintainers", "description": "Script Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Security-Team", "description": "Sec Team",
+ "visible_to_all": False, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]}]
+ for g in groups:
+ requests.put(BASE_URL + "groups/" + g["name"],
+ json.dumps(g),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [g["name"] for g in groups]
def create_gerrit_projects(owner_groups):
- projects = [
- {"id": "android", "name": "Android", "parent": "All-Projects",
- "branches": ["master"], "description": "Our android app.",
- "owners": [owner_groups[0]], "create_empty_commit": True},
- {"id": "ios", "name": "iOS", "parent": "All-Projects",
- "branches": ["master"], "description": "Our ios app.",
- "owners": [owner_groups[1]], "create_empty_commit": True},
- {"id": "backend", "name": "Backend", "parent": "All-Projects",
- "branches": ["master"], "description": "Our awesome backend.",
- "owners": [owner_groups[2]], "create_empty_commit": True},
- {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
- "branches": ["master"], "description": "some small scripts.",
- "owners": [owner_groups[3]], "create_empty_commit": True}]
- for p in projects:
- requests.put(BASE_URL + "projects/" + p["name"],
- json.dumps(p),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [p["name"] for p in projects]
+ projects = [
+ {"id": "android", "name": "Android", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our android app.",
+ "owners": [owner_groups[0]], "create_empty_commit": True},
+ {"id": "ios", "name": "iOS", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our ios app.",
+ "owners": [owner_groups[1]], "create_empty_commit": True},
+ {"id": "backend", "name": "Backend", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our awesome backend.",
+ "owners": [owner_groups[2]], "create_empty_commit": True},
+ {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
+ "branches": ["master"], "description": "some small scripts.",
+ "owners": [owner_groups[3]], "create_empty_commit": True}]
+ for p in projects:
+ requests.put(BASE_URL + "projects/" + p["name"],
+ json.dumps(p),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [p["name"] for p in projects]
def create_gerrit_users(gerrit_users):
- for user in gerrit_users:
- requests.put(BASE_URL + "accounts/" + user["username"],
- json.dumps(user),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
+ for user in gerrit_users:
+ requests.put(BASE_URL + "accounts/" + user["username"],
+ json.dumps(user),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
def create_change(user, project_name):
- random_commit_message = generate_random_text()
- change = {
- "project": project_name,
- "subject": random_commit_message.split("\n")[0],
- "branch": "master",
- "status": "NEW",
- }
- requests.post(BASE_URL + "changes/",
- json.dumps(change),
- headers=HEADERS,
- auth=basic_auth(user))
+ random_commit_message = generate_random_text()
+ change = {
+ "project": project_name,
+ "subject": random_commit_message.split("\n")[0],
+ "branch": "master",
+ "status": "NEW",
+ }
+ requests.post(BASE_URL + "changes/",
+ json.dumps(change),
+ headers=HEADERS,
+ auth=basic_auth(user))
def clean_up():
- shutil.rmtree(TMP_PATH)
+ shutil.rmtree(TMP_PATH)
def main():
    p = optparse.OptionParser()
    p.add_option("-u", "--user_count", action="store",
                 default=100,
@@ -299,7 +306,7 @@
    project_names = create_gerrit_projects(group_names)
    for idx, u in enumerate(gerrit_users):
-        for _ in xrange(random.randint(1, 5)):
+        for _ in range(random.randint(1, 5)):
        create_change(u, project_names[4 * idx / len(gerrit_users)])
main()
diff --git a/gerrit-gwtui/BUILD b/gerrit-gwtui/BUILD
index a6c9763..56ac0ea 100644
--- a/gerrit-gwtui/BUILD
+++ b/gerrit-gwtui/BUILD
@@ -34,8 +34,8 @@
"//java/com/google/gerrit/common:client",
"//java/com/google/gerrit/extensions:client",
"//lib:junit",
- "//lib:truth",
"//lib/gwt:dev",
"//lib/gwt:user",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/acceptance/BUILD b/java/com/google/gerrit/acceptance/BUILD
index acd5130a..9587860 100644
--- a/java/com/google/gerrit/acceptance/BUILD
+++ b/java/com/google/gerrit/acceptance/BUILD
@@ -76,9 +76,8 @@
"//java/com/google/gerrit/server/group/testing",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:jimfs",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/httpcomponents:fluent-hc",
@@ -88,6 +87,8 @@
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:impl_log4j",
"//lib/log:log4j",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
"//prolog:gerrit-prolog-common",
],
visibility = ["//visibility:public"],
diff --git a/java/com/google/gerrit/common/data/testing/BUILD b/java/com/google/gerrit/common/data/testing/BUILD
index 83f1c06..3899e39 100644
--- a/java/com/google/gerrit/common/data/testing/BUILD
+++ b/java/com/google/gerrit/common/data/testing/BUILD
@@ -6,6 +6,6 @@
deps = [
"//java/com/google/gerrit/common:server",
"//java/com/google/gerrit/reviewdb:server",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/elasticsearch/ElasticChangeIndex.java b/java/com/google/gerrit/elasticsearch/ElasticChangeIndex.java
index 0a06c31..58a298e 100644
--- a/java/com/google/gerrit/elasticsearch/ElasticChangeIndex.java
+++ b/java/com/google/gerrit/elasticsearch/ElasticChangeIndex.java
@@ -15,9 +15,9 @@
package com.google.gerrit.elasticsearch;
import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.gerrit.server.index.change.ChangeField.APPROVAL_CODEC;
-import static com.google.gerrit.server.index.change.ChangeField.CHANGE_CODEC;
-import static com.google.gerrit.server.index.change.ChangeField.PATCH_SET_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.CHANGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
import static com.google.gerrit.server.index.change.ChangeIndexRewriter.CLOSED_STATUSES;
import static com.google.gerrit.server.index.change.ChangeIndexRewriter.OPEN_STATUSES;
import static java.nio.charset.StandardCharsets.UTF_8;
diff --git a/java/com/google/gerrit/extensions/auth/oauth/OAuthToken.java b/java/com/google/gerrit/extensions/auth/oauth/OAuthToken.java
index b736262..84b6a04 100644
--- a/java/com/google/gerrit/extensions/auth/oauth/OAuthToken.java
+++ b/java/com/google/gerrit/extensions/auth/oauth/OAuthToken.java
@@ -14,9 +14,20 @@
package com.google.gerrit.extensions.auth.oauth;
-import java.io.Serializable;
+import static com.google.common.base.Preconditions.checkNotNull;
-/* OAuth token */
+import com.google.common.base.MoreObjects;
+import com.google.common.base.Strings;
+import com.google.gerrit.common.Nullable;
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * OAuth token.
+ *
+ * <p>Only implements {@link Serializable} for backwards compatibility; new extensions should not
+ * depend on the serialized format.
+ */
public class OAuthToken implements Serializable {
private static final long serialVersionUID = 1L;
@@ -32,8 +43,9 @@
private final long expiresAt;
/**
- * The identifier of the OAuth provider that issued this token in the form
- * <tt>"plugin-name:provider-name"</tt>, or {@code null}.
+ * The identifier of the OAuth provider that issued this token in the form {@code
+ * "plugin-name:provider-name"}, or {@code null}. The empty string {@code ""} is treated the same
+ * as {@code null}.
*/
private final String providerId;
@@ -41,12 +53,13 @@
this(token, secret, raw, Long.MAX_VALUE, null);
}
- public OAuthToken(String token, String secret, String raw, long expiresAt, String providerId) {
- this.token = token;
- this.secret = secret;
- this.raw = raw;
+ public OAuthToken(
+ String token, String secret, String raw, long expiresAt, @Nullable String providerId) {
+ this.token = checkNotNull(token, "token");
+ this.secret = checkNotNull(secret, "secret");
+ this.raw = checkNotNull(raw, "raw");
this.expiresAt = expiresAt;
- this.providerId = providerId;
+ this.providerId = Strings.emptyToNull(providerId);
}
public String getToken() {
@@ -69,7 +82,37 @@
return System.currentTimeMillis() > expiresAt;
}
+ @Nullable
public String getProviderId() {
return providerId;
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof OAuthToken)) {
+ return false;
+ }
+ OAuthToken t = (OAuthToken) o;
+ return token.equals(t.token)
+ && secret.equals(t.secret)
+ && raw.equals(t.raw)
+ && expiresAt == t.expiresAt
+ && Objects.equals(providerId, t.providerId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(token, secret, raw, expiresAt, providerId);
+ }
+
+ @Override
+ public String toString() {
+ return MoreObjects.toStringHelper(this)
+ .add("token", token)
+ .add("secret", secret)
+ .add("raw", raw)
+ .add("expiresAt", expiresAt)
+ .add("providerId", providerId)
+ .toString();
+ }
}
diff --git a/java/com/google/gerrit/extensions/common/testing/BUILD b/java/com/google/gerrit/extensions/common/testing/BUILD
index 82dd425..94fecbf 100644
--- a/java/com/google/gerrit/extensions/common/testing/BUILD
+++ b/java/com/google/gerrit/extensions/common/testing/BUILD
@@ -6,7 +6,7 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/truth",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/extensions/registration/DynamicMap.java b/java/com/google/gerrit/extensions/registration/DynamicMap.java
index e0db0c7..7178a16 100644
--- a/java/com/google/gerrit/extensions/registration/DynamicMap.java
+++ b/java/com/google/gerrit/extensions/registration/DynamicMap.java
@@ -83,6 +83,11 @@
binder.bind(key).toProvider(new DynamicMapProvider<>(member)).in(Scopes.SINGLETON);
}
+ /** Returns an empty DynamicMap instance. */
+ public static <T> DynamicMap<T> emptyMap() {
+ return new PrivateInternals_DynamicMapImpl<>();
+ }
+
final ConcurrentMap<NamePair, Provider<T>> items;
DynamicMap() {
@@ -188,8 +193,8 @@
private final String exportName;
NamePair(String pn, String en) {
- this.pluginName = pn;
- this.exportName = en;
+ pluginName = pn;
+ exportName = en;
}
@Override
@@ -206,8 +211,4 @@
return false;
}
}
-
- public static <T> DynamicMap<T> emptyMap() {
- return new DynamicMap<T>() {};
- }
}
diff --git a/java/com/google/gerrit/extensions/registration/DynamicSet.java b/java/com/google/gerrit/extensions/registration/DynamicSet.java
index 5cdf267..7ffb86d 100644
--- a/java/com/google/gerrit/extensions/registration/DynamicSet.java
+++ b/java/com/google/gerrit/extensions/registration/DynamicSet.java
@@ -139,7 +139,7 @@
}
public DynamicSet() {
- this(Collections.<AtomicReference<Provider<T>>>emptySet());
+ this(Collections.emptySet());
}
@Override
diff --git a/java/com/google/gerrit/extensions/registration/PrivateInternals_DynamicMapImpl.java b/java/com/google/gerrit/extensions/registration/PrivateInternals_DynamicMapImpl.java
index 50aed7d..1973f70 100644
--- a/java/com/google/gerrit/extensions/registration/PrivateInternals_DynamicMapImpl.java
+++ b/java/com/google/gerrit/extensions/registration/PrivateInternals_DynamicMapImpl.java
@@ -14,6 +14,8 @@
package com.google.gerrit.extensions.registration;
+import static com.google.common.base.Preconditions.checkNotNull;
+
import com.google.gerrit.extensions.annotations.Export;
import com.google.inject.Key;
import com.google.inject.Provider;
@@ -31,6 +33,7 @@
* @return handle to remove the item at a later point in time.
*/
public RegistrationHandle put(String pluginName, String exportName, Provider<T> item) {
+ checkNotNull(item);
final NamePair key = new NamePair(pluginName, exportName);
items.put(key, item);
return new RegistrationHandle() {
@@ -53,6 +56,7 @@
* the collection.
*/
public ReloadableRegistrationHandle<T> put(String pluginName, Key<T> key, Provider<T> item) {
+ checkNotNull(item);
String exportName = ((Export) key.getAnnotation()).value();
NamePair np = new NamePair(pluginName, exportName);
items.put(np, item);
diff --git a/java/com/google/gerrit/extensions/restapi/testing/BUILD b/java/com/google/gerrit/extensions/restapi/testing/BUILD
index d035816..434591e 100644
--- a/java/com/google/gerrit/extensions/restapi/testing/BUILD
+++ b/java/com/google/gerrit/extensions/restapi/testing/BUILD
@@ -6,6 +6,6 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/truth",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/git/testing/BUILD b/java/com/google/gerrit/git/testing/BUILD
index 0b83560..4900339 100644
--- a/java/com/google/gerrit/git/testing/BUILD
+++ b/java/com/google/gerrit/git/testing/BUILD
@@ -7,8 +7,8 @@
deps = [
"//java/com/google/gerrit/common:annotations",
"//lib:guava",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/java/com/google/gerrit/gpg/server/PostGpgKeys.java b/java/com/google/gerrit/gpg/server/PostGpgKeys.java
index e487a54..4b92ec3 100644
--- a/java/com/google/gerrit/gpg/server/PostGpgKeys.java
+++ b/java/com/google/gerrit/gpg/server/PostGpgKeys.java
@@ -259,10 +259,10 @@
if (accountStates.size() > 1) {
StringBuilder msg = new StringBuilder();
- msg.append("GPG key ").append(extIdKey.get()).append(" associated with multiple accounts: ");
- Joiner.on(", ")
- .appendTo(msg, Lists.transform(accountStates, AccountState.ACCOUNT_ID_FUNCTION));
- log.error(msg.toString());
+ msg.append("GPG key ")
+ .append(extIdKey.get())
+ .append(" associated with multiple accounts: ")
+ .append(Lists.transform(accountStates, AccountState.ACCOUNT_ID_FUNCTION));
throw new IllegalStateException(msg.toString());
}
diff --git a/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java b/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
index 55bd4d5..6174644 100644
--- a/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
+++ b/java/com/google/gerrit/httpd/ProjectBasicAuthFilter.java
@@ -167,6 +167,8 @@
rsp.sendError(SC_UNAUTHORIZED);
return false;
} catch (AuthenticationFailedException e) {
+ // This exception is thrown if the user provided wrong credentials; we don't need to log a
+ // stacktrace for it.
log.warn(authenticationFailedMsg(username, req) + ": " + e.getMessage());
rsp.sendError(SC_UNAUTHORIZED);
return false;
diff --git a/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java b/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
index 24ba4ac..4671475 100644
--- a/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
+++ b/java/com/google/gerrit/httpd/auth/ldap/LdapLoginServlet.java
@@ -30,6 +30,7 @@
import com.google.gerrit.server.account.AccountUserNameException;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.account.AuthResult;
+import com.google.gerrit.server.account.AuthenticationFailedException;
import com.google.gerrit.server.auth.AuthenticationUnavailableException;
import com.google.gwtexpui.server.CacheHeaders;
import com.google.inject.Inject;
@@ -126,10 +127,16 @@
} catch (AuthenticationUnavailableException e) {
sendForm(req, res, "Authentication unavailable at this time.");
return;
- } catch (AccountException e) {
- log.info(String.format("'%s' failed to sign in: %s", username, e.getMessage()));
+ } catch (AuthenticationFailedException e) {
+ // This exception is thrown if the user provided wrong credentials; we don't need to log a
+ // stacktrace for it.
+ log.warn("'{}' failed to sign in: {}", username, e.getMessage());
sendForm(req, res, "Invalid username or password.");
return;
+ } catch (AccountException e) {
+ log.warn("'{}' failed to sign in", username, e);
+ sendForm(req, res, "Authentication failed.");
+ return;
} catch (RuntimeException e) {
log.error("LDAP authentication failed", e);
sendForm(req, res, "Authentication unavailable at this time.");
diff --git a/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java b/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
index 5b60a36f..cc22d24 100644
--- a/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
+++ b/java/com/google/gerrit/httpd/gitweb/GitwebServlet.java
@@ -659,7 +659,7 @@
dst.close();
}
} catch (IOException e) {
- log.debug("Unexpected error copying input to CGI", e);
+ log.error("Unexpected error copying input to CGI", e);
}
},
"Gitweb-InputFeeder")
@@ -669,14 +669,19 @@
private void copyStderrToLog(InputStream in) {
new Thread(
() -> {
+ StringBuilder b = new StringBuilder();
try (BufferedReader br =
new BufferedReader(new InputStreamReader(in, ISO_8859_1.name()))) {
String line;
while ((line = br.readLine()) != null) {
- log.error("CGI: " + line);
+ if (b.length() > 0) {
+ b.append('\n');
+ }
+ b.append("CGI: ").append(line);
}
+ log.error(b.toString());
} catch (IOException e) {
- log.debug("Unexpected error copying stderr from CGI", e);
+ log.error("Unexpected error copying stderr from CGI", e);
}
},
"Gitweb-ErrorLogger")
diff --git a/java/com/google/gerrit/httpd/init/WebAppInitializer.java b/java/com/google/gerrit/httpd/init/WebAppInitializer.java
index 6cbb357..690d1ac 100644
--- a/java/com/google/gerrit/httpd/init/WebAppInitializer.java
+++ b/java/com/google/gerrit/httpd/init/WebAppInitializer.java
@@ -79,6 +79,7 @@
import com.google.gerrit.server.plugins.PluginModule;
import com.google.gerrit.server.plugins.PluginRestApiModule;
import com.google.gerrit.server.project.DefaultProjectNameLockManager;
+import com.google.gerrit.server.restapi.RestApiModule;
import com.google.gerrit.server.restapi.config.RestCacheAdminModule;
import com.google.gerrit.server.schema.DataSourceModule;
import com.google.gerrit.server.schema.DataSourceProvider;
@@ -359,6 +360,7 @@
modules.add(new PluginModule());
modules.add(new PluginRestApiModule());
+ modules.add(new RestApiModule());
modules.add(new RestCacheAdminModule());
modules.add(new GpgModule(config));
modules.add(new StartupChecks.Module());
diff --git a/java/com/google/gerrit/httpd/raw/BazelBuild.java b/java/com/google/gerrit/httpd/raw/BazelBuild.java
index 85453fb..f52792c 100644
--- a/java/com/google/gerrit/httpd/raw/BazelBuild.java
+++ b/java/com/google/gerrit/httpd/raw/BazelBuild.java
@@ -17,6 +17,7 @@
import static com.google.common.base.MoreObjects.firstNonNull;
import static java.nio.charset.StandardCharsets.UTF_8;
+import com.google.common.base.Joiner;
import com.google.common.escape.Escaper;
import com.google.common.html.HtmlEscapers;
import com.google.common.io.ByteStreams;
@@ -62,7 +63,8 @@
try {
status = rebuild.waitFor();
} catch (InterruptedException e) {
- throw new InterruptedIOException("interrupted waiting for " + proc.toString());
+ throw new InterruptedIOException(
+ "interrupted waiting for: " + Joiner.on(' ').join(proc.command()));
}
if (status != 0) {
log.warn("build failed: " + new String(out, UTF_8));
diff --git a/java/com/google/gerrit/lucene/LuceneChangeIndex.java b/java/com/google/gerrit/lucene/LuceneChangeIndex.java
index 468aa67..c8f8fff 100644
--- a/java/com/google/gerrit/lucene/LuceneChangeIndex.java
+++ b/java/com/google/gerrit/lucene/LuceneChangeIndex.java
@@ -16,11 +16,11 @@
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.gerrit.lucene.AbstractLuceneIndex.sortFieldName;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.CHANGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
import static com.google.gerrit.server.git.QueueProvider.QueueType.INTERACTIVE;
-import static com.google.gerrit.server.index.change.ChangeField.APPROVAL_CODEC;
-import static com.google.gerrit.server.index.change.ChangeField.CHANGE_CODEC;
import static com.google.gerrit.server.index.change.ChangeField.LEGACY_ID;
-import static com.google.gerrit.server.index.change.ChangeField.PATCH_SET_CODEC;
import static com.google.gerrit.server.index.change.ChangeField.PROJECT;
import static com.google.gerrit.server.index.change.ChangeIndexRewriter.CLOSED_STATUSES;
import static com.google.gerrit.server.index.change.ChangeIndexRewriter.OPEN_STATUSES;
diff --git a/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java b/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
index 7256e8c..bc2846a 100644
--- a/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
+++ b/java/com/google/gerrit/metrics/proc/OperatingSystemMXBeanProvider.java
@@ -41,7 +41,7 @@
return new OperatingSystemMXBeanProvider(sys);
}
} catch (ReflectiveOperationException e) {
- log.debug(String.format("No implementation for %s: %s", name, e.getMessage()));
+ log.debug("No implementation for {}", name, e);
}
}
log.warn("No implementation of UnixOperatingSystemMXBean found");
diff --git a/java/com/google/gerrit/pgm/Daemon.java b/java/com/google/gerrit/pgm/Daemon.java
index 417b00d..730f219 100644
--- a/java/com/google/gerrit/pgm/Daemon.java
+++ b/java/com/google/gerrit/pgm/Daemon.java
@@ -89,6 +89,7 @@
import com.google.gerrit.server.plugins.PluginModule;
import com.google.gerrit.server.plugins.PluginRestApiModule;
import com.google.gerrit.server.project.DefaultProjectNameLockManager;
+import com.google.gerrit.server.restapi.RestApiModule;
import com.google.gerrit.server.restapi.config.RestCacheAdminModule;
import com.google.gerrit.server.schema.DataSourceProvider;
import com.google.gerrit.server.schema.InMemoryAccountPatchReviewStore;
@@ -433,6 +434,7 @@
modules.add(new SmtpEmailSender.Module());
}
modules.add(new SignedTokenEmailTokenVerifier.Module());
+ modules.add(new RestApiModule());
modules.add(new PluginRestApiModule());
modules.add(new RestCacheAdminModule());
modules.add(new GpgModule(config));
diff --git a/java/com/google/gerrit/pgm/http/jetty/JettyServer.java b/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
index b6eac05..25a28a4 100644
--- a/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
+++ b/java/com/google/gerrit/pgm/http/jetty/JettyServer.java
@@ -69,13 +69,9 @@
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.eclipse.jgit.lib.Config;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@Singleton
public class JettyServer {
- private static final Logger log = LoggerFactory.getLogger(JettyServer.class);
-
static class Lifecycle implements LifecycleListener {
private final JettyServer server;
private final Config cfg;
@@ -425,9 +421,8 @@
"/*",
EnumSet.of(DispatcherType.REQUEST, DispatcherType.ASYNC));
} catch (Throwable e) {
- String errorMessage = "Unable to instantiate front-end HTTP Filter " + filterClassName;
- log.error(errorMessage, e);
- throw new IllegalArgumentException(errorMessage, e);
+ throw new IllegalArgumentException(
+ "Unable to instantiate front-end HTTP Filter " + filterClassName, e);
}
}
diff --git a/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java b/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
index c1112ae..5073200 100644
--- a/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
+++ b/java/com/google/gerrit/pgm/init/api/AllProjectsConfig.java
@@ -30,7 +30,6 @@
import org.slf4j.LoggerFactory;
public class AllProjectsConfig extends VersionedMetaDataOnInit {
-
private static final Logger log = LoggerFactory.getLogger(AllProjectsConfig.class);
private Config cfg;
@@ -65,7 +64,7 @@
return GroupList.parse(
new Project.NameKey(project),
readUTF8(GroupList.FILE_NAME),
- GroupList.createLoggerSink(GroupList.FILE_NAME, log));
+ error -> log.error("Error parsing file {}: {}", GroupList.FILE_NAME, error.getMessage()));
}
public void save(String pluginName, String message) throws IOException, ConfigInvalidException {
diff --git a/java/com/google/gerrit/reviewdb/client/ChangeMessage.java b/java/com/google/gerrit/reviewdb/client/ChangeMessage.java
index edc022f..8e397f0 100644
--- a/java/com/google/gerrit/reviewdb/client/ChangeMessage.java
+++ b/java/com/google/gerrit/reviewdb/client/ChangeMessage.java
@@ -149,6 +149,26 @@
}
@Override
+ public boolean equals(Object o) {
+ if (!(o instanceof ChangeMessage)) {
+ return false;
+ }
+ ChangeMessage m = (ChangeMessage) o;
+ return Objects.equals(key, m.key)
+ && Objects.equals(author, m.author)
+ && Objects.equals(writtenOn, m.writtenOn)
+ && Objects.equals(message, m.message)
+ && Objects.equals(patchset, m.patchset)
+ && Objects.equals(tag, m.tag)
+ && Objects.equals(realAuthor, m.realAuthor);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(key, author, writtenOn, message, patchset, tag, realAuthor);
+ }
+
+ @Override
public String toString() {
return "ChangeMessage{"
+ "key="
diff --git a/java/com/google/gerrit/reviewdb/client/PatchSet.java b/java/com/google/gerrit/reviewdb/client/PatchSet.java
index 4536b67..849fd75 100644
--- a/java/com/google/gerrit/reviewdb/client/PatchSet.java
+++ b/java/com/google/gerrit/reviewdb/client/PatchSet.java
@@ -20,6 +20,7 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
/** A single revision of a {@link Change}. */
public final class PatchSet {
@@ -280,6 +281,26 @@
}
@Override
+ public boolean equals(Object o) {
+ if (!(o instanceof PatchSet)) {
+ return false;
+ }
+ PatchSet p = (PatchSet) o;
+ return Objects.equals(id, p.id)
+ && Objects.equals(revision, p.revision)
+ && Objects.equals(uploader, p.uploader)
+ && Objects.equals(createdOn, p.createdOn)
+ && Objects.equals(groups, p.groups)
+ && Objects.equals(pushCertificate, p.pushCertificate)
+ && Objects.equals(description, p.description);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, revision, uploader, createdOn, groups, pushCertificate, description);
+ }
+
+ @Override
public String toString() {
return "[PatchSet " + getId().toString() + "]";
}
diff --git a/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
new file mode 100644
index 0000000..2958464
--- /dev/null
+++ b/java/com/google/gerrit/reviewdb/server/ReviewDbCodecs.java
@@ -0,0 +1,38 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.reviewdb.server;
+
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
+import com.google.gerrit.reviewdb.client.PatchSet;
+import com.google.gerrit.reviewdb.client.PatchSetApproval;
+import com.google.gwtorm.protobuf.CodecFactory;
+import com.google.gwtorm.protobuf.ProtobufCodec;
+
+/** {@link ProtobufCodec} instances for ReviewDb types. */
+public class ReviewDbCodecs {
+ public static final ProtobufCodec<PatchSetApproval> APPROVAL_CODEC =
+ CodecFactory.encoder(PatchSetApproval.class);
+
+ public static final ProtobufCodec<Change> CHANGE_CODEC = CodecFactory.encoder(Change.class);
+
+ public static final ProtobufCodec<ChangeMessage> MESSAGE_CODEC =
+ CodecFactory.encoder(ChangeMessage.class);
+
+ public static final ProtobufCodec<PatchSet> PATCH_SET_CODEC =
+ CodecFactory.encoder(PatchSet.class);
+
+ private ReviewDbCodecs() {}
+}
diff --git a/java/com/google/gerrit/server/ChangeMessagesUtil.java b/java/com/google/gerrit/server/ChangeMessagesUtil.java
index 75a9991..e635072 100644
--- a/java/com/google/gerrit/server/ChangeMessagesUtil.java
+++ b/java/com/google/gerrit/server/ChangeMessagesUtil.java
@@ -116,14 +116,6 @@
return notes.load().getChangeMessages();
}
- public Iterable<ChangeMessage> byPatchSet(ReviewDb db, ChangeNotes notes, PatchSet.Id psId)
- throws OrmException {
- if (!migration.readChanges()) {
- return db.changeMessages().byPatchSet(psId);
- }
- return notes.load().getChangeMessagesByPatchSet().get(psId);
- }
-
public void addChangeMessage(ReviewDb db, ChangeUpdate update, ChangeMessage changeMessage)
throws OrmException {
checkState(
diff --git a/java/com/google/gerrit/server/account/AccountsUpdate.java b/java/com/google/gerrit/server/account/AccountsUpdate.java
index 2f36cf2..996e602 100644
--- a/java/com/google/gerrit/server/account/AccountsUpdate.java
+++ b/java/com/google/gerrit/server/account/AccountsUpdate.java
@@ -106,7 +106,8 @@
* <li>binding {@link GitReferenceUpdated#DISABLED} and
* <li>passing an {@link
* com.google.gerrit.server.account.externalids.ExternalIdNotes.FactoryNoReindex} factory as
- * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser, ExternalIdNotesLoader)}
+ * parameter of {@link AccountsUpdate.Factory#create(IdentifiedUser,
+ * ExternalIdNotes.ExternalIdNotesLoader)}
* </ul>
*
* <p>If there are concurrent account updates updating the user branch in NoteDb may fail with
diff --git a/java/com/google/gerrit/server/account/VersionedAccountDestinations.java b/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
index a57dc7b..1064546 100644
--- a/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
+++ b/java/com/google/gerrit/server/account/VersionedAccountDestinations.java
@@ -16,8 +16,6 @@
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.RefNames;
-import com.google.gerrit.server.git.ValidationError;
-import com.google.gerrit.server.git.meta.TabFile;
import com.google.gerrit.server.git.meta.VersionedMetaData;
import java.io.IOException;
import org.eclipse.jgit.errors.ConfigInvalidException;
@@ -61,17 +59,15 @@
String path = p.path;
if (path.startsWith(prefix)) {
String label = path.substring(prefix.length());
- ValidationError.Sink errors = TabFile.createLoggerSink(path, log);
- destinations.parseLabel(label, readUTF8(path), errors);
+ destinations.parseLabel(
+ label,
+ readUTF8(path),
+ error -> log.error("Error parsing file {}: {}", path, error.getMessage()));
}
}
}
}
- public ValidationError.Sink createSink(String file) {
- return ValidationError.createLoggerSink(file, log);
- }
-
@Override
protected boolean onSave(CommitBuilder commit) throws IOException, ConfigInvalidException {
throw new UnsupportedOperationException("Cannot yet save destinations");
diff --git a/java/com/google/gerrit/server/account/VersionedAccountQueries.java b/java/com/google/gerrit/server/account/VersionedAccountQueries.java
index b43a65d..b021d24 100644
--- a/java/com/google/gerrit/server/account/VersionedAccountQueries.java
+++ b/java/com/google/gerrit/server/account/VersionedAccountQueries.java
@@ -51,7 +51,9 @@
protected void onLoad() throws IOException, ConfigInvalidException {
queryList =
QueryList.parse(
- readUTF8(QueryList.FILE_NAME), QueryList.createLoggerSink(QueryList.FILE_NAME, log));
+ readUTF8(QueryList.FILE_NAME),
+ error ->
+ log.error("Error parsing file {}: {}", QueryList.FILE_NAME, error.getMessage()));
}
@Override
diff --git a/java/com/google/gerrit/server/account/externalids/ExternalId.java b/java/com/google/gerrit/server/account/externalids/ExternalId.java
index 442bc2a..db8ea41 100644
--- a/java/com/google/gerrit/server/account/externalids/ExternalId.java
+++ b/java/com/google/gerrit/server/account/externalids/ExternalId.java
@@ -118,6 +118,8 @@
* AuthType#HTTP_LDAP}, and {@link AuthType#LDAP_BIND} usernames.
*
* <p>The name {@code gerrit:} was a very poor choice.
+ *
+ * <p>Scheme names must not contain colons (':').
*/
public static final String SCHEME_GERRIT = "gerrit";
@@ -140,6 +142,13 @@
public abstract static class Key implements Serializable {
private static final long serialVersionUID = 1L;
+ /**
+ * Creates an external ID key.
+ *
+ * @param scheme the scheme name, must not contain colons (':'), can be {@code null}
+ * @param id the external ID, must not contain colons (':')
+ * @return the created external ID key
+ */
public static Key create(@Nullable String scheme, String id) {
return new AutoValue_ExternalId_Key(Strings.emptyToNull(scheme), id);
}
@@ -198,10 +207,28 @@
}
}
+ /**
+ * Creates an external ID.
+ *
+ * @param scheme the scheme name, must not contain colons (':')
+ * @param id the external ID, must not contain colons (':')
+ * @param accountId the ID of the account to which the external ID belongs
+ * @return the created external ID
+ */
public static ExternalId create(String scheme, String id, Account.Id accountId) {
return create(Key.create(scheme, id), accountId, null, null);
}
+ /**
+ * Creates an external ID.
+ *
+ * @param scheme the scheme name, must not contain colons (':')
+ * @param id the external ID, must not contain colons (':')
+ * @param accountId the ID of the account to which the external ID belongs
+ * @param email the email of the external ID, may be {@code null}
+ * @param hashedPassword the hashed password of the external ID, may be {@code null}
+ * @return the created external ID
+ */
public static ExternalId create(
String scheme,
String id,
@@ -222,17 +249,35 @@
}
public static ExternalId createWithPassword(
- Key key, Account.Id accountId, @Nullable String email, String plainPassword) {
+ Key key, Account.Id accountId, @Nullable String email, @Nullable String plainPassword) {
plainPassword = Strings.emptyToNull(plainPassword);
String hashedPassword =
plainPassword != null ? HashedPassword.fromPassword(plainPassword).encode() : null;
return create(key, accountId, email, hashedPassword);
}
- public static ExternalId createUsername(String id, Account.Id accountId, String plainPassword) {
+ /**
+ * Creates an external ID for a username (scheme "username").
+ *
+ * @param id the external ID, must not contain colons (':')
+ * @param accountId the ID of the account to which the external ID belongs
+ * @param plainPassword the plain HTTP password, may be {@code null}
+ * @return the created external ID
+ */
+ public static ExternalId createUsername(
+ String id, Account.Id accountId, @Nullable String plainPassword) {
return createWithPassword(Key.create(SCHEME_USERNAME, id), accountId, null, plainPassword);
}
+ /**
+ * Creates an external ID with an email.
+ *
+ * @param scheme the scheme name, must not contain colons (':')
+ * @param id the external ID, must not contain colons (':')
+ * @param accountId the ID of the account to which the external ID belongs
+ * @param email the email of the external ID, may be {@code null}
+ * @return the created external ID
+ */
public static ExternalId createWithEmail(
String scheme, String id, Account.Id accountId, @Nullable String email) {
return createWithEmail(Key.create(scheme, id), accountId, email);
diff --git a/java/com/google/gerrit/server/api/GerritApiImpl.java b/java/com/google/gerrit/server/api/GerritApiImpl.java
index 24fad34..6a6415e 100644
--- a/java/com/google/gerrit/server/api/GerritApiImpl.java
+++ b/java/com/google/gerrit/server/api/GerritApiImpl.java
@@ -25,7 +25,7 @@
import com.google.inject.Singleton;
@Singleton
-public class GerritApiImpl implements GerritApi {
+class GerritApiImpl implements GerritApi {
private final Accounts accounts;
private final Changes changes;
private final Config config;
diff --git a/java/com/google/gerrit/server/auth/ldap/Helper.java b/java/com/google/gerrit/server/auth/ldap/Helper.java
index 5af730f..16c1724 100644
--- a/java/com/google/gerrit/server/auth/ldap/Helper.java
+++ b/java/com/google/gerrit/server/auth/ldap/Helper.java
@@ -196,7 +196,7 @@
Throwables.throwIfInstanceOf(e.getException(), IOException.class);
Throwables.throwIfInstanceOf(e.getException(), NamingException.class);
Throwables.throwIfInstanceOf(e.getException(), RuntimeException.class);
- LdapRealm.log.warn("Internal error", e.getException());
+ log.warn("Internal error", e.getException());
return null;
} finally {
ctx.logout();
@@ -343,7 +343,7 @@
}
}
} catch (NamingException e) {
- LdapRealm.log.warn("Could not find group " + groupDN, e);
+ log.warn("Could not find group {}", groupDN, e);
}
cachedParentsDNs = dns.build();
parentGroups.put(groupDN, cachedParentsDNs);
@@ -474,10 +474,10 @@
try {
return LdapType.guessType(ctx);
} catch (NamingException e) {
- LdapRealm.log.warn(
- "Cannot discover type of LDAP server at "
- + server
- + ", assuming the server is RFC 2307 compliant.",
+ log.warn(
+ "Cannot discover type of LDAP server at {},"
+ + " assuming the server is RFC 2307 compliant.",
+ server,
e);
return LdapType.RFC_2307;
}
diff --git a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
index 6184674..b83c7b2 100644
--- a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
+++ b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
@@ -61,7 +61,8 @@
@Singleton
class LdapRealm extends AbstractRealm {
- static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
+ private static final Logger log = LoggerFactory.getLogger(LdapRealm.class);
+
static final String LDAP = "com.sun.jndi.ldap.LdapCtxFactory";
static final String USERNAME = "username";
diff --git a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
index 1ac3bca..f380051 100644
--- a/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
+++ b/java/com/google/gerrit/server/auth/oauth/OAuthTokenCache.java
@@ -16,16 +16,23 @@
import static com.google.common.base.Preconditions.checkNotNull;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
import com.google.common.cache.Cache;
import com.google.gerrit.extensions.auth.oauth.OAuthToken;
import com.google.gerrit.extensions.auth.oauth.OAuthTokenEncrypter;
import com.google.gerrit.extensions.registration.DynamicItem;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.server.cache.CacheModule;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.IntKeyCacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.proto.Cache.OAuthTokenProto;
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
+import java.io.IOException;
@Singleton
public class OAuthTokenCache {
@@ -37,11 +44,47 @@
return new CacheModule() {
@Override
protected void configure() {
- persist(OAUTH_TOKENS, Account.Id.class, OAuthToken.class);
+ persist(OAUTH_TOKENS, Account.Id.class, OAuthToken.class)
+ .version(1)
+ .keySerializer(new IntKeyCacheSerializer<>(Account.Id::new))
+ .valueSerializer(new Serializer());
}
};
}
+ // Defined outside of OAuthToken class, since that is in the extensions package which doesn't have
+ // access to the serializer code.
+ @VisibleForTesting
+ static class Serializer implements CacheSerializer<OAuthToken> {
+ @Override
+ public byte[] serialize(OAuthToken object) {
+ return ProtoCacheSerializers.toByteArray(
+ OAuthTokenProto.newBuilder()
+ .setToken(object.getToken())
+ .setSecret(object.getSecret())
+ .setRaw(object.getRaw())
+ .setExpiresAt(object.getExpiresAt())
+ .setProviderId(Strings.nullToEmpty(object.getProviderId()))
+ .build());
+ }
+
+ @Override
+ public OAuthToken deserialize(byte[] in) {
+ OAuthTokenProto proto;
+ try {
+ proto = OAuthTokenProto.parseFrom(in);
+ } catch (IOException e) {
+        throw new IllegalArgumentException("failed to deserialize OAuthToken", e);
+ }
+ return new OAuthToken(
+ proto.getToken(),
+ proto.getSecret(),
+ proto.getRaw(),
+ proto.getExpiresAt(),
+ Strings.emptyToNull(proto.getProviderId()));
+ }
+ }
+
private final Cache<Account.Id, OAuthToken> cache;
@Inject
diff --git a/java/com/google/gerrit/server/cache/BooleanCacheSerializer.java b/java/com/google/gerrit/server/cache/BooleanCacheSerializer.java
new file mode 100644
index 0000000..59fc946
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/BooleanCacheSerializer.java
@@ -0,0 +1,44 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import com.google.protobuf.TextFormat;
+import java.util.Arrays;
+
+public enum BooleanCacheSerializer implements CacheSerializer<Boolean> {
+ INSTANCE;
+
+ private static final byte[] TRUE = Boolean.toString(true).getBytes(UTF_8);
+ private static final byte[] FALSE = Boolean.toString(false).getBytes(UTF_8);
+
+ @Override
+ public byte[] serialize(Boolean object) {
+ byte[] bytes = checkNotNull(object) ? TRUE : FALSE;
+ return Arrays.copyOf(bytes, bytes.length);
+ }
+
+ @Override
+ public Boolean deserialize(byte[] in) {
+ if (Arrays.equals(in, TRUE)) {
+ return Boolean.TRUE;
+ } else if (Arrays.equals(in, FALSE)) {
+ return Boolean.FALSE;
+ }
+ throw new IllegalArgumentException("Invalid Boolean value: " + TextFormat.escapeBytes(in));
+ }
+}
diff --git a/java/com/google/gerrit/server/cache/CacheSerializer.java b/java/com/google/gerrit/server/cache/CacheSerializer.java
index 6bd1322..08deecd 100644
--- a/java/com/google/gerrit/server/cache/CacheSerializer.java
+++ b/java/com/google/gerrit/server/cache/CacheSerializer.java
@@ -14,13 +14,29 @@
package com.google.gerrit.server.cache;
-import java.io.IOException;
-
-/** Interface for serializing/deserializing a type to/from a persistent cache. */
+/**
+ * Interface for serializing/deserializing a type to/from a persistent cache.
+ *
+ * <p>Implementations are null-hostile and will throw exceptions from {@link #serialize} when passed
+ * null values, unless otherwise specified.
+ */
public interface CacheSerializer<T> {
- /** Serializes the object to a new byte array. */
- byte[] serialize(T object) throws IOException;
+ /**
+ * Serializes the object to a new byte array.
+ *
+ * @param object object to serialize.
+ * @return serialized byte array representation.
+ * @throws RuntimeException for malformed input, for example null or an otherwise unsupported
+ * value.
+ */
+ byte[] serialize(T object);
- /** Deserializes a single object from the given byte array. */
- T deserialize(byte[] in) throws IOException;
+ /**
+ * Deserializes a single object from the given byte array.
+ *
+ * @param in serialized byte array representation.
+ * @throws RuntimeException for malformed input, for example null or an otherwise corrupt
+ * serialized representation.
+ */
+ T deserialize(byte[] in);
}
diff --git a/java/com/google/gerrit/server/cache/EnumCacheSerializer.java b/java/com/google/gerrit/server/cache/EnumCacheSerializer.java
index 6ea6121..c5be783 100644
--- a/java/com/google/gerrit/server/cache/EnumCacheSerializer.java
+++ b/java/com/google/gerrit/server/cache/EnumCacheSerializer.java
@@ -14,28 +14,26 @@
package com.google.gerrit.server.cache;
+import static com.google.common.base.Preconditions.checkNotNull;
import static java.nio.charset.StandardCharsets.UTF_8;
+import com.google.common.base.Converter;
import com.google.common.base.Enums;
-import java.io.IOException;
public class EnumCacheSerializer<E extends Enum<E>> implements CacheSerializer<E> {
- private final Class<E> clazz;
+ private final Converter<String, E> converter;
public EnumCacheSerializer(Class<E> clazz) {
- this.clazz = clazz;
+ this.converter = Enums.stringConverter(clazz);
}
@Override
- public byte[] serialize(E object) throws IOException {
- return object.name().getBytes(UTF_8);
+ public byte[] serialize(E object) {
+ return converter.reverse().convert(checkNotNull(object)).getBytes(UTF_8);
}
@Override
- public E deserialize(byte[] in) throws IOException {
- String name = new String(in, UTF_8);
- return Enums.getIfPresent(clazz, name)
- .toJavaUtil()
- .orElseThrow(() -> new IOException("Invalid " + clazz.getName() + " value: " + name));
+ public E deserialize(byte[] in) {
+ return converter.convert(new String(checkNotNull(in), UTF_8));
}
}
diff --git a/java/com/google/gerrit/server/cache/IntKeyCacheSerializer.java b/java/com/google/gerrit/server/cache/IntKeyCacheSerializer.java
new file mode 100644
index 0000000..a07c004
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/IntKeyCacheSerializer.java
@@ -0,0 +1,38 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.gwtorm.client.IntKey;
+import java.util.function.Function;
+
+public class IntKeyCacheSerializer<K extends IntKey<?>> implements CacheSerializer<K> {
+ private final Function<Integer, K> factory;
+
+ public IntKeyCacheSerializer(Function<Integer, K> factory) {
+ this.factory = checkNotNull(factory);
+ }
+
+ @Override
+ public byte[] serialize(K object) {
+ return IntegerCacheSerializer.INSTANCE.serialize(object.get());
+ }
+
+ @Override
+ public K deserialize(byte[] in) {
+ return factory.apply(IntegerCacheSerializer.INSTANCE.deserialize(in));
+ }
+}
diff --git a/java/com/google/gerrit/server/cache/IntegerCacheSerializer.java b/java/com/google/gerrit/server/cache/IntegerCacheSerializer.java
new file mode 100644
index 0000000..5eddb71
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/IntegerCacheSerializer.java
@@ -0,0 +1,63 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.CodedOutputStream;
+import com.google.protobuf.TextFormat;
+import java.io.IOException;
+import java.util.Arrays;
+
+public enum IntegerCacheSerializer implements CacheSerializer<Integer> {
+ INSTANCE;
+
+ // Same as com.google.protobuf.WireFormat#MAX_VARINT_SIZE. Note that negative values take up more
+ // than MAX_VARINT32_SIZE space.
+ private static final int MAX_VARINT_SIZE = 10;
+
+ @Override
+ public byte[] serialize(Integer object) {
+ byte[] buf = new byte[MAX_VARINT_SIZE];
+ CodedOutputStream cout = CodedOutputStream.newInstance(buf);
+ try {
+ cout.writeInt32NoTag(checkNotNull(object));
+ cout.flush();
+ } catch (IOException e) {
+      throw new IllegalStateException("Failed to serialize int", e);
+ }
+ int n = cout.getTotalBytesWritten();
+ return n == buf.length ? buf : Arrays.copyOfRange(buf, 0, n);
+ }
+
+ @Override
+ public Integer deserialize(byte[] in) {
+ CodedInputStream cin = CodedInputStream.newInstance(checkNotNull(in));
+ int ret;
+ try {
+ ret = cin.readRawVarint32();
+ } catch (IOException e) {
+      throw new IllegalArgumentException("Failed to deserialize int", e);
+ }
+ int n = cin.getTotalBytesRead();
+ if (n != in.length) {
+ throw new IllegalArgumentException(
+ "Extra bytes in int representation: "
+ + TextFormat.escapeBytes(Arrays.copyOfRange(in, n, in.length)));
+ }
+ return ret;
+ }
+}
diff --git a/java/com/google/gerrit/server/cache/JavaCacheSerializer.java b/java/com/google/gerrit/server/cache/JavaCacheSerializer.java
index 750c5df..55358bc 100644
--- a/java/com/google/gerrit/server/cache/JavaCacheSerializer.java
+++ b/java/com/google/gerrit/server/cache/JavaCacheSerializer.java
@@ -14,6 +14,7 @@
package com.google.gerrit.server.cache;
+import com.google.gerrit.common.Nullable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -23,29 +24,33 @@
/**
* Serializer that uses default Java serialization.
*
+ * <p>Unlike most {@link CacheSerializer} implementations, serializing null is supported.
+ *
* @param <T> type to serialize. Must implement {@code Serializable}, but due to implementation
* details this is only checked at runtime.
*/
public class JavaCacheSerializer<T> implements CacheSerializer<T> {
@Override
- public byte[] serialize(T object) throws IOException {
+ public byte[] serialize(@Nullable T object) {
try (ByteArrayOutputStream bout = new ByteArrayOutputStream();
ObjectOutputStream oout = new ObjectOutputStream(bout)) {
oout.writeObject(object);
oout.flush();
return bout.toByteArray();
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Failed to serialize object", e);
}
}
@SuppressWarnings("unchecked")
@Override
- public T deserialize(byte[] in) throws IOException {
+ public T deserialize(byte[] in) {
Object object;
try (ByteArrayInputStream bin = new ByteArrayInputStream(in);
ObjectInputStream oin = new ObjectInputStream(bin)) {
object = oin.readObject();
- } catch (ClassNotFoundException e) {
- throw new IOException("Failed to deserialize object of type", e);
+ } catch (ClassNotFoundException | IOException e) {
+ throw new IllegalArgumentException("Failed to deserialize object", e);
}
return (T) object;
}
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
index 429f5ab..794d3bb 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheBinding.java
@@ -34,7 +34,12 @@
PersistentCacheBinding<K, V> version(int version);
- /** Set the total on-disk limit of the cache */
+ /**
+ * Set the total on-disk limit of the cache.
+ *
+ * <p>If 0 or negative, persistence for the cache is disabled by default, but may still be
+ * overridden in the config.
+ */
PersistentCacheBinding<K, V> diskLimit(long limit);
PersistentCacheBinding<K, V> keySerializer(CacheSerializer<K> keySerializer);
diff --git a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
index 405de4f..46a9e61 100644
--- a/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
+++ b/java/com/google/gerrit/server/cache/PersistentCacheProvider.java
@@ -39,6 +39,7 @@
CacheModule module, String name, TypeLiteral<K> keyType, TypeLiteral<V> valType) {
super(module, name, keyType, valType);
version = -1;
+ diskLimit = 128 << 20;
}
@Inject(optional = true)
@@ -93,10 +94,7 @@
@Override
public long diskLimit() {
- if (diskLimit > 0) {
- return diskLimit;
- }
- return 128 << 20;
+ return diskLimit;
}
@Override
diff --git a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
index 795df72..9fe6b83 100644
--- a/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
+++ b/java/com/google/gerrit/server/cache/ProtoCacheSerializers.java
@@ -14,6 +14,8 @@
package com.google.gerrit.server.cache;
+import com.google.gwtorm.protobuf.ProtobufCodec;
+import com.google.protobuf.ByteString;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.MessageLite;
import java.io.IOException;
@@ -23,8 +25,8 @@
/**
* Serializes a proto to a byte array.
*
- * <p>Guarantees deterministic serialization and thus is suitable for use as a persistent cache
- * key. Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link MessageLite#toByteArray()}, which is not guaranteed
* deterministic.
*
* @param message the proto message to serialize.
@@ -39,7 +41,30 @@
cout.checkNoSpaceLeft();
return bytes;
} catch (IOException e) {
- throw new IllegalStateException("exception writing to byte array");
+ throw new IllegalStateException("exception writing to byte array", e);
+ }
+ }
+
+ /**
+ * Serializes an object to a {@link ByteString} using a protobuf codec.
+ *
+ * <p>Guarantees deterministic serialization and thus is suitable for use in persistent caches.
+ * Should be used in preference to {@link ProtobufCodec#encodeToByteString(Object)}, which is not
+ * guaranteed deterministic.
+ *
+ * @param object the object to serialize.
+ * @param codec codec for serializing.
+ * @return a {@code ByteString} with the message contents.
+ */
+ public static <T> ByteString toByteString(T object, ProtobufCodec<T> codec) {
+ try (ByteString.Output bout = ByteString.newOutput()) {
+ CodedOutputStream cout = CodedOutputStream.newInstance(bout);
+ cout.useDeterministicSerialization();
+ codec.encode(object, cout);
+ cout.flush();
+ return bout.toByteString();
+ } catch (IOException e) {
+ throw new IllegalStateException("exception writing to ByteString", e);
}
}
diff --git a/java/com/google/gerrit/server/cache/h2/ObjectKeyTypeImpl.java b/java/com/google/gerrit/server/cache/h2/ObjectKeyTypeImpl.java
index b1a65fe..44e2bb2 100644
--- a/java/com/google/gerrit/server/cache/h2/ObjectKeyTypeImpl.java
+++ b/java/com/google/gerrit/server/cache/h2/ObjectKeyTypeImpl.java
@@ -52,11 +52,7 @@
@Override
public void funnel(K from, PrimitiveSink into) {
- try {
- Funnels.byteArrayFunnel().funnel(serializer.serialize(from), into);
- } catch (IOException e) {
- throw new RuntimeException("Cannot hash", e);
- }
+ Funnels.byteArrayFunnel().funnel(serializer.serialize(from), into);
}
};
}
diff --git a/java/com/google/gerrit/server/cache/testing/BUILD b/java/com/google/gerrit/server/cache/testing/BUILD
new file mode 100644
index 0000000..ed412af
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/testing/BUILD
@@ -0,0 +1,13 @@
+package(default_testonly = 1)
+
+java_library(
+ name = "testing",
+ srcs = glob(["*.java"]),
+ visibility = ["//visibility:public"],
+ deps = [
+ "//lib:guava",
+ "//lib:protobuf",
+ "//lib/commons:lang3",
+ "//lib/truth",
+ ],
+)
diff --git a/java/com/google/gerrit/server/cache/testing/CacheSerializerTestUtil.java b/java/com/google/gerrit/server/cache/testing/CacheSerializerTestUtil.java
new file mode 100644
index 0000000..5d41490
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/testing/CacheSerializerTestUtil.java
@@ -0,0 +1,30 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache.testing;
+
+import com.google.protobuf.ByteString;
+
+/** Static utilities for testing cache serializers. */
+public class CacheSerializerTestUtil {
+ public static ByteString bytes(int... ints) {
+ byte[] bytes = new byte[ints.length];
+ for (int i = 0; i < ints.length; i++) {
+ bytes[i] = (byte) ints[i];
+ }
+ return ByteString.copyFrom(bytes);
+ }
+
+ private CacheSerializerTestUtil() {}
+}
diff --git a/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
new file mode 100644
index 0000000..19c5b67
--- /dev/null
+++ b/java/com/google/gerrit/server/cache/testing/SerializedClassSubject.java
@@ -0,0 +1,103 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache.testing;
+
+import static com.google.common.collect.ImmutableMap.toImmutableMap;
+import static com.google.common.truth.Truth.assertAbout;
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
+
+import com.google.common.truth.FailureMetadata;
+import com.google.common.truth.Subject;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.Type;
+import java.util.Arrays;
+import java.util.Map;
+import org.apache.commons.lang3.reflect.FieldUtils;
+
+/**
+ * Subject about classes that are serialized into persistent caches.
+ *
+ * <p>Hand-written {@link com.google.gerrit.server.cache.CacheSerializer CacheSerializer}
+ * implementations depend on the exact representation of the data stored in a class, so it is
+ * important to verify any assumptions about the structure of the serialized classes. This class
+ * contains assertions about serialized classes, and should be used for every class that has a
+ * custom serializer implementation.
+ *
+ * <p>Changing fields of a serialized class (or abstract methods, in the case of {@code @AutoValue}
+ * classes) will likely require changes to the serializer implementation, and may require bumping
+ * the {@link com.google.gerrit.server.cache.PersistentCacheBinding#version(int) version} in the
+ * cache binding, in case the representation has changed in such a way that old serialized data
+ * becomes unreadable.
+ *
+ * <p>Changes to a serialized class such as adding or removing fields generally requires a change to
+ * the hand-written serializer. Usually, serializer implementations should be written in such a way
+ * that new fields are considered optional, and won't require bumping the version.
+ */
+public class SerializedClassSubject extends Subject<SerializedClassSubject, Class<?>> {
+ public static SerializedClassSubject assertThatSerializedClass(Class<?> actual) {
+ // This formulation fails in Eclipse 4.7.3a with "The type
+ // SerializedClassSubject does not define SerializedClassSubject() that is
+ // applicable here", due to
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=534694 or a similar bug:
+ // return assertAbout(SerializedClassSubject::new).that(actual);
+ Subject.Factory<SerializedClassSubject, Class<?>> factory =
+ (m, a) -> new SerializedClassSubject(m, a);
+ return assertAbout(factory).that(actual);
+ }
+
+ private SerializedClassSubject(FailureMetadata metadata, Class<?> actual) {
+ super(metadata, actual);
+ }
+
+ public void isAbstract() {
+ isNotNull();
+ assertWithMessage("expected class %s to be abstract", actual().getName())
+ .that(Modifier.isAbstract(actual().getModifiers()))
+ .isTrue();
+ }
+
+ public void isConcrete() {
+ isNotNull();
+ assertWithMessage("expected class %s to be concrete", actual().getName())
+ .that(!Modifier.isAbstract(actual().getModifiers()))
+ .isTrue();
+ }
+
+ public void hasFields(Map<String, Type> expectedFields) {
+ isConcrete();
+ assertThat(
+ FieldUtils.getAllFieldsList(actual())
+ .stream()
+ .filter(f -> !Modifier.isStatic(f.getModifiers()))
+ .collect(toImmutableMap(Field::getName, Field::getGenericType)))
+ .containsExactlyEntriesIn(expectedFields);
+ }
+
+ public void hasAutoValueMethods(Map<String, Type> expectedMethods) {
+ // Would be nice if we could check clazz is an @AutoValue, but the retention is not RUNTIME.
+ isAbstract();
+ assertThat(
+ Arrays.stream(actual().getDeclaredMethods())
+ .filter(m -> !Modifier.isStatic(m.getModifiers()))
+ .filter(m -> Modifier.isAbstract(m.getModifiers()))
+ .filter(m -> m.getParameters().length == 0)
+ .collect(toImmutableMap(Method::getName, Method::getGenericReturnType)))
+ .named("no-argument abstract methods on %s", actual().getName())
+ .isEqualTo(expectedMethods);
+ }
+}
diff --git a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
index 6449662..a4eb90f 100644
--- a/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
+++ b/java/com/google/gerrit/server/change/ChangeKindCacheImpl.java
@@ -42,6 +42,7 @@
import com.google.inject.Module;
import com.google.inject.name.Named;
import com.google.protobuf.ByteString;
+import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
@@ -176,7 +177,7 @@
@VisibleForTesting
static class Serializer implements CacheSerializer<Key> {
@Override
- public byte[] serialize(Key object) throws IOException {
+ public byte[] serialize(Key object) {
byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
ChangeKindKeyProto.Builder b = ChangeKindKeyProto.newBuilder();
object.getPrior().copyRawTo(buf, 0);
@@ -188,12 +189,16 @@
}
@Override
- public Key deserialize(byte[] in) throws IOException {
- ChangeKindKeyProto proto = ChangeKindKeyProto.parseFrom(in);
- return new Key(
- ObjectId.fromRaw(proto.getPrior().toByteArray()),
- ObjectId.fromRaw(proto.getNext().toByteArray()),
- proto.getStrategyName());
+ public Key deserialize(byte[] in) {
+ try {
+ ChangeKindKeyProto proto = ChangeKindKeyProto.parseFrom(in);
+ return new Key(
+ ObjectId.fromRaw(proto.getPrior().toByteArray()),
+ ObjectId.fromRaw(proto.getNext().toByteArray()),
+ proto.getStrategyName());
+ } catch (InvalidProtocolBufferException e) {
+ throw new IllegalArgumentException("Failed to deserialize object", e);
+ }
}
}
}
diff --git a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
index ea91e0f..a192228 100644
--- a/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
+++ b/java/com/google/gerrit/server/change/MergeabilityCacheImpl.java
@@ -16,20 +16,20 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
-import static com.google.gerrit.server.ioutil.BasicSerialization.readString;
-import static com.google.gerrit.server.ioutil.BasicSerialization.writeString;
-import static org.eclipse.jgit.lib.ObjectIdSerializer.readWithoutMarker;
-import static org.eclipse.jgit.lib.ObjectIdSerializer.writeWithoutMarker;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
import com.google.common.base.MoreObjects;
import com.google.common.cache.Cache;
import com.google.common.cache.Weigher;
-import com.google.common.collect.ImmutableBiMap;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.gerrit.extensions.client.SubmitType;
import com.google.gerrit.reviewdb.client.Branch;
+import com.google.gerrit.server.cache.BooleanCacheSerializer;
import com.google.gerrit.server.cache.CacheModule;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.proto.Cache.MergeabilityKeyProto;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
import com.google.gerrit.server.submit.SubmitDryRun;
@@ -37,14 +37,13 @@
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
+import com.google.protobuf.ByteString;
import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.io.Serializable;
import java.util.Arrays;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
@@ -58,30 +57,16 @@
private static final String CACHE_NAME = "mergeability";
- public static final ImmutableBiMap<SubmitType, Character> SUBMIT_TYPES =
- new ImmutableBiMap.Builder<SubmitType, Character>()
- .put(SubmitType.INHERIT, 'I')
- .put(SubmitType.FAST_FORWARD_ONLY, 'F')
- .put(SubmitType.MERGE_IF_NECESSARY, 'M')
- .put(SubmitType.REBASE_ALWAYS, 'P')
- .put(SubmitType.REBASE_IF_NECESSARY, 'R')
- .put(SubmitType.MERGE_ALWAYS, 'A')
- .put(SubmitType.CHERRY_PICK, 'C')
- .build();
-
- static {
- checkState(
- SUBMIT_TYPES.size() == SubmitType.values().length,
- "SubmitType <-> char BiMap needs updating");
- }
-
public static Module module() {
return new CacheModule() {
@Override
protected void configure() {
persist(CACHE_NAME, EntryKey.class, Boolean.class)
.maximumWeight(1 << 20)
- .weigher(MergeabilityWeigher.class);
+ .weigher(MergeabilityWeigher.class)
+ .version(1)
+ .keySerializer(EntryKey.Serializer.INSTANCE)
+ .valueSerializer(BooleanCacheSerializer.INSTANCE);
bind(MergeabilityCache.class).to(MergeabilityCacheImpl.class);
}
};
@@ -91,9 +76,7 @@
return ref != null && ref.getObjectId() != null ? ref.getObjectId() : ObjectId.zeroId();
}
- public static class EntryKey implements Serializable {
- private static final long serialVersionUID = 1L;
-
+ public static class EntryKey {
private ObjectId commit;
private ObjectId into;
private SubmitType submitType;
@@ -154,26 +137,44 @@
.toString();
}
- private void writeObject(ObjectOutputStream out) throws IOException {
- writeWithoutMarker(out, commit);
- writeWithoutMarker(out, into);
- Character c = SUBMIT_TYPES.get(submitType);
- if (c == null) {
- throw new IOException("Invalid submit type: " + submitType);
- }
- out.writeChar(c);
- writeString(out, mergeStrategy);
- }
+ static enum Serializer implements CacheSerializer<EntryKey> {
+ INSTANCE;
- private void readObject(ObjectInputStream in) throws IOException {
- commit = readWithoutMarker(in);
- into = readWithoutMarker(in);
- char t = in.readChar();
- submitType = SUBMIT_TYPES.inverse().get(t);
- if (submitType == null) {
- throw new IOException("Invalid submit type code: " + t);
+ private static final Converter<String, SubmitType> SUBMIT_TYPE_CONVERTER =
+ Enums.stringConverter(SubmitType.class);
+
+ @Override
+ public byte[] serialize(EntryKey object) {
+ byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
+ MergeabilityKeyProto.Builder b = MergeabilityKeyProto.newBuilder();
+ object.getCommit().copyRawTo(buf, 0);
+ b.setCommit(ByteString.copyFrom(buf));
+ object.getInto().copyRawTo(buf, 0);
+ b.setInto(ByteString.copyFrom(buf));
+ b.setSubmitType(SUBMIT_TYPE_CONVERTER.reverse().convert(object.getSubmitType()));
+ b.setMergeStrategy(object.getMergeStrategy());
+ return ProtoCacheSerializers.toByteArray(b.build());
}
- mergeStrategy = readString(in);
+
+ @Override
+ public EntryKey deserialize(byte[] in) {
+ MergeabilityKeyProto proto;
+ try {
+ proto = MergeabilityKeyProto.parseFrom(in);
+ } catch (IOException e) {
+          throw new IllegalArgumentException("Failed to deserialize mergeability cache key", e);
+ }
+ byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
+ proto.getCommit().copyTo(buf, 0);
+ ObjectId commit = ObjectId.fromRaw(buf);
+ proto.getInto().copyTo(buf, 0);
+ ObjectId into = ObjectId.fromRaw(buf);
+ return new EntryKey(
+ commit,
+ into,
+ SUBMIT_TYPE_CONVERTER.convert(proto.getSubmitType()),
+ proto.getMergeStrategy());
+ }
}
}
diff --git a/java/com/google/gerrit/server/config/GerritGlobalModule.java b/java/com/google/gerrit/server/config/GerritGlobalModule.java
index 09687f5..cb0cdf9 100644
--- a/java/com/google/gerrit/server/config/GerritGlobalModule.java
+++ b/java/com/google/gerrit/server/config/GerritGlobalModule.java
@@ -170,7 +170,6 @@
import com.google.gerrit.server.query.change.ChangeQueryProcessor;
import com.google.gerrit.server.query.change.ConflictsCacheImpl;
import com.google.gerrit.server.restapi.change.SuggestReviewers;
-import com.google.gerrit.server.restapi.config.ConfigRestModule;
import com.google.gerrit.server.restapi.group.GroupModule;
import com.google.gerrit.server.rules.DefaultSubmitRule;
import com.google.gerrit.server.rules.PrologModule;
@@ -306,12 +305,6 @@
install(new AuditModule());
bind(UiActions.class);
- install(new com.google.gerrit.server.restapi.access.Module());
- install(new ConfigRestModule());
- install(new com.google.gerrit.server.restapi.change.Module());
- install(new com.google.gerrit.server.restapi.account.Module());
- install(new com.google.gerrit.server.restapi.project.Module());
- install(new com.google.gerrit.server.restapi.group.Module());
bind(GitReferenceUpdated.class);
DynamicMap.mapOf(binder(), new TypeLiteral<Cache<?, ?>>() {});
diff --git a/java/com/google/gerrit/server/git/ValidationError.java b/java/com/google/gerrit/server/git/ValidationError.java
index 2fd65d2..28d5171 100644
--- a/java/com/google/gerrit/server/git/ValidationError.java
+++ b/java/com/google/gerrit/server/git/ValidationError.java
@@ -15,7 +15,6 @@
package com.google.gerrit.server.git;
import java.util.Objects;
-import org.slf4j.Logger;
/** Indicates a problem with Git based data. */
public class ValidationError {
@@ -46,10 +45,6 @@
void error(ValidationError error);
}
- public static Sink createLoggerSink(String message, Logger log) {
- return error -> log.error(message + error.getMessage());
- }
-
@Override
public boolean equals(Object o) {
if (o == this) {
diff --git a/java/com/google/gerrit/server/git/meta/TabFile.java b/java/com/google/gerrit/server/git/meta/TabFile.java
index 68950602..ef25cd8 100644
--- a/java/com/google/gerrit/server/git/meta/TabFile.java
+++ b/java/com/google/gerrit/server/git/meta/TabFile.java
@@ -24,7 +24,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import org.slf4j.Logger;
public class TabFile {
@FunctionalInterface
@@ -141,8 +140,4 @@
}
return r.toString();
}
-
- public static ValidationError.Sink createLoggerSink(String file, Logger log) {
- return ValidationError.createLoggerSink("Error parsing file " + file + ": ", log);
- }
}
diff --git a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
index 5ce3c1c..bff2952 100644
--- a/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
+++ b/java/com/google/gerrit/server/group/db/InternalGroupUpdate.java
@@ -142,8 +142,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setMemberModification(MemberModification)} in order to combine multiple member additions,
- * deletions, or other modifications into one update.
+ * #setMemberModification(InternalGroupUpdate.MemberModification)} in order to combine multiple
+ * member additions, deletions, or other modifications into one update.
*/
public abstract MemberModification getMemberModification();
@@ -155,8 +155,8 @@
* InternalGroupUpdate}.
*
* <p>This modification can be tweaked further and passed to {@link
- * #setSubgroupModification(SubgroupModification)} in order to combine multiple subgroup
- * additions, deletions, or other modifications into one update.
+ * #setSubgroupModification(InternalGroupUpdate.SubgroupModification)} in order to combine
+ * multiple subgroup additions, deletions, or other modifications into one update.
*/
public abstract SubgroupModification getSubgroupModification();
diff --git a/java/com/google/gerrit/server/group/testing/BUILD b/java/com/google/gerrit/server/group/testing/BUILD
index 134de78..8b8cd00 100644
--- a/java/com/google/gerrit/server/group/testing/BUILD
+++ b/java/com/google/gerrit/server/group/testing/BUILD
@@ -8,7 +8,8 @@
"//java/com/google/gerrit/common:server",
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
- "//lib:truth",
+ "//lib:guava",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/server/index/change/ChangeField.java b/java/com/google/gerrit/server/index/change/ChangeField.java
index 68b1ff9..82253f2 100644
--- a/java/com/google/gerrit/server/index/change/ChangeField.java
+++ b/java/com/google/gerrit/server/index/change/ChangeField.java
@@ -22,6 +22,9 @@
import static com.google.gerrit.index.FieldDef.prefix;
import static com.google.gerrit.index.FieldDef.storedOnly;
import static com.google.gerrit.index.FieldDef.timestamp;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.CHANGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
@@ -64,7 +67,6 @@
import com.google.gerrit.server.query.change.ChangeQueryBuilder;
import com.google.gerrit.server.query.change.ChangeStatusPredicate;
import com.google.gson.Gson;
-import com.google.gwtorm.protobuf.CodecFactory;
import com.google.gwtorm.protobuf.ProtobufCodec;
import com.google.gwtorm.server.OrmException;
import com.google.protobuf.CodedOutputStream;
@@ -468,15 +470,10 @@
exact(ChangeQueryBuilder.FIELD_EXACTCOMMITTER)
.buildRepeatable(ChangeField::getCommitterNameAndEmail);
- public static final ProtobufCodec<Change> CHANGE_CODEC = CodecFactory.encoder(Change.class);
-
/** Serialized change object, used for pre-populating results. */
public static final FieldDef<ChangeData, byte[]> CHANGE =
storedOnly("_change").build(changeGetter(CHANGE_CODEC::encodeToByteArray));
- public static final ProtobufCodec<PatchSetApproval> APPROVAL_CODEC =
- CodecFactory.encoder(PatchSetApproval.class);
-
/** Serialized approvals for the current patch set, used for pre-populating results. */
public static final FieldDef<ChangeData, Iterable<byte[]>> APPROVAL =
storedOnly("_approval")
@@ -596,9 +593,6 @@
cd ->
cd.patchSets().stream().flatMap(ps -> ps.getGroups().stream()).collect(toSet()));
- public static final ProtobufCodec<PatchSet> PATCH_SET_CODEC =
- CodecFactory.encoder(PatchSet.class);
-
/** Serialized patch set object, used for pre-populating results. */
public static final FieldDef<ChangeData, Iterable<byte[]>> PATCH_SET =
storedOnly("_patch_set").buildRepeatable(cd -> toProtos(PATCH_SET_CODEC, cd.patchSets()));
@@ -649,7 +643,7 @@
* <p>Stored fields need to use a stable format over a long period; this type insulates the index
* from implementation changes in SubmitRecord itself.
*/
- static class StoredSubmitRecord {
+ public static class StoredSubmitRecord {
static class StoredLabel {
String label;
SubmitRecord.Label.Status status;
@@ -667,7 +661,7 @@
List<StoredRequirement> requirements;
String errorMessage;
- StoredSubmitRecord(SubmitRecord rec) {
+ public StoredSubmitRecord(SubmitRecord rec) {
this.status = rec.status;
this.errorMessage = rec.errorMessage;
if (rec.labels != null) {
@@ -692,7 +686,7 @@
}
}
- private SubmitRecord toSubmitRecord() {
+ public SubmitRecord toSubmitRecord() {
SubmitRecord rec = new SubmitRecord();
rec.status = status;
rec.errorMessage = errorMessage;
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotes.java b/java/com/google/gerrit/server/notedb/ChangeNotes.java
index f5fefeb..1bbecc8 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotes.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotes.java
@@ -564,12 +564,7 @@
/** @return all change messages, in chronological order, oldest first. */
public ImmutableList<ChangeMessage> getChangeMessages() {
- return state.allChangeMessages();
- }
-
- /** @return change messages by patch set, in chronological order, oldest first. */
- public ImmutableListMultimap<PatchSet.Id, ChangeMessage> getChangeMessagesByPatchSet() {
- return state.changeMessagesByPatchSet();
+ return state.changeMessages();
}
/** @return inline comments on each revision. */
@@ -670,28 +665,6 @@
return state.readOnlyUntil();
}
- public boolean isPrivate() {
- if (state.isPrivate() == null) {
- return false;
- }
- return state.isPrivate();
- }
-
- public boolean isWorkInProgress() {
- if (state.isWorkInProgress() == null) {
- return false;
- }
- return state.isWorkInProgress();
- }
-
- public Change.Id getRevertOf() {
- return state.revertOf();
- }
-
- public boolean hasReviewStarted() {
- return state.hasReviewStarted();
- }
-
@Override
protected void onLoad(LoadHandle handle)
throws NoSuchChangeException, IOException, ConfigInvalidException {
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
index 676dbb8..d1c28c4 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesCache.java
@@ -25,12 +25,16 @@
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.cache.CacheModule;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
import com.google.gerrit.server.notedb.AbstractChangeNotes.Args;
import com.google.gerrit.server.notedb.ChangeNotesCommit.ChangeNotesRevWalk;
import com.google.inject.Inject;
import com.google.inject.Module;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
+import com.google.protobuf.ByteString;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -38,6 +42,7 @@
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.eclipse.jgit.errors.ConfigInvalidException;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
@Singleton
@@ -49,20 +54,59 @@
@Override
protected void configure() {
bind(ChangeNotesCache.class);
- cache(CACHE_NAME, Key.class, ChangeNotesState.class)
+ persist(CACHE_NAME, Key.class, ChangeNotesState.class)
.weigher(Weigher.class)
- .maximumWeight(10 << 20);
+ .maximumWeight(10 << 20)
+ .diskLimit(-1)
+ .version(1)
+ .keySerializer(Key.Serializer.INSTANCE)
+ .valueSerializer(ChangeNotesState.Serializer.INSTANCE);
}
};
}
@AutoValue
public abstract static class Key {
+ static Key create(Project.NameKey project, Change.Id changeId, ObjectId id) {
+ return new AutoValue_ChangeNotesCache_Key(project, changeId, id.copy());
+ }
+
abstract Project.NameKey project();
abstract Change.Id changeId();
abstract ObjectId id();
+
+ @VisibleForTesting
+ static enum Serializer implements CacheSerializer<Key> {
+ INSTANCE;
+
+ @Override
+ public byte[] serialize(Key object) {
+ byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
+ object.id().copyRawTo(buf, 0);
+ return ProtoCacheSerializers.toByteArray(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject(object.project().get())
+ .setChangeId(object.changeId().get())
+ .setId(ByteString.copyFrom(buf))
+ .build());
+ }
+
+ @Override
+ public Key deserialize(byte[] in) {
+ ChangeNotesKeyProto proto;
+ try {
+ proto = ChangeNotesKeyProto.parseFrom(in);
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Failed to deserialize " + Key.class.getName());
+ }
+ return Key.create(
+ new Project.NameKey(proto.getProject()),
+ new Change.Id(proto.getChangeId()),
+ ObjectId.fromRaw(proto.getId().toByteArray()));
+ }
+ }
}
public static class Weigher implements com.google.common.cache.Weigher<Key, ChangeNotesState> {
@@ -128,16 +172,13 @@
+ P
+ list(state.submitRecords(), P + list(2, str(4) + P + K) + P)
+ P
- + list(state.allChangeMessages(), changeMessage())
- // Just key overhead for map, already counted messages in previous.
- + P
- + map(state.changeMessagesByPatchSet().asMap(), patchSetId())
+ + list(state.changeMessages(), changeMessage())
+ P
+ map(state.publishedComments().asMap(), comment())
+ T // readOnlyUntil
+ 1 // isPrivate
+ 1 // workInProgress
- + 1; // hasReviewStarted
+ + 1; // reviewStarted
}
private static int ptr(Object o, int size) {
@@ -333,7 +374,7 @@
Value get(Project.NameKey project, Change.Id changeId, ObjectId metaId, ChangeNotesRevWalk rw)
throws IOException {
try {
- Key key = new AutoValue_ChangeNotesCache_Key(project, changeId, metaId.copy());
+ Key key = Key.create(project, changeId, metaId);
Loader loader = new Loader(key, rw);
ChangeNotesState s = cache.get(key, loader);
return new AutoValue_ChangeNotesCache_Value(s, loader.revisionNoteMap);
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesParser.java b/java/com/google/gerrit/server/notedb/ChangeNotesParser.java
index d6472bc..2eb30ff 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesParser.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesParser.java
@@ -14,6 +14,7 @@
package com.google.gerrit.server.notedb;
+import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_ASSIGNEE;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_BRANCH;
import static com.google.gerrit.server.notedb.ChangeNoteUtil.FOOTER_CHANGE_ID;
@@ -44,7 +45,6 @@
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableTable;
-import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MultimapBuilder;
@@ -145,7 +145,6 @@
private final Map<ApprovalKey, PatchSetApproval> approvals;
private final List<PatchSetApproval> bufferedApprovals;
private final List<ChangeMessage> allChangeMessages;
- private final ListMultimap<PatchSet.Id, ChangeMessage> changeMessagesByPatchSet;
// Non-final private members filled in during the parsing process.
private String branch;
@@ -193,7 +192,6 @@
reviewerUpdates = new ArrayList<>();
submitRecords = Lists.newArrayListWithExpectedSize(1);
allChangeMessages = new ArrayList<>();
- changeMessagesByPatchSet = LinkedListMultimap.create();
comments = MultimapBuilder.hashKeys().arrayListValues().build();
patchSets = new HashMap<>();
deletedPatchSets = new HashSet<>();
@@ -253,7 +251,7 @@
assignee != null ? assignee.orElse(null) : null,
status,
Sets.newLinkedHashSet(Lists.reverse(pastAssignees)),
- hashtags,
+ firstNonNull(hashtags, ImmutableSet.of()),
patchSets,
buildApprovals(),
ReviewerSet.fromTable(Tables.transpose(reviewers)),
@@ -264,12 +262,11 @@
buildReviewerUpdates(),
submitRecords,
buildAllMessages(),
- buildMessagesByPatchSet(),
comments,
readOnlyUntil,
- isPrivate,
- workInProgress,
- hasReviewStarted,
+ firstNonNull(isPrivate, false),
+ firstNonNull(workInProgress, false),
+ firstNonNull(hasReviewStarted, true),
revertOf);
}
@@ -318,13 +315,6 @@
return Lists.reverse(allChangeMessages);
}
- private ListMultimap<PatchSet.Id, ChangeMessage> buildMessagesByPatchSet() {
- for (Collection<ChangeMessage> v : changeMessagesByPatchSet.asMap().values()) {
- Collections.reverse((List<ChangeMessage>) v);
- }
- return changeMessagesByPatchSet;
- }
-
private void parse(ChangeNotesCommit commit) throws ConfigInvalidException {
Timestamp ts = new Timestamp(commit.getCommitterIdent().getWhen().getTime());
@@ -751,7 +741,6 @@
changeMessage.setMessage(changeMsgString);
changeMessage.setTag(tag);
changeMessage.setRealAuthor(realAccountId);
- changeMessagesByPatchSet.put(psId, changeMessage);
allChangeMessages.add(changeMessage);
}
@@ -1088,8 +1077,6 @@
// (or otherwise missing) patch sets. This is safer than trying to prevent
// insertion, as it will also filter out items racily added after the patch
// set was deleted.
- changeMessagesByPatchSet.keys().retainAll(patchSets.keySet());
-
int pruned =
pruneEntitiesForMissingPatchSets(allChangeMessages, ChangeMessage::getPatchSetId, missing);
pruned +=
diff --git a/java/com/google/gerrit/server/notedb/ChangeNotesState.java b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
index 1dd944d..1b09494 100644
--- a/java/com/google/gerrit/server/notedb/ChangeNotesState.java
+++ b/java/com/google/gerrit/server/notedb/ChangeNotesState.java
@@ -14,15 +14,29 @@
package com.google.gerrit.server.notedb;
+import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.collect.ImmutableList.toImmutableList;
+import static com.google.common.collect.ImmutableListMultimap.toImmutableListMultimap;
+import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.ProtoCacheSerializers.toByteString;
import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Converter;
+import com.google.common.base.Enums;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Table;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.reviewdb.client.Account;
@@ -34,15 +48,28 @@
import com.google.gerrit.reviewdb.client.PatchSetApproval;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.OutputFormat;
import com.google.gerrit.server.ReviewerByEmailSet;
import com.google.gerrit.server.ReviewerSet;
import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord;
+import com.google.gerrit.server.mail.Address;
import com.google.gerrit.server.notedb.NoteDbChangeState.PrimaryStorage;
+import com.google.gson.Gson;
+import com.google.protobuf.ByteString;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
/**
@@ -57,33 +84,15 @@
@AutoValue
public abstract class ChangeNotesState {
static ChangeNotesState empty(Change change) {
- return new AutoValue_ChangeNotesState(
- null,
- change.getId(),
- null,
- ImmutableSet.of(),
- ImmutableSet.of(),
- ImmutableList.of(),
- ImmutableList.of(),
- ReviewerSet.empty(),
- ReviewerByEmailSet.empty(),
- ReviewerSet.empty(),
- ReviewerByEmailSet.empty(),
- ImmutableList.of(),
- ImmutableList.of(),
- ImmutableList.of(),
- ImmutableList.of(),
- ImmutableListMultimap.of(),
- ImmutableListMultimap.of(),
- null,
- null,
- null,
- true,
- null);
+ return Builder.empty(change.getId()).build();
+ }
+
+ static Builder builder() {
+ return new AutoValue_ChangeNotesState.Builder();
}
static ChangeNotesState create(
- @Nullable ObjectId metaId,
+ ObjectId metaId,
Change.Id changeId,
Change.Key changeKey,
Timestamp createdOn,
@@ -97,8 +106,8 @@
@Nullable String submissionId,
@Nullable Account.Id assignee,
@Nullable Change.Status status,
- @Nullable Set<Account.Id> pastAssignees,
- @Nullable Set<String> hashtags,
+ Set<Account.Id> pastAssignees,
+ Set<String> hashtags,
Map<PatchSet.Id, PatchSet> patchSets,
ListMultimap<PatchSet.Id, PatchSetApproval> approvals,
ReviewerSet reviewers,
@@ -108,56 +117,55 @@
List<Account.Id> allPastReviewers,
List<ReviewerStatusUpdate> reviewerUpdates,
List<SubmitRecord> submitRecords,
- List<ChangeMessage> allChangeMessages,
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessagesByPatchSet,
+ List<ChangeMessage> changeMessages,
ListMultimap<RevId, Comment> publishedComments,
@Nullable Timestamp readOnlyUntil,
- @Nullable Boolean isPrivate,
- @Nullable Boolean workInProgress,
- boolean hasReviewStarted,
+ boolean isPrivate,
+ boolean workInProgress,
+ boolean reviewStarted,
@Nullable Change.Id revertOf) {
- if (hashtags == null) {
- hashtags = ImmutableSet.of();
- }
- return new AutoValue_ChangeNotesState(
+ checkNotNull(
metaId,
- changeId,
- new AutoValue_ChangeNotesState_ChangeColumns(
- changeKey,
- createdOn,
- lastUpdatedOn,
- owner,
- branch,
- currentPatchSetId,
- subject,
- topic,
- originalSubject,
- submissionId,
- assignee,
- status,
- isPrivate,
- workInProgress,
- hasReviewStarted,
- revertOf),
- ImmutableSet.copyOf(pastAssignees),
- ImmutableSet.copyOf(hashtags),
- ImmutableList.copyOf(patchSets.entrySet()),
- ImmutableList.copyOf(approvals.entries()),
- reviewers,
- reviewersByEmail,
- pendingReviewers,
- pendingReviewersByEmail,
- ImmutableList.copyOf(allPastReviewers),
- ImmutableList.copyOf(reviewerUpdates),
- ImmutableList.copyOf(submitRecords),
- ImmutableList.copyOf(allChangeMessages),
- ImmutableListMultimap.copyOf(changeMessagesByPatchSet),
- ImmutableListMultimap.copyOf(publishedComments),
- readOnlyUntil,
- isPrivate,
- workInProgress,
- hasReviewStarted,
- revertOf);
+ "metaId is required when passing arguments to create(...). To create an empty %s without"
+ + " NoteDb data, use empty(...) instead",
+ ChangeNotesState.class.getSimpleName());
+ return builder()
+ .metaId(metaId)
+ .changeId(changeId)
+ .columns(
+ ChangeColumns.builder()
+ .changeKey(changeKey)
+ .createdOn(createdOn)
+ .lastUpdatedOn(lastUpdatedOn)
+ .owner(owner)
+ .branch(branch)
+ .status(status)
+ .currentPatchSetId(currentPatchSetId)
+ .subject(subject)
+ .topic(topic)
+ .originalSubject(originalSubject)
+ .submissionId(submissionId)
+ .assignee(assignee)
+ .isPrivate(isPrivate)
+ .workInProgress(workInProgress)
+ .reviewStarted(reviewStarted)
+ .revertOf(revertOf)
+ .build())
+ .pastAssignees(pastAssignees)
+ .hashtags(hashtags)
+ .patchSets(patchSets.entrySet())
+ .approvals(approvals.entries())
+ .reviewers(reviewers)
+ .reviewersByEmail(reviewersByEmail)
+ .pendingReviewers(pendingReviewers)
+ .pendingReviewersByEmail(pendingReviewersByEmail)
+ .allPastReviewers(allPastReviewers)
+ .reviewerUpdates(reviewerUpdates)
+ .submitRecords(submitRecords)
+ .changeMessages(changeMessages)
+ .publishedComments(publishedComments)
+ .readOnlyUntil(readOnlyUntil)
+ .build();
}
/**
@@ -166,10 +174,14 @@
* <p>Notable exceptions include rowVersion and noteDbState, which are only make sense when read
* from NoteDb, so they cannot be cached.
*
- * <p>Fields are in listed column order.
+ * <p>Fields should match the column names in {@link Change}, and are in listed column order.
*/
@AutoValue
abstract static class ChangeColumns {
+ static Builder builder() {
+ return new AutoValue_ChangeNotesState_ChangeColumns.Builder();
+ }
+
abstract Change.Key changeKey();
abstract Timestamp createdOn();
@@ -181,6 +193,10 @@
// Project not included, as it's not stored anywhere in the meta ref.
abstract String branch();
+ // TODO(dborowitz): Use a sensible default other than null
+ @Nullable
+ abstract Change.Status status();
+
@Nullable
abstract PatchSet.Id currentPatchSetId();
@@ -197,21 +213,54 @@
@Nullable
abstract Account.Id assignee();
- // TODO(dborowitz): Use a sensible default other than null
- @Nullable
- abstract Change.Status status();
- @Nullable
- abstract Boolean isPrivate();
+ abstract boolean isPrivate();
- @Nullable
- abstract Boolean isWorkInProgress();
+ abstract boolean workInProgress();
- @Nullable
- abstract Boolean hasReviewStarted();
+ abstract boolean reviewStarted();
@Nullable
abstract Change.Id revertOf();
+
+ abstract Builder toBuilder();
+
+ @AutoValue.Builder
+ abstract static class Builder {
+ abstract Builder changeKey(Change.Key changeKey);
+
+ abstract Builder createdOn(Timestamp createdOn);
+
+ abstract Builder lastUpdatedOn(Timestamp lastUpdatedOn);
+
+ abstract Builder owner(Account.Id owner);
+
+ abstract Builder branch(String branch);
+
+ abstract Builder currentPatchSetId(@Nullable PatchSet.Id currentPatchSetId);
+
+ abstract Builder subject(String subject);
+
+ abstract Builder topic(@Nullable String topic);
+
+ abstract Builder originalSubject(@Nullable String originalSubject);
+
+ abstract Builder submissionId(@Nullable String submissionId);
+
+ abstract Builder assignee(@Nullable Account.Id assignee);
+
+ abstract Builder status(@Nullable Change.Status status);
+
+ abstract Builder isPrivate(boolean isPrivate);
+
+ abstract Builder workInProgress(boolean workInProgress);
+
+ abstract Builder reviewStarted(boolean reviewStarted);
+
+ abstract Builder revertOf(@Nullable Change.Id revertOf);
+
+ abstract ChangeColumns build();
+ }
}
// Only null if NoteDb is disabled.
@@ -247,27 +296,13 @@
abstract ImmutableList<SubmitRecord> submitRecords();
- abstract ImmutableList<ChangeMessage> allChangeMessages();
-
- abstract ImmutableListMultimap<PatchSet.Id, ChangeMessage> changeMessagesByPatchSet();
+ abstract ImmutableList<ChangeMessage> changeMessages();
abstract ImmutableListMultimap<RevId, Comment> publishedComments();
@Nullable
abstract Timestamp readOnlyUntil();
- @Nullable
- abstract Boolean isPrivate();
-
- @Nullable
- abstract Boolean isWorkInProgress();
-
- @Nullable
- abstract Boolean hasReviewStarted();
-
- @Nullable
- abstract Change.Id revertOf();
-
Change newChange(Project.NameKey project) {
ChangeColumns c = checkNotNull(columns(), "columns are required");
Change change =
@@ -325,9 +360,9 @@
change.setLastUpdatedOn(c.lastUpdatedOn());
change.setSubmissionId(c.submissionId());
change.setAssignee(c.assignee());
- change.setPrivate(c.isPrivate() == null ? false : c.isPrivate());
- change.setWorkInProgress(c.isWorkInProgress() == null ? false : c.isWorkInProgress());
- change.setReviewStarted(c.hasReviewStarted() == null ? false : c.hasReviewStarted());
+ change.setPrivate(c.isPrivate());
+ change.setWorkInProgress(c.workInProgress());
+ change.setReviewStarted(c.reviewStarted());
change.setRevertOf(c.revertOf());
if (!patchSets().isEmpty()) {
@@ -338,4 +373,331 @@
change.clearCurrentPatchSet();
}
}
+
+ @AutoValue.Builder
+ abstract static class Builder {
+ static Builder empty(Change.Id changeId) {
+ return new AutoValue_ChangeNotesState.Builder()
+ .changeId(changeId)
+ .pastAssignees(ImmutableSet.of())
+ .hashtags(ImmutableSet.of())
+ .patchSets(ImmutableList.of())
+ .approvals(ImmutableList.of())
+ .reviewers(ReviewerSet.empty())
+ .reviewersByEmail(ReviewerByEmailSet.empty())
+ .pendingReviewers(ReviewerSet.empty())
+ .pendingReviewersByEmail(ReviewerByEmailSet.empty())
+ .allPastReviewers(ImmutableList.of())
+ .reviewerUpdates(ImmutableList.of())
+ .submitRecords(ImmutableList.of())
+ .changeMessages(ImmutableList.of())
+ .publishedComments(ImmutableListMultimap.of());
+ }
+
+ abstract Builder metaId(ObjectId metaId);
+
+ abstract Builder changeId(Change.Id changeId);
+
+ abstract Builder columns(ChangeColumns columns);
+
+ abstract Builder pastAssignees(Set<Account.Id> pastAssignees);
+
+ abstract Builder hashtags(Iterable<String> hashtags);
+
+ abstract Builder patchSets(Iterable<Map.Entry<PatchSet.Id, PatchSet>> patchSets);
+
+ abstract Builder approvals(Iterable<Map.Entry<PatchSet.Id, PatchSetApproval>> approvals);
+
+ abstract Builder reviewers(ReviewerSet reviewers);
+
+ abstract Builder reviewersByEmail(ReviewerByEmailSet reviewersByEmail);
+
+ abstract Builder pendingReviewers(ReviewerSet pendingReviewers);
+
+ abstract Builder pendingReviewersByEmail(ReviewerByEmailSet pendingReviewersByEmail);
+
+ abstract Builder allPastReviewers(List<Account.Id> allPastReviewers);
+
+ abstract Builder reviewerUpdates(List<ReviewerStatusUpdate> reviewerUpdates);
+
+ abstract Builder submitRecords(List<SubmitRecord> submitRecords);
+
+ abstract Builder changeMessages(List<ChangeMessage> changeMessages);
+
+ abstract Builder publishedComments(ListMultimap<RevId, Comment> publishedComments);
+
+ abstract Builder readOnlyUntil(@Nullable Timestamp readOnlyUntil);
+
+ abstract ChangeNotesState build();
+ }
+
+ static enum Serializer implements CacheSerializer<ChangeNotesState> {
+ INSTANCE;
+
+ @VisibleForTesting static final Gson GSON = OutputFormat.JSON_COMPACT.newGson();
+
+ private static final Converter<String, Change.Status> STATUS_CONVERTER =
+ Enums.stringConverter(Change.Status.class);
+ private static final Converter<String, ReviewerStateInternal> REVIEWER_STATE_CONVERTER =
+ Enums.stringConverter(ReviewerStateInternal.class);
+
+ @Override
+ public byte[] serialize(ChangeNotesState object) {
+ checkArgument(object.metaId() != null, "meta ID is required in: %s", object);
+ checkArgument(object.columns() != null, "ChangeColumns is required in: %s", object);
+ ChangeNotesStateProto.Builder b = ChangeNotesStateProto.newBuilder();
+
+ byte[] idBuf = new byte[Constants.OBJECT_ID_LENGTH];
+ object.metaId().copyRawTo(idBuf, 0);
+ b.setMetaId(ByteString.copyFrom(idBuf))
+ .setChangeId(object.changeId().get())
+ .setColumns(toChangeColumnsProto(object.columns()));
+
+ object.pastAssignees().forEach(a -> b.addPastAssignee(a.get()));
+ object.hashtags().forEach(b::addHashtag);
+ object.patchSets().forEach(e -> b.addPatchSet(toByteString(e.getValue(), PATCH_SET_CODEC)));
+ object.approvals().forEach(e -> b.addApproval(toByteString(e.getValue(), APPROVAL_CODEC)));
+
+ object.reviewers().asTable().cellSet().forEach(c -> b.addReviewer(toReviewerSetEntry(c)));
+ object
+ .reviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addReviewerByEmail(toReviewerByEmailSetEntry(c)));
+ object
+ .pendingReviewers()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewer(toReviewerSetEntry(c)));
+ object
+ .pendingReviewersByEmail()
+ .asTable()
+ .cellSet()
+ .forEach(c -> b.addPendingReviewerByEmail(toReviewerByEmailSetEntry(c)));
+
+ object.allPastReviewers().forEach(a -> b.addPastReviewer(a.get()));
+ object.reviewerUpdates().forEach(u -> b.addReviewerUpdate(toReviewerStatusUpdateProto(u)));
+ object
+ .submitRecords()
+ .forEach(r -> b.addSubmitRecord(GSON.toJson(new StoredSubmitRecord(r))));
+ object.changeMessages().forEach(m -> b.addChangeMessage(toByteString(m, MESSAGE_CODEC)));
+ object.publishedComments().values().forEach(c -> b.addPublishedComment(GSON.toJson(c)));
+
+ if (object.readOnlyUntil() != null) {
+ b.setReadOnlyUntil(object.readOnlyUntil().getTime()).setHasReadOnlyUntil(true);
+ }
+
+ return ProtoCacheSerializers.toByteArray(b.build());
+ }
+
+ private static ChangeColumnsProto toChangeColumnsProto(ChangeColumns cols) {
+ ChangeColumnsProto.Builder b =
+ ChangeColumnsProto.newBuilder()
+ .setChangeKey(cols.changeKey().get())
+ .setCreatedOn(cols.createdOn().getTime())
+ .setLastUpdatedOn(cols.lastUpdatedOn().getTime())
+ .setOwner(cols.owner().get())
+ .setBranch(cols.branch());
+ if (cols.currentPatchSetId() != null) {
+ b.setCurrentPatchSetId(cols.currentPatchSetId().get()).setHasCurrentPatchSetId(true);
+ }
+ b.setSubject(cols.subject());
+ if (cols.topic() != null) {
+ b.setTopic(cols.topic()).setHasTopic(true);
+ }
+ if (cols.originalSubject() != null) {
+ b.setOriginalSubject(cols.originalSubject()).setHasOriginalSubject(true);
+ }
+ if (cols.submissionId() != null) {
+ b.setSubmissionId(cols.submissionId()).setHasSubmissionId(true);
+ }
+ if (cols.assignee() != null) {
+ b.setAssignee(cols.assignee().get()).setHasAssignee(true);
+ }
+ if (cols.status() != null) {
+ b.setStatus(STATUS_CONVERTER.reverse().convert(cols.status())).setHasStatus(true);
+ }
+ b.setIsPrivate(cols.isPrivate())
+ .setWorkInProgress(cols.workInProgress())
+ .setReviewStarted(cols.reviewStarted());
+ if (cols.revertOf() != null) {
+ b.setRevertOf(cols.revertOf().get()).setHasRevertOf(true);
+ }
+ return b.build();
+ }
+
+ private static ReviewerSetEntryProto toReviewerSetEntry(
+ Table.Cell<ReviewerStateInternal, Account.Id, Timestamp> c) {
+ return ReviewerSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAccountId(c.getColumnKey().get())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerByEmailSetEntryProto toReviewerByEmailSetEntry(
+ Table.Cell<ReviewerStateInternal, Address, Timestamp> c) {
+ return ReviewerByEmailSetEntryProto.newBuilder()
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(c.getRowKey()))
+ .setAddress(c.getColumnKey().toHeaderString())
+ .setTimestamp(c.getValue().getTime())
+ .build();
+ }
+
+ private static ReviewerStatusUpdateProto toReviewerStatusUpdateProto(ReviewerStatusUpdate u) {
+ return ReviewerStatusUpdateProto.newBuilder()
+ .setDate(u.date().getTime())
+ .setUpdatedBy(u.updatedBy().get())
+ .setReviewer(u.reviewer().get())
+ .setState(REVIEWER_STATE_CONVERTER.reverse().convert(u.state()))
+ .build();
+ }
+
+ @Override
+ public ChangeNotesState deserialize(byte[] in) {
+ ChangeNotesStateProto proto;
+ try {
+ proto = ChangeNotesStateProto.parseFrom(in);
+ } catch (IOException e) {
+ throw new IllegalArgumentException(
+ "Failed to deserialize " + ChangeNotesState.class.getName());
+ }
+ Change.Id changeId = new Change.Id(proto.getChangeId());
+
+ ChangeNotesState.Builder b =
+ builder()
+ .metaId(ObjectId.fromRaw(proto.getMetaId().toByteArray()))
+ .changeId(changeId)
+ .columns(toChangeColumns(changeId, proto.getColumns()))
+ .pastAssignees(
+ proto
+ .getPastAssigneeList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableSet()))
+ .hashtags(proto.getHashtagList())
+ .patchSets(
+ proto
+ .getPatchSetList()
+ .stream()
+ .map(PATCH_SET_CODEC::decode)
+ .map(ps -> Maps.immutableEntry(ps.getId(), ps))
+ .collect(toImmutableList()))
+ .approvals(
+ proto
+ .getApprovalList()
+ .stream()
+ .map(APPROVAL_CODEC::decode)
+ .map(a -> Maps.immutableEntry(a.getPatchSetId(), a))
+ .collect(toImmutableList()))
+ .reviewers(toReviewerSet(proto.getReviewerList()))
+ .reviewersByEmail(toReviewerByEmailSet(proto.getReviewerByEmailList()))
+ .pendingReviewers(toReviewerSet(proto.getPendingReviewerList()))
+ .pendingReviewersByEmail(toReviewerByEmailSet(proto.getPendingReviewerByEmailList()))
+ .allPastReviewers(
+ proto
+ .getPastReviewerList()
+ .stream()
+ .map(Account.Id::new)
+ .collect(toImmutableList()))
+ .reviewerUpdates(toReviewerStatusUpdateList(proto.getReviewerUpdateList()))
+ .submitRecords(
+ proto
+ .getSubmitRecordList()
+ .stream()
+ .map(r -> GSON.fromJson(r, StoredSubmitRecord.class).toSubmitRecord())
+ .collect(toImmutableList()))
+ .changeMessages(
+ proto
+ .getChangeMessageList()
+ .stream()
+ .map(MESSAGE_CODEC::decode)
+ .collect(toImmutableList()))
+ .publishedComments(
+ proto
+ .getPublishedCommentList()
+ .stream()
+ .map(r -> GSON.fromJson(r, Comment.class))
+ .collect(toImmutableListMultimap(c -> new RevId(c.revId), c -> c)));
+ if (proto.getHasReadOnlyUntil()) {
+ b.readOnlyUntil(new Timestamp(proto.getReadOnlyUntil()));
+ }
+ return b.build();
+ }
+
+ private static ChangeColumns toChangeColumns(Change.Id changeId, ChangeColumnsProto proto) {
+ ChangeColumns.Builder b =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(proto.getChangeKey()))
+ .createdOn(new Timestamp(proto.getCreatedOn()))
+ .lastUpdatedOn(new Timestamp(proto.getLastUpdatedOn()))
+ .owner(new Account.Id(proto.getOwner()))
+ .branch(proto.getBranch());
+ if (proto.getHasCurrentPatchSetId()) {
+ b.currentPatchSetId(new PatchSet.Id(changeId, proto.getCurrentPatchSetId()));
+ }
+ b.subject(proto.getSubject());
+ if (proto.getHasTopic()) {
+ b.topic(proto.getTopic());
+ }
+ if (proto.getHasOriginalSubject()) {
+ b.originalSubject(proto.getOriginalSubject());
+ }
+ if (proto.getHasSubmissionId()) {
+ b.submissionId(proto.getSubmissionId());
+ }
+ if (proto.getHasAssignee()) {
+ b.assignee(new Account.Id(proto.getAssignee()));
+ }
+ if (proto.getHasStatus()) {
+ b.status(STATUS_CONVERTER.convert(proto.getStatus()));
+ }
+ b.isPrivate(proto.getIsPrivate())
+ .workInProgress(proto.getWorkInProgress())
+ .reviewStarted(proto.getReviewStarted());
+ if (proto.getHasRevertOf()) {
+ b.revertOf(new Change.Id(proto.getRevertOf()));
+ }
+ return b.build();
+ }
+
+ private static ReviewerSet toReviewerSet(List<ReviewerSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Account.Id, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ new Account.Id(e.getAccountId()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerSet.fromTable(b.build());
+ }
+
+ private static ReviewerByEmailSet toReviewerByEmailSet(
+ List<ReviewerByEmailSetEntryProto> protos) {
+ ImmutableTable.Builder<ReviewerStateInternal, Address, Timestamp> b =
+ ImmutableTable.builder();
+ for (ReviewerByEmailSetEntryProto e : protos) {
+ b.put(
+ REVIEWER_STATE_CONVERTER.convert(e.getState()),
+ Address.parse(e.getAddress()),
+ new Timestamp(e.getTimestamp()));
+ }
+ return ReviewerByEmailSet.fromTable(b.build());
+ }
+
+ private static ImmutableList<ReviewerStatusUpdate> toReviewerStatusUpdateList(
+ List<ReviewerStatusUpdateProto> protos) {
+ ImmutableList.Builder<ReviewerStatusUpdate> b = ImmutableList.builder();
+ for (ReviewerStatusUpdateProto proto : protos) {
+ b.add(
+ ReviewerStatusUpdate.create(
+ new Timestamp(proto.getDate()),
+ new Account.Id(proto.getUpdatedBy()),
+ new Account.Id(proto.getReviewer()),
+ REVIEWER_STATE_CONVERTER.convert(proto.getState())));
+ }
+ return b.build();
+ }
+ }
}
diff --git a/java/com/google/gerrit/server/permissions/ChangeControl.java b/java/com/google/gerrit/server/permissions/ChangeControl.java
index b13d921..3a17965 100644
--- a/java/com/google/gerrit/server/permissions/ChangeControl.java
+++ b/java/com/google/gerrit/server/permissions/ChangeControl.java
@@ -29,6 +29,7 @@
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
+import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.permissions.PermissionBackend.ForChange;
import com.google.gerrit.server.query.change.ChangeData;
@@ -47,11 +48,16 @@
static class Factory {
private final ChangeData.Factory changeDataFactory;
private final ChangeNotes.Factory notesFactory;
+ private final IdentifiedUser.GenericFactory identifiedUserFactory;
@Inject
- Factory(ChangeData.Factory changeDataFactory, ChangeNotes.Factory notesFactory) {
+ Factory(
+ ChangeData.Factory changeDataFactory,
+ ChangeNotes.Factory notesFactory,
+ IdentifiedUser.GenericFactory identifiedUserFactory) {
this.changeDataFactory = changeDataFactory;
this.notesFactory = notesFactory;
+ this.identifiedUserFactory = identifiedUserFactory;
}
ChangeControl create(
@@ -61,17 +67,22 @@
}
ChangeControl create(RefControl refControl, ChangeNotes notes) {
- return new ChangeControl(changeDataFactory, refControl, notes);
+ return new ChangeControl(changeDataFactory, identifiedUserFactory, refControl, notes);
}
}
private final ChangeData.Factory changeDataFactory;
+ private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final RefControl refControl;
private final ChangeNotes notes;
private ChangeControl(
- ChangeData.Factory changeDataFactory, RefControl refControl, ChangeNotes notes) {
+ ChangeData.Factory changeDataFactory,
+ IdentifiedUser.GenericFactory identifiedUserFactory,
+ RefControl refControl,
+ ChangeNotes notes) {
this.changeDataFactory = changeDataFactory;
+ this.identifiedUserFactory = identifiedUserFactory;
this.refControl = refControl;
this.notes = notes;
}
@@ -84,7 +95,8 @@
if (getUser().equals(who)) {
return this;
}
- return new ChangeControl(changeDataFactory, refControl.forUser(who), notes);
+ return new ChangeControl(
+ changeDataFactory, identifiedUserFactory, refControl.forUser(who), notes);
}
private CurrentUser getUser() {
@@ -261,6 +273,11 @@
}
@Override
+ public ForChange absentUser(Account.Id id) {
+ return user(identifiedUserFactory.create(id));
+ }
+
+ @Override
public String resourcePath() {
if (resourcePath == null) {
resourcePath =
diff --git a/java/com/google/gerrit/server/permissions/DefaultPermissionBackend.java b/java/com/google/gerrit/server/permissions/DefaultPermissionBackend.java
index 02eed30..490b45e 100644
--- a/java/com/google/gerrit/server/permissions/DefaultPermissionBackend.java
+++ b/java/com/google/gerrit/server/permissions/DefaultPermissionBackend.java
@@ -79,8 +79,8 @@
}
@Override
- public WithUser absentUser(Account.Id user) {
- IdentifiedUser identifiedUser = identifiedUserFactory.create(checkNotNull(user, "user"));
+ public WithUser absentUser(Account.Id id) {
+ IdentifiedUser identifiedUser = identifiedUserFactory.create(checkNotNull(id, "user"));
return new WithUserImpl(identifiedUser);
}
diff --git a/java/com/google/gerrit/server/permissions/FailedPermissionBackend.java b/java/com/google/gerrit/server/permissions/FailedPermissionBackend.java
index 35b6e0d..431bfd9 100644
--- a/java/com/google/gerrit/server/permissions/FailedPermissionBackend.java
+++ b/java/com/google/gerrit/server/permissions/FailedPermissionBackend.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.permissions;
import com.google.gerrit.extensions.api.access.GlobalOrPluginPermission;
+import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
@@ -129,6 +130,11 @@
}
@Override
+ public ForProject absentUser(Account.Id id) {
+ return this;
+ }
+
+ @Override
public String resourcePath() {
throw new UnsupportedOperationException(
"FailedPermissionBackend is not scoped to a resource");
@@ -182,6 +188,11 @@
}
@Override
+ public ForRef absentUser(Account.Id id) {
+ return this;
+ }
+
+ @Override
public String resourcePath() {
throw new UnsupportedOperationException(
"FailedPermissionBackend is not scoped to a resource");
@@ -234,6 +245,11 @@
}
@Override
+ public ForChange absentUser(Account.Id id) {
+ return this;
+ }
+
+ @Override
public String resourcePath() {
throw new UnsupportedOperationException(
"FailedPermissionBackend is not scoped to a resource");
diff --git a/java/com/google/gerrit/server/permissions/PermissionBackend.java b/java/com/google/gerrit/server/permissions/PermissionBackend.java
index 4cbd77e..8cdb61d 100644
--- a/java/com/google/gerrit/server/permissions/PermissionBackend.java
+++ b/java/com/google/gerrit/server/permissions/PermissionBackend.java
@@ -112,7 +112,7 @@
*
* <p>Usage should be very limited as this can expose a group-oracle.
*/
- public abstract WithUser absentUser(Account.Id user);
+ public abstract WithUser absentUser(Account.Id id);
/**
* Check whether this {@code PermissionBackend} respects the same global capabilities as the
@@ -305,6 +305,9 @@
/** Returns a new instance rescoped to same project, but different {@code user}. */
public abstract ForProject user(CurrentUser user);
+ /** @see PermissionBackend#absentUser(Account.Id) */
+ public abstract ForProject absentUser(Account.Id id);
+
/** Returns an instance scoped for {@code ref} in this project. */
public abstract ForRef ref(String ref);
@@ -413,6 +416,9 @@
/** Returns a new instance rescoped to same reference, but different {@code user}. */
public abstract ForRef user(CurrentUser user);
+ /** @see PermissionBackend#absentUser(Account.Id) */
+ public abstract ForRef absentUser(Account.Id id);
+
/** Returns an instance scoped to change. */
public abstract ForChange change(ChangeData cd);
@@ -471,6 +477,9 @@
/** Returns a new instance rescoped to same change, but different {@code user}. */
public abstract ForChange user(CurrentUser user);
+ /** @see PermissionBackend#absentUser(Account.Id) */
+ public abstract ForChange absentUser(Account.Id id);
+
/** Verify scoped user can {@code perm}, throwing if denied. */
public abstract void check(ChangePermissionOrLabel perm)
throws AuthException, PermissionBackendException;
diff --git a/java/com/google/gerrit/server/permissions/ProjectControl.java b/java/com/google/gerrit/server/permissions/ProjectControl.java
index dbd60ea..2d2a64d 100644
--- a/java/com/google/gerrit/server/permissions/ProjectControl.java
+++ b/java/com/google/gerrit/server/permissions/ProjectControl.java
@@ -20,6 +20,7 @@
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.restapi.AuthException;
+import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
@@ -27,6 +28,7 @@
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
+import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.GroupMembership;
import com.google.gerrit.server.config.GitReceivePackGroups;
import com.google.gerrit.server.config.GitUploadPackGroups;
@@ -67,6 +69,7 @@
private final ChangeControl.Factory changeControlFactory;
private final PermissionCollection.Factory permissionFilter;
private final DefaultRefFilter.Factory refFilterFactory;
+ private final IdentifiedUser.GenericFactory identifiedUserFactory;
private List<SectionMatcher> allSections;
private Map<String, RefControl> refControls;
@@ -80,6 +83,7 @@
ChangeControl.Factory changeControlFactory,
PermissionBackend permissionBackend,
DefaultRefFilter.Factory refFilterFactory,
+ IdentifiedUser.GenericFactory identifiedUserFactory,
@Assisted CurrentUser who,
@Assisted ProjectState ps) {
this.changeControlFactory = changeControlFactory;
@@ -88,6 +92,7 @@
this.permissionFilter = permissionFilter;
this.permissionBackend = permissionBackend;
this.refFilterFactory = refFilterFactory;
+ this.identifiedUserFactory = identifiedUserFactory;
user = who;
state = ps;
}
@@ -101,6 +106,7 @@
changeControlFactory,
permissionBackend,
refFilterFactory,
+ identifiedUserFactory,
who,
state);
// Not per-user, and reusing saves lookup time.
@@ -132,7 +138,7 @@
RefControl ctl = refControls.get(refName);
if (ctl == null) {
PermissionCollection relevant = permissionFilter.filter(access(), refName, user);
- ctl = new RefControl(this, refName, relevant);
+ ctl = new RefControl(identifiedUserFactory, this, refName, relevant);
refControls.put(refName, ctl);
}
return ctl;
@@ -327,6 +333,11 @@
}
@Override
+ public ForProject absentUser(Account.Id id) {
+ return user(identifiedUserFactory.create(id));
+ }
+
+ @Override
public String resourcePath() {
if (resourcePath == null) {
resourcePath = "/projects/" + getProjectState().getName();
diff --git a/java/com/google/gerrit/server/permissions/RefControl.java b/java/com/google/gerrit/server/permissions/RefControl.java
index 28781ea..cd1f84a 100644
--- a/java/com/google/gerrit/server/permissions/RefControl.java
+++ b/java/com/google/gerrit/server/permissions/RefControl.java
@@ -21,10 +21,12 @@
import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.common.data.PermissionRule.Action;
import com.google.gerrit.extensions.restapi.AuthException;
+import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.CurrentUser;
+import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.permissions.PermissionBackend.ForChange;
import com.google.gerrit.server.permissions.PermissionBackend.ForRef;
@@ -39,6 +41,7 @@
/** Manages access control for Git references (aka branches, tags). */
class RefControl {
+ private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final ProjectControl projectControl;
private final String refName;
@@ -52,7 +55,12 @@
private Boolean canForgeCommitter;
private Boolean isVisible;
- RefControl(ProjectControl projectControl, String ref, PermissionCollection relevant) {
+ RefControl(
+ IdentifiedUser.GenericFactory identifiedUserFactory,
+ ProjectControl projectControl,
+ String ref,
+ PermissionCollection relevant) {
+ this.identifiedUserFactory = identifiedUserFactory;
this.projectControl = projectControl;
this.refName = ref;
this.relevant = relevant;
@@ -71,7 +79,7 @@
if (relevant.isUserSpecific()) {
return newCtl.controlForRef(refName);
}
- return new RefControl(newCtl, refName, relevant);
+ return new RefControl(identifiedUserFactory, newCtl, refName, relevant);
}
/** Is this user a ref owner? */
@@ -404,6 +412,11 @@
}
@Override
+ public ForRef absentUser(Account.Id id) {
+ return user(identifiedUserFactory.create(id));
+ }
+
+ @Override
public String resourcePath() {
if (resourcePath == null) {
resourcePath =
diff --git a/java/com/google/gerrit/server/restapi/RestApiModule.java b/java/com/google/gerrit/server/restapi/RestApiModule.java
new file mode 100644
index 0000000..1ba6f22
--- /dev/null
+++ b/java/com/google/gerrit/server/restapi/RestApiModule.java
@@ -0,0 +1,29 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.restapi;
+
+import com.google.inject.AbstractModule;
+
+public class RestApiModule extends AbstractModule {
+ @Override
+ protected void configure() {
+ install(new com.google.gerrit.server.restapi.access.Module());
+ install(new com.google.gerrit.server.restapi.account.Module());
+ install(new com.google.gerrit.server.restapi.change.Module());
+ install(new com.google.gerrit.server.restapi.config.Module());
+ install(new com.google.gerrit.server.restapi.group.Module());
+ install(new com.google.gerrit.server.restapi.project.Module());
+ }
+}
diff --git a/java/com/google/gerrit/server/restapi/change/PostReviewers.java b/java/com/google/gerrit/server/restapi/change/PostReviewers.java
index 46955e8..65c7db7 100644
--- a/java/com/google/gerrit/server/restapi/change/PostReviewers.java
+++ b/java/com/google/gerrit/server/restapi/change/PostReviewers.java
@@ -98,7 +98,6 @@
private final AccountLoader.Factory accountLoaderFactory;
private final Provider<ReviewDb> dbProvider;
private final ChangeData.Factory changeDataFactory;
- private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final Config cfg;
private final ReviewerJson json;
private final NotesMigration migration;
@@ -118,7 +117,6 @@
Provider<ReviewDb> db,
ChangeData.Factory changeDataFactory,
RetryHelper retryHelper,
- IdentifiedUser.GenericFactory identifiedUserFactory,
@GerritServerConfig Config cfg,
ReviewerJson json,
NotesMigration migration,
@@ -135,7 +133,6 @@
this.accountLoaderFactory = accountLoaderFactory;
this.dbProvider = db;
this.changeDataFactory = changeDataFactory;
- this.identifiedUserFactory = identifiedUserFactory;
this.cfg = cfg;
this.json = json;
this.migration = migration;
@@ -376,18 +373,18 @@
private boolean isValidReviewer(Account member, PermissionBackend.ForRef perm)
throws PermissionBackendException {
- if (member.isActive()) {
- IdentifiedUser user = identifiedUserFactory.create(member.getId());
- // Does not account for draft status as a user might want to let a
- // reviewer see a draft.
- try {
- perm.user(user).check(RefPermission.READ);
- return true;
- } catch (AuthException e) {
- return false;
- }
+ if (!member.isActive()) {
+ return false;
}
- return false;
+
+ // Does not account for draft status as a user might want to let a
+ // reviewer see a draft.
+ try {
+ perm.absentUser(member.getId()).check(RefPermission.READ);
+ return true;
+ } catch (AuthException e) {
+ return false;
+ }
}
private Addition fail(String reviewer, String error) {
@@ -464,8 +461,8 @@
if (migration.readChanges() && state == CC) {
result.ccs = Lists.newArrayListWithCapacity(opResult.addedCCs().size());
for (Account.Id accountId : opResult.addedCCs()) {
- IdentifiedUser u = identifiedUserFactory.create(accountId);
- result.ccs.add(json.format(new ReviewerInfo(accountId.get()), perm.user(u), cd));
+ result.ccs.add(
+ json.format(new ReviewerInfo(accountId.get()), perm.absentUser(accountId), cd));
}
accountLoaderFactory.create(true).fill(result.ccs);
for (Address a : reviewersByEmail) {
@@ -475,11 +472,10 @@
result.reviewers = Lists.newArrayListWithCapacity(opResult.addedReviewers().size());
for (PatchSetApproval psa : opResult.addedReviewers()) {
// New reviewers have value 0, don't bother normalizing.
- IdentifiedUser u = identifiedUserFactory.create(psa.getAccountId());
result.reviewers.add(
json.format(
new ReviewerInfo(psa.getAccountId().get()),
- perm.user(u),
+ perm.absentUser(psa.getAccountId()),
cd,
ImmutableList.of(psa)));
}
diff --git a/java/com/google/gerrit/server/restapi/change/Submit.java b/java/com/google/gerrit/server/restapi/change/Submit.java
index be63e5d..54ecd18 100644
--- a/java/com/google/gerrit/server/restapi/change/Submit.java
+++ b/java/com/google/gerrit/server/restapi/change/Submit.java
@@ -18,7 +18,6 @@
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
-import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Sets;
@@ -33,12 +32,10 @@
import com.google.gerrit.extensions.webui.UiAction;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
-import com.google.gerrit.reviewdb.client.ChangeMessage;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RevId;
import com.google.gerrit.reviewdb.server.ReviewDb;
-import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.ChangeUtil;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
@@ -119,7 +116,6 @@
private final GitRepositoryManager repoManager;
private final PermissionBackend permissionBackend;
private final ChangeData.Factory changeDataFactory;
- private final ChangeMessagesUtil cmUtil;
private final ChangeNotes.Factory changeNotesFactory;
private final Provider<MergeOp> mergeOpProvider;
private final Provider<MergeSuperSet> mergeSuperSet;
@@ -141,7 +137,6 @@
GitRepositoryManager repoManager,
PermissionBackend permissionBackend,
ChangeData.Factory changeDataFactory,
- ChangeMessagesUtil cmUtil,
ChangeNotes.Factory changeNotesFactory,
Provider<MergeOp> mergeOpProvider,
Provider<MergeSuperSet> mergeSuperSet,
@@ -154,7 +149,6 @@
this.repoManager = repoManager;
this.permissionBackend = permissionBackend;
this.changeDataFactory = changeDataFactory;
- this.cmUtil = cmUtil;
this.changeNotesFactory = changeNotesFactory;
this.mergeOpProvider = mergeOpProvider;
this.mergeSuperSet = mergeSuperSet;
@@ -237,11 +231,8 @@
case MERGED:
return change;
case NEW:
- ChangeMessage msg = getConflictMessage(rsrc);
- if (msg != null) {
- throw new ResourceConflictException(msg.getMessage());
- }
- // $FALL-THROUGH$
+ throw new RestApiException(
+ "change unexpectedly had status " + change.getStatus() + " after submit attempt");
case ABANDONED:
default:
throw new ResourceConflictException("change is " + ChangeUtil.status(change));
@@ -394,18 +385,6 @@
.setEnabled(Boolean.TRUE.equals(enabled));
}
- /**
- * If the merge was attempted and it failed the system usually writes a comment as a ChangeMessage
- * and sets status to NEW. Find the relevant message and return it.
- */
- public ChangeMessage getConflictMessage(RevisionResource rsrc) throws OrmException {
- return FluentIterable.from(
- cmUtil.byPatchSet(dbProvider.get(), rsrc.getNotes(), rsrc.getPatchSet().getId()))
- .filter(cm -> cm.getAuthor() == null)
- .last()
- .orNull();
- }
-
public Collection<ChangeData> unmergeableChanges(ChangeSet cs) throws OrmException, IOException {
Set<ChangeData> mergeabilityMap = new HashSet<>();
for (ChangeData change : cs.changes()) {
diff --git a/java/com/google/gerrit/server/restapi/config/ConfigRestModule.java b/java/com/google/gerrit/server/restapi/config/Module.java
similarity index 97%
rename from java/com/google/gerrit/server/restapi/config/ConfigRestModule.java
rename to java/com/google/gerrit/server/restapi/config/Module.java
index 0b94d16..c4a6f56 100644
--- a/java/com/google/gerrit/server/restapi/config/ConfigRestModule.java
+++ b/java/com/google/gerrit/server/restapi/config/Module.java
@@ -22,7 +22,7 @@
import com.google.gerrit.server.config.CapabilityResource;
import com.google.gerrit.server.config.TopMenuResource;
-public class ConfigRestModule extends RestApiModule {
+public class Module extends RestApiModule {
@Override
protected void configure() {
DynamicMap.mapOf(binder(), CapabilityResource.CAPABILITY_KIND);
diff --git a/java/com/google/gerrit/testing/BUILD b/java/com/google/gerrit/testing/BUILD
index f2fe4c2..875d636 100644
--- a/java/com/google/gerrit/testing/BUILD
+++ b/java/com/google/gerrit/testing/BUILD
@@ -29,9 +29,10 @@
"//java/com/google/gerrit/server/cache/mem",
"//java/com/google/gerrit/server/restapi",
"//java/com/google/gerrit/server/schema",
+ "//lib:guava",
"//lib:gwtorm",
"//lib:h2",
- "//lib:truth",
+ "//lib:junit",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/guice",
@@ -39,5 +40,6 @@
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:api",
+ "//lib/truth",
],
)
diff --git a/java/com/google/gerrit/testing/InMemoryModule.java b/java/com/google/gerrit/testing/InMemoryModule.java
index 7d79829..b472857 100644
--- a/java/com/google/gerrit/testing/InMemoryModule.java
+++ b/java/com/google/gerrit/testing/InMemoryModule.java
@@ -77,6 +77,7 @@
import com.google.gerrit.server.plugins.PluginRestApiModule;
import com.google.gerrit.server.plugins.ServerInformationImpl;
import com.google.gerrit.server.project.DefaultProjectNameLockManager;
+import com.google.gerrit.server.restapi.RestApiModule;
import com.google.gerrit.server.schema.DataSourceType;
import com.google.gerrit.server.schema.InMemoryAccountPatchReviewStore;
import com.google.gerrit.server.schema.NotesMigrationSchemaFactory;
@@ -262,6 +263,7 @@
}
bind(ServerInformationImpl.class);
bind(ServerInformation.class).to(ServerInformationImpl.class);
+ install(new RestApiModule());
install(new PluginRestApiModule());
install(new DefaultProjectNameLockManager.Module());
}
diff --git a/java/com/google/gerrit/truth/BUILD b/java/com/google/gerrit/truth/BUILD
index a0e2ee9..719ddce 100644
--- a/java/com/google/gerrit/truth/BUILD
+++ b/java/com/google/gerrit/truth/BUILD
@@ -4,6 +4,7 @@
srcs = glob(["**/*.java"]),
visibility = ["//visibility:public"],
deps = [
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/acceptance/BUILD b/javatests/com/google/gerrit/acceptance/BUILD
index 234e4be..9246abb 100644
--- a/javatests/com/google/gerrit/acceptance/BUILD
+++ b/javatests/com/google/gerrit/acceptance/BUILD
@@ -6,7 +6,7 @@
deps = [
"//java/com/google/gerrit/acceptance:lib",
"//lib:guava",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/acceptance/api/group/BUILD b/javatests/com/google/gerrit/acceptance/api/group/BUILD
index 21294f5..a0b70cc 100644
--- a/javatests/com/google/gerrit/acceptance/api/group/BUILD
+++ b/javatests/com/google/gerrit/acceptance/api/group/BUILD
@@ -21,6 +21,6 @@
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/BUILD b/javatests/com/google/gerrit/acceptance/rest/project/BUILD
index 0720fb3..dad3ca9 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/BUILD
+++ b/javatests/com/google/gerrit/acceptance/rest/project/BUILD
@@ -18,7 +18,8 @@
],
deps = [
"//java/com/google/gerrit/extensions:api",
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
@@ -31,8 +32,9 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/common/BUILD b/javatests/com/google/gerrit/common/BUILD
index ff19646..ba9a5bc 100644
--- a/javatests/com/google/gerrit/common/BUILD
+++ b/javatests/com/google/gerrit/common/BUILD
@@ -15,7 +15,7 @@
"//java/com/google/gerrit/common:client",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
+ "//lib/truth",
],
)
@@ -28,8 +28,8 @@
"//java/com/google/gerrit/common:version",
"//java/com/google/gerrit/launcher",
"//lib:guava",
- "//lib:truth",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/elasticsearch/BUILD b/javatests/com/google/gerrit/elasticsearch/BUILD
index 70d7089..a2f5229 100644
--- a/javatests/com/google/gerrit/elasticsearch/BUILD
+++ b/javatests/com/google/gerrit/elasticsearch/BUILD
@@ -14,10 +14,10 @@
"//lib:gson",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/elasticsearch",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/extensions/BUILD b/javatests/com/google/gerrit/extensions/BUILD
index 2557750..069c915 100644
--- a/javatests/com/google/gerrit/extensions/BUILD
+++ b/javatests/com/google/gerrit/extensions/BUILD
@@ -7,7 +7,7 @@
deps = [
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/extensions/common/testing:common-test-util",
- "//lib:truth",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/extensions/conditions/BUILD b/javatests/com/google/gerrit/extensions/conditions/BUILD
index aebe347..e2d5951 100644
--- a/javatests/com/google/gerrit/extensions/conditions/BUILD
+++ b/javatests/com/google/gerrit/extensions/conditions/BUILD
@@ -5,6 +5,6 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/extensions:lib",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/git/testing/BUILD b/javatests/com/google/gerrit/git/testing/BUILD
index 13eb5bf..56e9ec2 100644
--- a/javatests/com/google/gerrit/git/testing/BUILD
+++ b/javatests/com/google/gerrit/git/testing/BUILD
@@ -5,6 +5,6 @@
srcs = glob(["*.java"]),
deps = [
"//java/com/google/gerrit/git/testing",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/gpg/BUILD b/javatests/com/google/gerrit/gpg/BUILD
index 5cc9ae8..ab66f9a 100644
--- a/javatests/com/google/gerrit/gpg/BUILD
+++ b/javatests/com/google/gerrit/gpg/BUILD
@@ -20,7 +20,6 @@
"//java/com/google/gerrit/testing:gerrit-test-util",
"//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/bouncycastle:bcpg",
"//lib/bouncycastle:bcpg-neverlink",
"//lib/bouncycastle:bcprov",
@@ -30,5 +29,6 @@
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
"//lib/log:api",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/httpd/BUILD b/javatests/com/google/gerrit/httpd/BUILD
index e2f2a45..ec2df15 100644
--- a/javatests/com/google/gerrit/httpd/BUILD
+++ b/javatests/com/google/gerrit/httpd/BUILD
@@ -19,11 +19,11 @@
"//lib:junit",
"//lib:servlet-api-3_1-without-neverlink",
"//lib:soy",
- "//lib:truth",
"//lib/easymock",
"//lib/guice",
"//lib/guice:guice-servlet",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/index/BUILD b/javatests/com/google/gerrit/index/BUILD
index bd79860..d905188 100644
--- a/javatests/com/google/gerrit/index/BUILD
+++ b/javatests/com/google/gerrit/index/BUILD
@@ -9,9 +9,10 @@
"//java/com/google/gerrit/index",
"//java/com/google/gerrit/index:query_exception",
"//java/com/google/gerrit/index:query_parser",
+ "//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/antlr:java_runtime",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/metrics/proc/BUILD b/javatests/com/google/gerrit/metrics/proc/BUILD
index 8e50cf6..91e5cf6 100644
--- a/javatests/com/google/gerrit/metrics/proc/BUILD
+++ b/javatests/com/google/gerrit/metrics/proc/BUILD
@@ -9,8 +9,8 @@
"//java/com/google/gerrit/lifecycle",
"//java/com/google/gerrit/metrics",
"//java/com/google/gerrit/metrics/dropwizard",
- "//lib:truth",
"//lib/dropwizard:dropwizard-core",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/pgm/BUILD b/javatests/com/google/gerrit/pgm/BUILD
index af0bea6..e4afae2 100644
--- a/javatests/com/google/gerrit/pgm/BUILD
+++ b/javatests/com/google/gerrit/pgm/BUILD
@@ -13,11 +13,11 @@
"//java/com/google/gerrit/server",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/easymock",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/reviewdb/BUILD b/javatests/com/google/gerrit/reviewdb/BUILD
index a7b9b51..0fd140e 100644
--- a/javatests/com/google/gerrit/reviewdb/BUILD
+++ b/javatests/com/google/gerrit/reviewdb/BUILD
@@ -7,7 +7,8 @@
"//java/com/google/gerrit/reviewdb:client",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/BUILD b/javatests/com/google/gerrit/server/BUILD
index 3864676..3113a8a 100644
--- a/javatests/com/google/gerrit/server/BUILD
+++ b/javatests/com/google/gerrit/server/BUILD
@@ -12,7 +12,8 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/truth",
- "//lib:truth",
+ "//lib:guava",
+ "//lib/truth",
],
)
@@ -42,6 +43,7 @@
"//java/com/google/gerrit/metrics",
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
+ "//java/com/google/gerrit/server/cache/testing",
"//java/com/google/gerrit/server/group/testing",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/server/restapi",
@@ -51,16 +53,19 @@
"//java/org/eclipse/jgit:server",
"//lib:grappa",
"//lib:gson",
+ "//lib:guava",
"//lib:guava-retrying",
"//lib:gwtorm",
"//lib:protobuf",
- "//lib:truth-java8-extension",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/commons:codec",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
+ "//lib/truth:truth-proto-extension",
"//proto:cache_java_proto",
],
)
diff --git a/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
new file mode 100644
index 0000000..586c065
--- /dev/null
+++ b/javatests/com/google/gerrit/server/auth/oauth/OAuthTokenCacheTest.java
@@ -0,0 +1,74 @@
+package com.google.gerrit.server.auth.oauth;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.extensions.auth.oauth.OAuthToken;
+import com.google.gerrit.server.cache.CacheSerializer;
+import com.google.gerrit.server.cache.proto.Cache.OAuthTokenProto;
+import java.lang.reflect.Type;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public final class OAuthTokenCacheTest {
+ @Test
+ public void oAuthTokenSerializer() throws Exception {
+ OAuthToken token = new OAuthToken("token", "secret", "raw", 12345L, "provider");
+ CacheSerializer<OAuthToken> s = new OAuthTokenCache.Serializer();
+ byte[] serialized = s.serialize(token);
+ assertThat(OAuthTokenProto.parseFrom(serialized))
+ .isEqualTo(
+ OAuthTokenProto.newBuilder()
+ .setToken("token")
+ .setSecret("secret")
+ .setRaw("raw")
+ .setExpiresAt(12345L)
+ .setProviderId("provider")
+ .build());
+ assertThat(s.deserialize(serialized)).isEqualTo(token);
+ }
+
+ @Test
+ public void oAuthTokenSerializerWithNullProvider() throws Exception {
+ OAuthToken tokenWithNull = new OAuthToken("token", "secret", "raw", 12345L, null);
+ CacheSerializer<OAuthToken> s = new OAuthTokenCache.Serializer();
+ OAuthTokenProto expectedProto =
+ OAuthTokenProto.newBuilder()
+ .setToken("token")
+ .setSecret("secret")
+ .setRaw("raw")
+ .setExpiresAt(12345L)
+ .setProviderId("")
+ .build();
+
+ byte[] serializedWithNull = s.serialize(tokenWithNull);
+ assertThat(OAuthTokenProto.parseFrom(serializedWithNull)).isEqualTo(expectedProto);
+ assertThat(s.deserialize(serializedWithNull)).isEqualTo(tokenWithNull);
+
+ OAuthToken tokenWithEmptyString = new OAuthToken("token", "secret", "raw", 12345L, "");
+ assertThat(tokenWithEmptyString).isEqualTo(tokenWithNull);
+ byte[] serializedWithEmptyString = s.serialize(tokenWithEmptyString);
+ assertThat(OAuthTokenProto.parseFrom(serializedWithEmptyString)).isEqualTo(expectedProto);
+ assertThat(s.deserialize(serializedWithEmptyString)).isEqualTo(tokenWithNull);
+ }
+
+ /**
+ * See {@link com.google.gerrit.server.cache.testing.SerializedClassSubject} for background and
+ * what to do if this test fails.
+ */
+ @Test
+ public void oAuthTokenFields() throws Exception {
+ assertThatSerializedClass(OAuthToken.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("token", String.class)
+ .put("secret", String.class)
+ .put("raw", String.class)
+ .put("expiresAt", long.class)
+ .put("providerId", String.class)
+ .build());
+ }
+}
diff --git a/javatests/com/google/gerrit/server/cache/BUILD b/javatests/com/google/gerrit/server/cache/BUILD
index eed4a87..278330b 100644
--- a/javatests/com/google/gerrit/server/cache/BUILD
+++ b/javatests/com/google/gerrit/server/cache/BUILD
@@ -6,9 +6,11 @@
deps = [
"//java/com/google/gerrit/server",
"//lib:guava",
+ "//lib:gwtorm",
"//lib:junit",
- "//lib:truth",
+ "//lib:protobuf",
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/cache/BooleanCacheSerializerTest.java b/javatests/com/google/gerrit/server/cache/BooleanCacheSerializerTest.java
new file mode 100644
index 0000000..3186620
--- /dev/null
+++ b/javatests/com/google/gerrit/server/cache/BooleanCacheSerializerTest.java
@@ -0,0 +1,62 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import com.google.protobuf.TextFormat;
+import org.junit.Test;
+
+public class BooleanCacheSerializerTest {
+ @Test
+ public void serialize() throws Exception {
+ assertThat(BooleanCacheSerializer.INSTANCE.serialize(true))
+ .isEqualTo(new byte[] {'t', 'r', 'u', 'e'});
+ assertThat(BooleanCacheSerializer.INSTANCE.serialize(false))
+ .isEqualTo(new byte[] {'f', 'a', 'l', 's', 'e'});
+ }
+
+ @Test
+ public void deserialize() throws Exception {
+ assertThat(BooleanCacheSerializer.INSTANCE.deserialize(new byte[] {'t', 'r', 'u', 'e'}))
+ .isEqualTo(true);
+ assertThat(BooleanCacheSerializer.INSTANCE.deserialize(new byte[] {'f', 'a', 'l', 's', 'e'}))
+ .isEqualTo(false);
+ }
+
+ @Test
+ public void deserializeInvalid() throws Exception {
+ assertDeserializeFails(null);
+ assertDeserializeFails("t".getBytes(UTF_8));
+ assertDeserializeFails("tru".getBytes(UTF_8));
+ assertDeserializeFails("trueee".getBytes(UTF_8));
+ assertDeserializeFails("TRUE".getBytes(UTF_8));
+ assertDeserializeFails("f".getBytes(UTF_8));
+ assertDeserializeFails("fal".getBytes(UTF_8));
+ assertDeserializeFails("falseee".getBytes(UTF_8));
+ assertDeserializeFails("FALSE".getBytes(UTF_8));
+ }
+
+ private static void assertDeserializeFails(byte[] in) {
+ try {
+ BooleanCacheSerializer.INSTANCE.deserialize(in);
+ assert_().fail("expected deserialization to fail for \"%s\"", TextFormat.escapeBytes(in));
+ } catch (RuntimeException e) {
+ // Expected.
+ }
+ }
+}
diff --git a/javatests/com/google/gerrit/server/cache/EnumCacheSerializerTest.java b/javatests/com/google/gerrit/server/cache/EnumCacheSerializerTest.java
index 0e04d32..60bbb16 100644
--- a/javatests/com/google/gerrit/server/cache/EnumCacheSerializerTest.java
+++ b/javatests/com/google/gerrit/server/cache/EnumCacheSerializerTest.java
@@ -15,6 +15,8 @@
package com.google.gerrit.server.cache;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+import static java.nio.charset.StandardCharsets.UTF_8;
import org.junit.Test;
@@ -26,6 +28,14 @@
assertRoundTrip(MyEnum.BAZ);
}
+ @Test
+ public void deserializeInvalidValues() throws Exception {
+ assertDeserializeFails(null);
+ assertDeserializeFails("".getBytes(UTF_8));
+ assertDeserializeFails("foo".getBytes(UTF_8));
+ assertDeserializeFails("QUUX".getBytes(UTF_8));
+ }
+
private enum MyEnum {
FOO,
BAR,
@@ -36,4 +46,14 @@
CacheSerializer<MyEnum> s = new EnumCacheSerializer<>(MyEnum.class);
assertThat(s.deserialize(s.serialize(e))).isEqualTo(e);
}
+
+ private static void assertDeserializeFails(byte[] in) {
+ CacheSerializer<MyEnum> s = new EnumCacheSerializer<>(MyEnum.class);
+ try {
+ s.deserialize(in);
+ assert_().fail("expected RuntimeException");
+ } catch (RuntimeException e) {
+ // Expected.
+ }
+ }
}
diff --git a/javatests/com/google/gerrit/server/cache/IntKeyCacheSerializerTest.java b/javatests/com/google/gerrit/server/cache/IntKeyCacheSerializerTest.java
new file mode 100644
index 0000000..7a7c27c
--- /dev/null
+++ b/javatests/com/google/gerrit/server/cache/IntKeyCacheSerializerTest.java
@@ -0,0 +1,66 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+
+import com.google.gwtorm.client.IntKey;
+import com.google.gwtorm.client.Key;
+import org.junit.Test;
+
+public class IntKeyCacheSerializerTest {
+
+ private static class MyIntKey extends IntKey<Key<?>> {
+ private static final long serialVersionUID = 1L;
+
+ private int val;
+
+ MyIntKey(int val) {
+ this.val = val;
+ }
+
+ @Override
+ public int get() {
+ return val;
+ }
+
+ @Override
+ protected void set(int newValue) {
+ this.val = newValue;
+ }
+ }
+
+ private static final IntKeyCacheSerializer<MyIntKey> SERIALIZER =
+ new IntKeyCacheSerializer<>(MyIntKey::new);
+
+ @Test
+ public void serialize() throws Exception {
+ MyIntKey k = new MyIntKey(1234);
+ byte[] serialized = SERIALIZER.serialize(k);
+ assertThat(serialized).isEqualTo(new byte[] {-46, 9});
+ assertThat(SERIALIZER.deserialize(serialized).get()).isEqualTo(1234);
+ }
+
+ @Test
+ public void deserializeNullFails() throws Exception {
+ try {
+ SERIALIZER.deserialize(null);
+ assert_().fail("expected RuntimeException");
+ } catch (RuntimeException e) {
+ // Expected.
+ }
+ }
+}
diff --git a/javatests/com/google/gerrit/server/cache/IntegerCacheSerializerTest.java b/javatests/com/google/gerrit/server/cache/IntegerCacheSerializerTest.java
new file mode 100644
index 0000000..962b797
--- /dev/null
+++ b/javatests/com/google/gerrit/server/cache/IntegerCacheSerializerTest.java
@@ -0,0 +1,64 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.cache;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.primitives.Bytes;
+import com.google.protobuf.TextFormat;
+import org.junit.Test;
+
+public class IntegerCacheSerializerTest {
+ @Test
+ public void serialize() throws Exception {
+ for (int i :
+ ImmutableList.of(
+ Integer.MIN_VALUE,
+ Integer.MIN_VALUE + 20,
+ -1,
+ 0,
+ 1,
+ Integer.MAX_VALUE - 20,
+ Integer.MAX_VALUE)) {
+ assertRoundTrip(i);
+ }
+ }
+
+ @Test
+ public void deserializeInvalidValues() throws Exception {
+ assertDeserializeFails(null);
+ assertDeserializeFails(
+ Bytes.concat(IntegerCacheSerializer.INSTANCE.serialize(1), new byte[] {0, 0, 0, 0}));
+ }
+
+ private static void assertRoundTrip(int i) throws Exception {
+ byte[] serialized = IntegerCacheSerializer.INSTANCE.serialize(i);
+ int result = IntegerCacheSerializer.INSTANCE.deserialize(serialized);
+ assertThat(result)
+ .named("round-trip of %s via \"%s\"", i, TextFormat.escapeBytes(serialized))
+ .isEqualTo(i);
+ }
+
+ private static void assertDeserializeFails(byte[] in) {
+ try {
+ IntegerCacheSerializer.INSTANCE.deserialize(in);
+ assert_().fail("expected RuntimeException");
+ } catch (RuntimeException e) {
+ // Expected.
+ }
+ }
+}
diff --git a/javatests/com/google/gerrit/server/cache/h2/BUILD b/javatests/com/google/gerrit/server/cache/h2/BUILD
index e2b9257..63ae94b 100644
--- a/javatests/com/google/gerrit/server/cache/h2/BUILD
+++ b/javatests/com/google/gerrit/server/cache/h2/BUILD
@@ -9,7 +9,7 @@
"//lib:guava",
"//lib:h2",
"//lib:junit",
- "//lib:truth",
"//lib/guice",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
index 4470f55..5b77094 100644
--- a/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
+++ b/javatests/com/google/gerrit/server/change/ChangeKindCacheImplTest.java
@@ -15,10 +15,12 @@
package com.google.gerrit.server.change;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+import com.google.common.collect.ImmutableMap;
import com.google.gerrit.server.cache.CacheSerializer;
import com.google.gerrit.server.cache.proto.Cache.ChangeKindKeyProto;
-import com.google.protobuf.ByteString;
import org.eclipse.jgit.lib.ObjectId;
import org.junit.Test;
@@ -45,11 +47,15 @@
assertThat(s.deserialize(serialized)).isEqualTo(key);
}
- private static ByteString bytes(int... ints) {
- byte[] bytes = new byte[ints.length];
- for (int i = 0; i < ints.length; i++) {
- bytes[i] = (byte) ints[i];
- }
- return ByteString.copyFrom(bytes);
+ /**
+ * See {@link com.google.gerrit.server.cache.testing.SerializedClassSubject} for background and
+ * what to do if this test fails.
+ */
+ @Test
+ public void keyFields() throws Exception {
+ assertThatSerializedClass(ChangeKindCacheImpl.Key.class)
+ .hasFields(
+ ImmutableMap.of(
+ "prior", ObjectId.class, "next", ObjectId.class, "strategyName", String.class));
}
}
diff --git a/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
new file mode 100644
index 0000000..69fc531
--- /dev/null
+++ b/javatests/com/google/gerrit/server/change/MergeabilityCacheImplTest.java
@@ -0,0 +1,69 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.change;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.extensions.client.SubmitType;
+import com.google.gerrit.server.cache.proto.Cache.MergeabilityKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public class MergeabilityCacheImplTest {
+ @Test
+ public void keySerializer() throws Exception {
+ MergeabilityCacheImpl.EntryKey key =
+ new MergeabilityCacheImpl.EntryKey(
+ ObjectId.fromString("badc0feebadc0feebadc0feebadc0feebadc0fee"),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"),
+ SubmitType.MERGE_IF_NECESSARY,
+ "aStrategy");
+ byte[] serialized = MergeabilityCacheImpl.EntryKey.Serializer.INSTANCE.serialize(key);
+ assertThat(MergeabilityKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ MergeabilityKeyProto.newBuilder()
+ .setCommit(
+ bytes(
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee,
+ 0xba, 0xdc, 0x0f, 0xee, 0xba, 0xdc, 0x0f, 0xee))
+ .setInto(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .setSubmitType("MERGE_IF_NECESSARY")
+ .setMergeStrategy("aStrategy")
+ .build());
+ assertThat(MergeabilityCacheImpl.EntryKey.Serializer.INSTANCE.deserialize(serialized))
+ .isEqualTo(key);
+ }
+
+ /**
+ * See {@link com.google.gerrit.server.cache.testing.SerializedClassSubject} for background and
+ * what to do if this test fails.
+ */
+ @Test
+ public void keyFields() throws Exception {
+ assertThatSerializedClass(MergeabilityCacheImpl.EntryKey.class)
+ .hasFields(
+ ImmutableMap.of(
+ "commit", ObjectId.class,
+ "into", ObjectId.class,
+ "submitType", SubmitType.class,
+ "mergeStrategy", String.class));
+ }
+}
diff --git a/javatests/com/google/gerrit/server/config/ListCapabilitiesTest.java b/javatests/com/google/gerrit/server/config/ListCapabilitiesTest.java
index 935dfc6..fd9c925 100644
--- a/javatests/com/google/gerrit/server/config/ListCapabilitiesTest.java
+++ b/javatests/com/google/gerrit/server/config/ListCapabilitiesTest.java
@@ -87,7 +87,7 @@
}
@Override
- public WithUser absentUser(Id user) {
+ public WithUser absentUser(Id id) {
throw new UnsupportedOperationException();
}
diff --git a/javatests/com/google/gerrit/server/extensions/webui/UiActionsTest.java b/javatests/com/google/gerrit/server/extensions/webui/UiActionsTest.java
index d242962..834f658 100644
--- a/javatests/com/google/gerrit/server/extensions/webui/UiActionsTest.java
+++ b/javatests/com/google/gerrit/server/extensions/webui/UiActionsTest.java
@@ -78,6 +78,11 @@
}
@Override
+ public ForProject absentUser(Account.Id id) {
+ throw new UnsupportedOperationException("not implemented");
+ }
+
+ @Override
public ForRef ref(String ref) {
throw new UnsupportedOperationException("not implemented");
}
diff --git a/javatests/com/google/gerrit/server/group/db/BUILD b/javatests/com/google/gerrit/server/group/db/BUILD
index 48e8d303..eee5529 100644
--- a/javatests/com/google/gerrit/server/group/db/BUILD
+++ b/javatests/com/google/gerrit/server/group/db/BUILD
@@ -16,9 +16,10 @@
"//java/com/google/gerrit/server/group/testing",
"//java/com/google/gerrit/testing:gerrit-test-util",
"//java/com/google/gerrit/truth",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
new file mode 100644
index 0000000..5a7d812
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesCacheTest.java
@@ -0,0 +1,60 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.server.cache.testing.CacheSerializerTestUtil.bytes;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.Project;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesKeyProto;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Test;
+
+public final class ChangeNotesCacheTest {
+ @Test
+ public void keySerializer() throws Exception {
+ ChangeNotesCache.Key key =
+ ChangeNotesCache.Key.create(
+ new Project.NameKey("project"),
+ new Change.Id(1234),
+ ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"));
+ byte[] serialized = ChangeNotesCache.Key.Serializer.INSTANCE.serialize(key);
+ assertThat(ChangeNotesKeyProto.parseFrom(serialized))
+ .isEqualTo(
+ ChangeNotesKeyProto.newBuilder()
+ .setProject("project")
+ .setChangeId(1234)
+ .setId(
+ bytes(
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef,
+ 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef))
+ .build());
+ assertThat(ChangeNotesCache.Key.Serializer.INSTANCE.deserialize(serialized)).isEqualTo(key);
+ }
+
+ @Test
+ public void keyMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesCache.Key.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "project", Project.NameKey.class,
+ "changeId", Change.Id.class,
+ "id", ObjectId.class));
+ }
+}
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
index 9b7aad2..b8f544a 100644
--- a/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesParserTest.java
@@ -442,17 +442,17 @@
// Change created in WIP remains in WIP.
RevCommit commit = writeCommit("Update WIP change\n" + "\n" + "Patch-set: 1\n", true);
ChangeNotesState state = newParser(commit).parseAll();
- assertThat(state.hasReviewStarted()).isFalse();
+ assertThat(state.columns().reviewStarted()).isFalse();
// Moving change out of WIP starts review.
commit =
writeCommit("New ready change\n" + "\n" + "Patch-set: 1\n" + "Work-in-progress: false\n");
state = newParser(commit).parseAll();
- assertThat(state.hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
// Change created not in WIP has always been in review started state.
state = assertParseSucceeds("New change that doesn't declare WIP\n" + "\n" + "Patch-set: 1\n");
- assertThat(state.hasReviewStarted()).isTrue();
+ assertThat(state.columns().reviewStarted()).isTrue();
}
@Test
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
new file mode 100644
index 0000000..c0f2c43
--- /dev/null
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesStateTest.java
@@ -0,0 +1,957 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.notedb;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.APPROVAL_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.MESSAGE_CODEC;
+import static com.google.gerrit.reviewdb.server.ReviewDbCodecs.PATCH_SET_CODEC;
+import static com.google.gerrit.server.cache.testing.SerializedClassSubject.assertThatSerializedClass;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableListMultimap;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
+import com.google.gerrit.common.data.SubmitRecord;
+import com.google.gerrit.common.data.SubmitRequirement;
+import com.google.gerrit.reviewdb.client.Account;
+import com.google.gerrit.reviewdb.client.Change;
+import com.google.gerrit.reviewdb.client.ChangeMessage;
+import com.google.gerrit.reviewdb.client.Comment;
+import com.google.gerrit.reviewdb.client.LabelId;
+import com.google.gerrit.reviewdb.client.PatchSet;
+import com.google.gerrit.reviewdb.client.PatchSetApproval;
+import com.google.gerrit.reviewdb.client.RevId;
+import com.google.gerrit.server.ReviewerByEmailSet;
+import com.google.gerrit.server.ReviewerSet;
+import com.google.gerrit.server.ReviewerStatusUpdate;
+import com.google.gerrit.server.cache.ProtoCacheSerializers;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ChangeColumnsProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerByEmailSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerSetEntryProto;
+import com.google.gerrit.server.cache.proto.Cache.ChangeNotesStateProto.ReviewerStatusUpdateProto;
+import com.google.gerrit.server.mail.Address;
+import com.google.gerrit.server.notedb.ChangeNotesState.ChangeColumns;
+import com.google.gerrit.server.notedb.ChangeNotesState.Serializer;
+import com.google.gwtorm.client.KeyUtil;
+import com.google.gwtorm.protobuf.ProtobufCodec;
+import com.google.gwtorm.server.StandardKeyEncoder;
+import com.google.inject.TypeLiteral;
+import com.google.protobuf.ByteString;
+import java.lang.reflect.Type;
+import java.sql.Timestamp;
+import java.util.List;
+import java.util.Map;
+import org.eclipse.jgit.lib.Constants;
+import org.eclipse.jgit.lib.ObjectId;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ChangeNotesStateTest {
+ static {
+ KeyUtil.setEncoderImpl(new StandardKeyEncoder());
+ }
+
+ private static final Change.Id ID = new Change.Id(123);
+ private static final ObjectId SHA =
+ ObjectId.fromString("1234567812345678123456781234567812345678");
+ private static final ByteString SHA_BYTES = toByteString(SHA);
+ private static final String CHANGE_KEY = "Iabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd";
+
+ private ChangeColumns cols;
+ private ChangeColumnsProto colsProto;
+
+ @Before
+ public void setUp() throws Exception {
+ cols =
+ ChangeColumns.builder()
+ .changeKey(new Change.Key(CHANGE_KEY))
+ .createdOn(new Timestamp(123456L))
+ .lastUpdatedOn(new Timestamp(234567L))
+ .owner(new Account.Id(1000))
+ .branch("refs/heads/master")
+ .subject("Test change")
+ .isPrivate(false)
+ .workInProgress(false)
+ .reviewStarted(true)
+ .build();
+ colsProto = toProto(newBuilder().build()).getColumns();
+ }
+
+ private ChangeNotesState.Builder newBuilder() {
+ return ChangeNotesState.Builder.empty(ID).metaId(SHA).columns(cols);
+ }
+
+ @Test
+ public void serializeChangeKey() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(
+ cols.toBuilder()
+ .changeKey(new Change.Key("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto.toBuilder().setChangeKey("Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"))
+ .build());
+ }
+
+ @Test
+ public void serializeCreatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().createdOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCreatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeLastUpdatedOn() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().lastUpdatedOn(new Timestamp(98765L)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setLastUpdatedOn(98765L))
+ .build());
+ }
+
+ @Test
+ public void serializeOwner() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().owner(new Account.Id(7777)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setOwner(7777))
+ .build());
+ }
+
+ @Test
+ public void serializeBranch() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().branch("refs/heads/bar").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setBranch("refs/heads/bar"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().subject("A different test change").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubject("A different test change"))
+ .build());
+ }
+
+ @Test
+ public void serializeCurrentPatchSetId() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().currentPatchSetId(new PatchSet.Id(ID, 2)).build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setCurrentPatchSetId(2).setHasCurrentPatchSetId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNullTopic() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().topic(null).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .build());
+ }
+
+ @Test
+ public void serializeEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeNonEmptyTopic() throws Exception {
+ ChangeNotesState state = newBuilder().columns(cols.toBuilder().topic("topic").build()).build();
+ assertRoundTrip(
+ state,
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setTopic("topic").setHasTopic(true))
+ .build());
+ }
+
+ @Test
+ public void serializeOriginalSubject() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .columns(cols.toBuilder().originalSubject("The first patch set").build())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(
+ colsProto
+ .toBuilder()
+ .setOriginalSubject("The first patch set")
+ .setHasOriginalSubject(true))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmissionId() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().submissionId("xyz").build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setSubmissionId("xyz").setHasSubmissionId(true))
+ .build());
+ }
+
+ @Test
+ public void serializeAssignee() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().assignee(new Account.Id(2000)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setAssignee(2000).setHasAssignee(true))
+ .build());
+ }
+
+ @Test
+ public void serializeStatus() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().status(Change.Status.MERGED).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setStatus("MERGED").setHasStatus(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsPrivate() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().isPrivate(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setIsPrivate(true))
+ .build());
+ }
+
+ @Test
+ public void serializeIsWorkInProgress() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().workInProgress(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setWorkInProgress(true))
+ .build());
+ }
+
+ @Test
+ public void serializeHasReviewStarted() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().reviewStarted(true).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setReviewStarted(true))
+ .build());
+ }
+
+ @Test
+ public void serializeRevertOf() throws Exception {
+ assertRoundTrip(
+ newBuilder().columns(cols.toBuilder().revertOf(new Change.Id(999)).build()).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto.toBuilder().setRevertOf(999).setHasRevertOf(true))
+ .build());
+ }
+
+ @Test
+ public void serializePastAssignees() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pastAssignees(ImmutableSet.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastAssignee(2002)
+ .addPastAssignee(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeHashtags() throws Exception {
+ assertRoundTrip(
+ newBuilder().hashtags(ImmutableSet.of("tag2", "tag1")).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addHashtag("tag2")
+ .addHashtag("tag1")
+ .build());
+ }
+
+ @Test
+ public void serializePatchSets() throws Exception {
+ PatchSet ps1 = new PatchSet(new PatchSet.Id(ID, 1));
+ ps1.setUploader(new Account.Id(2000));
+ ps1.setRevision(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ ps1.setCreatedOn(cols.createdOn());
+ ByteString ps1Bytes = toByteString(ps1, PATCH_SET_CODEC);
+ assertThat(ps1Bytes.size()).isEqualTo(66);
+
+ PatchSet ps2 = new PatchSet(new PatchSet.Id(ID, 2));
+ ps2.setUploader(new Account.Id(3000));
+ ps2.setRevision(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ ps2.setCreatedOn(cols.lastUpdatedOn());
+ ByteString ps2Bytes = toByteString(ps2, PATCH_SET_CODEC);
+ assertThat(ps2Bytes.size()).isEqualTo(66);
+ assertThat(ps2Bytes).isNotEqualTo(ps1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .patchSets(ImmutableMap.of(ps2.getId(), ps2, ps1.getId(), ps1).entrySet())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPatchSet(ps2Bytes)
+ .addPatchSet(ps1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeApprovals() throws Exception {
+ PatchSetApproval a1 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2001), new LabelId("Code-Review")),
+ (short) 1,
+ new Timestamp(1212L));
+ ByteString a1Bytes = toByteString(a1, APPROVAL_CODEC);
+ assertThat(a1Bytes.size()).isEqualTo(43);
+
+ PatchSetApproval a2 =
+ new PatchSetApproval(
+ new PatchSetApproval.Key(
+ new PatchSet.Id(ID, 1), new Account.Id(2002), new LabelId("Verified")),
+ (short) -1,
+ new Timestamp(3434L));
+ ByteString a2Bytes = toByteString(a2, APPROVAL_CODEC);
+ assertThat(a2Bytes.size()).isEqualTo(49);
+ assertThat(a2Bytes).isNotEqualTo(a1Bytes);
+
+ assertRoundTrip(
+ newBuilder()
+ .approvals(
+ ImmutableListMultimap.of(a2.getPatchSetId(), a2, a1.getPatchSetId(), a1).entries())
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addApproval(a2Bytes)
+ .addApproval(a1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeReviewersByEmailWithNullName() throws Exception {
+ ChangeNotesState actual =
+ assertRoundTrip(
+ newBuilder()
+ .reviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.of(
+ ReviewerStateInternal.CC,
+ new Address("emailonly@example.com"),
+ new Timestamp(1212L))))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("emailonly@example.com")
+ .setTimestamp(1212L))
+ .build());
+
+ // Address doesn't consider the name field in equals, so we have to check it manually.
+ // TODO(dborowitz): Fix Address#equals.
+ ImmutableSet<Address> ccs = actual.reviewersByEmail().byState(ReviewerStateInternal.CC);
+ assertThat(ccs).hasSize(1);
+ Address address = Iterables.getOnlyElement(ccs);
+ assertThat(address.getName()).isNull();
+ assertThat(address.getEmail()).isEqualTo("emailonly@example.com");
+ }
+
+ @Test
+ public void serializePendingReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewers(
+ ReviewerSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Account.Id, Timestamp>builder()
+ .put(ReviewerStateInternal.CC, new Account.Id(2001), new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Account.Id(2002),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAccountId(2001)
+ .setTimestamp(1212L))
+ .addPendingReviewer(
+ ReviewerSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAccountId(2002)
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializePendingReviewersByEmail() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .pendingReviewersByEmail(
+ ReviewerByEmailSet.fromTable(
+ ImmutableTable.<ReviewerStateInternal, Address, Timestamp>builder()
+ .put(
+ ReviewerStateInternal.CC,
+ new Address("Name1", "email1@example.com"),
+ new Timestamp(1212L))
+ .put(
+ ReviewerStateInternal.REVIEWER,
+ new Address("Name2", "email2@example.com"),
+ new Timestamp(3434L))
+ .build()))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("CC")
+ .setAddress("Name1 <email1@example.com>")
+ .setTimestamp(1212L))
+ .addPendingReviewerByEmail(
+ ReviewerByEmailSetEntryProto.newBuilder()
+ .setState("REVIEWER")
+ .setAddress("Name2 <email2@example.com>")
+ .setTimestamp(3434L))
+ .build());
+ }
+
+ @Test
+ public void serializeAllPastReviewers() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .allPastReviewers(ImmutableList.of(new Account.Id(2002), new Account.Id(2001)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPastReviewer(2002)
+ .addPastReviewer(2001)
+ .build());
+ }
+
+ @Test
+ public void serializeReviewerUpdates() throws Exception {
+ assertRoundTrip(
+ newBuilder()
+ .reviewerUpdates(
+ ImmutableList.of(
+ ReviewerStatusUpdate.create(
+ new Timestamp(1212L),
+ new Account.Id(1000),
+ new Account.Id(2002),
+ ReviewerStateInternal.CC),
+ ReviewerStatusUpdate.create(
+ new Timestamp(3434L),
+ new Account.Id(1000),
+ new Account.Id(2001),
+ ReviewerStateInternal.REVIEWER)))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(1212L)
+ .setUpdatedBy(1000)
+ .setReviewer(2002)
+ .setState("CC"))
+ .addReviewerUpdate(
+ ReviewerStatusUpdateProto.newBuilder()
+ .setDate(3434L)
+ .setUpdatedBy(1000)
+ .setReviewer(2001)
+ .setState("REVIEWER"))
+ .build());
+ }
+
+ @Test
+ public void serializeSubmitRecords() throws Exception {
+ SubmitRecord sr1 = new SubmitRecord();
+ sr1.status = SubmitRecord.Status.OK;
+
+ SubmitRecord sr2 = new SubmitRecord();
+ sr2.status = SubmitRecord.Status.FORCED;
+
+ assertRoundTrip(
+ newBuilder().submitRecords(ImmutableList.of(sr2, sr1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addSubmitRecord("{\"status\":\"FORCED\"}")
+ .addSubmitRecord("{\"status\":\"OK\"}")
+ .build());
+ }
+
+ @Test
+ public void serializeChangeMessages() throws Exception {
+ ChangeMessage m1 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid1"),
+ new Account.Id(1000),
+ new Timestamp(1212L),
+ new PatchSet.Id(ID, 1));
+ ByteString m1Bytes = toByteString(m1, MESSAGE_CODEC);
+ assertThat(m1Bytes.size()).isEqualTo(35);
+
+ ChangeMessage m2 =
+ new ChangeMessage(
+ new ChangeMessage.Key(ID, "uuid2"),
+ new Account.Id(2000),
+ new Timestamp(3434L),
+ new PatchSet.Id(ID, 2));
+ ByteString m2Bytes = toByteString(m2, MESSAGE_CODEC);
+ assertThat(m2Bytes.size()).isEqualTo(35);
+ assertThat(m2Bytes).isNotEqualTo(m1Bytes);
+
+ assertRoundTrip(
+ newBuilder().changeMessages(ImmutableList.of(m2, m1)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addChangeMessage(m2Bytes)
+ .addChangeMessage(m1Bytes)
+ .build());
+ }
+
+ @Test
+ public void serializePublishedComments() throws Exception {
+ Comment c1 =
+ new Comment(
+ new Comment.Key("uuid1", "file1", 1),
+ new Account.Id(1001),
+ new Timestamp(1212L),
+ (short) 1,
+ "message 1",
+ "serverId",
+ false);
+ c1.setRevId(new RevId("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"));
+ String c1Json = Serializer.GSON.toJson(c1);
+
+ Comment c2 =
+ new Comment(
+ new Comment.Key("uuid2", "file2", 2),
+ new Account.Id(1002),
+ new Timestamp(3434L),
+ (short) 2,
+ "message 2",
+ "serverId",
+ true);
+ c2.setRevId(new RevId("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"));
+ String c2Json = Serializer.GSON.toJson(c2);
+
+ assertRoundTrip(
+ newBuilder()
+ .publishedComments(
+ ImmutableListMultimap.of(new RevId(c2.revId), c2, new RevId(c1.revId), c1))
+ .build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .addPublishedComment(c2Json)
+ .addPublishedComment(c1Json)
+ .build());
+ }
+
+ @Test
+ public void serializeReadOnlyUntil() throws Exception {
+ assertRoundTrip(
+ newBuilder().readOnlyUntil(new Timestamp(1212L)).build(),
+ ChangeNotesStateProto.newBuilder()
+ .setMetaId(SHA_BYTES)
+ .setChangeId(ID.get())
+ .setColumns(colsProto)
+ .setReadOnlyUntil(1212L)
+ .setHasReadOnlyUntil(true)
+ .build());
+ }
+
+ @Test
+ public void changeNotesStateMethods() throws Exception {
+ assertThatSerializedClass(ChangeNotesState.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("metaId", ObjectId.class)
+ .put("changeId", Change.Id.class)
+ .put("columns", ChangeColumns.class)
+ .put("pastAssignees", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType())
+ .put("hashtags", new TypeLiteral<ImmutableSet<String>>() {}.getType())
+ .put(
+ "patchSets",
+ new TypeLiteral<ImmutableList<Map.Entry<PatchSet.Id, PatchSet>>>() {}.getType())
+ .put(
+ "approvals",
+ new TypeLiteral<
+ ImmutableList<Map.Entry<PatchSet.Id, PatchSetApproval>>>() {}.getType())
+ .put("reviewers", ReviewerSet.class)
+ .put("reviewersByEmail", ReviewerByEmailSet.class)
+ .put("pendingReviewers", ReviewerSet.class)
+ .put("pendingReviewersByEmail", ReviewerByEmailSet.class)
+ .put("allPastReviewers", new TypeLiteral<ImmutableList<Account.Id>>() {}.getType())
+ .put(
+ "reviewerUpdates",
+ new TypeLiteral<ImmutableList<ReviewerStatusUpdate>>() {}.getType())
+ .put("submitRecords", new TypeLiteral<ImmutableList<SubmitRecord>>() {}.getType())
+ .put("changeMessages", new TypeLiteral<ImmutableList<ChangeMessage>>() {}.getType())
+ .put(
+ "publishedComments",
+ new TypeLiteral<ImmutableListMultimap<RevId, Comment>>() {}.getType())
+ .put("readOnlyUntil", Timestamp.class)
+ .build());
+ }
+
+ @Test
+ public void changeColumnsMethods() throws Exception {
+ assertThatSerializedClass(ChangeColumns.class)
+ .hasAutoValueMethods(
+ ImmutableMap.<String, Type>builder()
+ .put("changeKey", Change.Key.class)
+ .put("createdOn", Timestamp.class)
+ .put("lastUpdatedOn", Timestamp.class)
+ .put("owner", Account.Id.class)
+ .put("branch", String.class)
+ .put("currentPatchSetId", PatchSet.Id.class)
+ .put("subject", String.class)
+ .put("topic", String.class)
+ .put("originalSubject", String.class)
+ .put("submissionId", String.class)
+ .put("assignee", Account.Id.class)
+ .put("status", Change.Status.class)
+ .put("isPrivate", boolean.class)
+ .put("workInProgress", boolean.class)
+ .put("reviewStarted", boolean.class)
+ .put("revertOf", Change.Id.class)
+ .put("toBuilder", ChangeNotesState.ChangeColumns.Builder.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetFields() throws Exception {
+ assertThatSerializedClass(PatchSet.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("id", PatchSet.Id.class)
+ .put("revision", RevId.class)
+ .put("uploader", Account.Id.class)
+ .put("createdOn", Timestamp.class)
+ .put("groups", String.class)
+ .put("pushCertificate", String.class)
+ .put("description", String.class)
+ .build());
+ }
+
+ @Test
+ public void patchSetApprovalFields() throws Exception {
+ assertThatSerializedClass(PatchSetApproval.Key.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("patchSetId", PatchSet.Id.class)
+ .put("accountId", Account.Id.class)
+ .put("categoryId", LabelId.class)
+ .build());
+ assertThatSerializedClass(PatchSetApproval.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", PatchSetApproval.Key.class)
+ .put("value", short.class)
+ .put("granted", Timestamp.class)
+ .put("tag", String.class)
+ .put("realAccountId", Account.Id.class)
+ .put("postSubmit", boolean.class)
+ .build());
+ }
+
+ @Test
+ public void reviewerSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<
+ ReviewerStateInternal, Account.Id, Timestamp>>() {}.getType(),
+ "accounts", new TypeLiteral<ImmutableSet<Account.Id>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerByEmailSetFields() throws Exception {
+ assertThatSerializedClass(ReviewerByEmailSet.class)
+ .hasFields(
+ ImmutableMap.of(
+ "table",
+ new TypeLiteral<
+ ImmutableTable<ReviewerStateInternal, Address, Timestamp>>() {}.getType(),
+ "users", new TypeLiteral<ImmutableSet<Address>>() {}.getType()));
+ }
+
+ @Test
+ public void reviewerStatusUpdateMethods() throws Exception {
+ assertThatSerializedClass(ReviewerStatusUpdate.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "date", Timestamp.class,
+ "updatedBy", Account.Id.class,
+ "reviewer", Account.Id.class,
+ "state", ReviewerStateInternal.class));
+ }
+
+ @Test
+ public void submitRecordFields() throws Exception {
+ assertThatSerializedClass(SubmitRecord.class)
+ .hasFields(
+ ImmutableMap.of(
+ "status",
+ SubmitRecord.Status.class,
+ "labels",
+ new TypeLiteral<List<SubmitRecord.Label>>() {}.getType(),
+ "requirements",
+ new TypeLiteral<List<SubmitRequirement>>() {}.getType(),
+ "errorMessage",
+ String.class));
+ assertThatSerializedClass(SubmitRecord.Label.class)
+ .hasFields(
+ ImmutableMap.of(
+ "label", String.class,
+ "status", SubmitRecord.Label.Status.class,
+ "appliedBy", Account.Id.class));
+ assertThatSerializedClass(SubmitRequirement.class)
+ .hasAutoValueMethods(
+ ImmutableMap.of(
+ "fallbackText", String.class,
+ "type", String.class,
+ "data", new TypeLiteral<ImmutableMap<String, String>>() {}.getType()));
+ }
+
+ @Test
+ public void changeMessageFields() throws Exception {
+ assertThatSerializedClass(ChangeMessage.Key.class)
+ .hasFields(ImmutableMap.of("changeId", Change.Id.class, "uuid", String.class));
+ assertThatSerializedClass(ChangeMessage.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", ChangeMessage.Key.class)
+ .put("author", Account.Id.class)
+ .put("writtenOn", Timestamp.class)
+ .put("message", String.class)
+ .put("patchset", PatchSet.Id.class)
+ .put("tag", String.class)
+ .put("realAuthor", Account.Id.class)
+ .build());
+ }
+
+ @Test
+ public void commentFields() throws Exception {
+ assertThatSerializedClass(Comment.Key.class)
+ .hasFields(
+ ImmutableMap.of(
+ "uuid", String.class, "filename", String.class, "patchSetId", int.class));
+ assertThatSerializedClass(Comment.Identity.class).hasFields(ImmutableMap.of("id", int.class));
+ assertThatSerializedClass(Comment.Range.class)
+ .hasFields(
+ ImmutableMap.of(
+ "startLine", int.class,
+ "startChar", int.class,
+ "endLine", int.class,
+ "endChar", int.class));
+ assertThatSerializedClass(Comment.class)
+ .hasFields(
+ ImmutableMap.<String, Type>builder()
+ .put("key", Comment.Key.class)
+ .put("lineNbr", int.class)
+ .put("author", Comment.Identity.class)
+ .put("realAuthor", Comment.Identity.class)
+ .put("writtenOn", Timestamp.class)
+ .put("side", short.class)
+ .put("message", String.class)
+ .put("parentUuid", String.class)
+ .put("range", Comment.Range.class)
+ .put("tag", String.class)
+ .put("revId", String.class)
+ .put("serverId", String.class)
+ .put("unresolved", boolean.class)
+ .put("legacyFormat", boolean.class)
+ .build());
+ }
+
+ private static ChangeNotesStateProto toProto(ChangeNotesState state) throws Exception {
+ return ChangeNotesStateProto.parseFrom(Serializer.INSTANCE.serialize(state));
+ }
+
+ private static ChangeNotesState assertRoundTrip(
+ ChangeNotesState state, ChangeNotesStateProto expectedProto) throws Exception {
+ ChangeNotesStateProto actualProto = toProto(state);
+ assertThat(actualProto).isEqualTo(expectedProto);
+ ChangeNotesState actual = Serializer.INSTANCE.deserialize(Serializer.INSTANCE.serialize(state));
+ assertThat(actual).isEqualTo(state);
+ // It's possible that ChangeNotesState contains objects which implement equals without taking
+ // into account all fields. Return the actual deserialized instance so that callers can perform
+ // additional assertions if necessary.
+ return actual;
+ }
+
+ private static ByteString toByteString(ObjectId id) {
+ byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
+ id.copyRawTo(buf, 0);
+ return ByteString.copyFrom(buf);
+ }
+
+ private <T> ByteString toByteString(T object, ProtobufCodec<T> codec) {
+ return ProtoCacheSerializers.toByteString(object, codec);
+ }
+}
diff --git a/javatests/com/google/gerrit/server/notedb/ChangeNotesTest.java b/javatests/com/google/gerrit/server/notedb/ChangeNotesTest.java
index e5a34aa..9d38704 100644
--- a/javatests/com/google/gerrit/server/notedb/ChangeNotesTest.java
+++ b/javatests/com/google/gerrit/server/notedb/ChangeNotesTest.java
@@ -1078,7 +1078,6 @@
ChangeNotes notes = newNotes(c);
assertThat(notes.getPatchSets().keySet()).containsExactly(psId1, psId2);
assertThat(notes.getApprovals()).isNotEmpty();
- assertThat(notes.getChangeMessagesByPatchSet()).isNotEmpty();
assertThat(notes.getChangeMessages()).isNotEmpty();
assertThat(notes.getComments()).isNotEmpty();
@@ -1095,7 +1094,6 @@
notes = newNotes(c);
assertThat(notes.getPatchSets().keySet()).containsExactly(psId1);
assertThat(notes.getApprovals()).isEmpty();
- assertThat(notes.getChangeMessagesByPatchSet()).isEmpty();
assertThat(notes.getChangeMessages()).isEmpty();
assertThat(notes.getComments()).isEmpty();
}
@@ -1349,16 +1347,12 @@
update.putReviewer(changeOwner.getAccount().getId(), REVIEWER);
update.setChangeMessage("Just a little code change.\n");
update.commit();
- PatchSet.Id ps1 = c.currentPatchSetId();
ChangeNotes notes = newNotes(c);
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessages = notes.getChangeMessagesByPatchSet();
- assertThat(changeMessages.keySet()).containsExactly(ps1);
-
- ChangeMessage cm = Iterables.getOnlyElement(changeMessages.get(ps1));
+ ChangeMessage cm = Iterables.getOnlyElement(notes.getChangeMessages());
assertThat(cm.getMessage()).isEqualTo("Just a little code change.\n");
assertThat(cm.getAuthor()).isEqualTo(changeOwner.getAccount().getId());
- assertThat(cm.getPatchSetId()).isEqualTo(ps1);
+ assertThat(cm.getPatchSetId()).isEqualTo(c.currentPatchSetId());
}
@Test
@@ -1378,13 +1372,9 @@
ChangeUpdate update = newUpdate(c, changeOwner);
update.setChangeMessage("Testing trailing double newline\n\n");
update.commit();
- PatchSet.Id ps1 = c.currentPatchSetId();
ChangeNotes notes = newNotes(c);
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessages = notes.getChangeMessagesByPatchSet();
- assertThat(changeMessages).hasSize(1);
-
- ChangeMessage cm1 = Iterables.getOnlyElement(changeMessages.get(ps1));
+ ChangeMessage cm1 = Iterables.getOnlyElement(notes.getChangeMessages());
assertThat(cm1.getMessage()).isEqualTo("Testing trailing double newline\n\n");
assertThat(cm1.getAuthor()).isEqualTo(changeOwner.getAccount().getId());
}
@@ -1395,13 +1385,9 @@
ChangeUpdate update = newUpdate(c, changeOwner);
update.setChangeMessage("Testing paragraph 1\n\nTesting paragraph 2\n\nTesting paragraph 3");
update.commit();
- PatchSet.Id ps1 = c.currentPatchSetId();
ChangeNotes notes = newNotes(c);
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessages = notes.getChangeMessagesByPatchSet();
- assertThat(changeMessages).hasSize(1);
-
- ChangeMessage cm1 = Iterables.getOnlyElement(changeMessages.get(ps1));
+ ChangeMessage cm1 = Iterables.getOnlyElement(notes.getChangeMessages());
assertThat(cm1.getMessage())
.isEqualTo(
"Testing paragraph 1\n"
@@ -1429,15 +1415,15 @@
PatchSet.Id ps2 = c.currentPatchSetId();
ChangeNotes notes = newNotes(c);
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessages = notes.getChangeMessagesByPatchSet();
- assertThat(changeMessages).hasSize(2);
+ assertThat(notes.getChangeMessages()).hasSize(2);
- ChangeMessage cm1 = Iterables.getOnlyElement(changeMessages.get(ps1));
+ ChangeMessage cm1 = notes.getChangeMessages().get(0);
+ assertThat(cm1.getPatchSetId()).isEqualTo(ps1);
assertThat(cm1.getMessage()).isEqualTo("This is the change message for the first PS.");
assertThat(cm1.getAuthor()).isEqualTo(changeOwner.getAccount().getId());
- ChangeMessage cm2 = Iterables.getOnlyElement(changeMessages.get(ps2));
- assertThat(cm1.getPatchSetId()).isEqualTo(ps1);
+ ChangeMessage cm2 = notes.getChangeMessages().get(1);
+ assertThat(cm2.getPatchSetId()).isEqualTo(ps2);
assertThat(cm2.getMessage()).isEqualTo("This is the change message for the second PS.");
assertThat(cm2.getAuthor()).isEqualTo(changeOwner.getAccount().getId());
assertThat(cm2.getPatchSetId()).isEqualTo(ps2);
@@ -1459,10 +1445,8 @@
update.commit();
ChangeNotes notes = newNotes(c);
- ListMultimap<PatchSet.Id, ChangeMessage> changeMessages = notes.getChangeMessagesByPatchSet();
- assertThat(changeMessages.keySet()).hasSize(1);
- List<ChangeMessage> cm = changeMessages.get(ps1);
+ List<ChangeMessage> cm = notes.getChangeMessages();
assertThat(cm).hasSize(2);
assertThat(cm.get(0).getMessage()).isEqualTo("First change message.\n");
assertThat(cm.get(0).getAuthor()).isEqualTo(changeOwner.getAccount().getId());
@@ -3266,7 +3250,7 @@
public void privateDefault() throws Exception {
Change c = newChange();
ChangeNotes notes = newNotes(c);
- assertThat(notes.isPrivate()).isFalse();
+ assertThat(notes.getChange().isPrivate()).isFalse();
}
@Test
@@ -3277,7 +3261,7 @@
update.commit();
ChangeNotes notes = newNotes(c);
- assertThat(notes.isPrivate()).isTrue();
+ assertThat(notes.getChange().isPrivate()).isTrue();
}
@Test
@@ -3292,7 +3276,7 @@
update.commit();
ChangeNotes notes = newNotes(c);
- assertThat(notes.isPrivate()).isFalse();
+ assertThat(notes.getChange().isPrivate()).isFalse();
}
@Test
@@ -3397,38 +3381,38 @@
@Test
public void hasReviewStarted() throws Exception {
ChangeNotes notes = newNotes(newChange());
- assertThat(notes.hasReviewStarted()).isTrue();
+ assertThat(notes.getChange().hasReviewStarted()).isTrue();
notes = newNotes(newWorkInProgressChange());
- assertThat(notes.hasReviewStarted()).isFalse();
+ assertThat(notes.getChange().hasReviewStarted()).isFalse();
Change c = newWorkInProgressChange();
ChangeUpdate update = newUpdate(c, changeOwner);
update.commit();
notes = newNotes(c);
- assertThat(notes.hasReviewStarted()).isFalse();
+ assertThat(notes.getChange().hasReviewStarted()).isFalse();
update = newUpdate(c, changeOwner);
update.setWorkInProgress(true);
update.commit();
notes = newNotes(c);
- assertThat(notes.hasReviewStarted()).isFalse();
+ assertThat(notes.getChange().hasReviewStarted()).isFalse();
update = newUpdate(c, changeOwner);
update.setWorkInProgress(false);
update.commit();
notes = newNotes(c);
- assertThat(notes.hasReviewStarted()).isTrue();
+ assertThat(notes.getChange().hasReviewStarted()).isTrue();
// Once review is started, setting WIP should have no impact.
c = newChange();
notes = newNotes(c);
- assertThat(notes.hasReviewStarted()).isTrue();
+ assertThat(notes.getChange().hasReviewStarted()).isTrue();
update = newUpdate(c, changeOwner);
update.setWorkInProgress(true);
update.commit();
notes = newNotes(c);
- assertThat(notes.hasReviewStarted()).isTrue();
+ assertThat(notes.getChange().hasReviewStarted()).isTrue();
}
@Test
@@ -3493,7 +3477,7 @@
public void revertOfIsNullByDefault() throws Exception {
Change c = newChange();
ChangeNotes notes = newNotes(c);
- assertThat(notes.getRevertOf()).isNull();
+ assertThat(notes.getChange().getRevertOf()).isNull();
}
@Test
@@ -3504,7 +3488,7 @@
update.setChangeId(c.getKey().get());
update.setRevertOf(changeToRevert.getId().get());
update.commit();
- assertThat(newNotes(c).getRevertOf()).isEqualTo(changeToRevert.getId());
+ assertThat(newNotes(c).getChange().getRevertOf()).isEqualTo(changeToRevert.getId());
}
@Test
diff --git a/javatests/com/google/gerrit/server/permissions/RefControlTest.java b/javatests/com/google/gerrit/server/permissions/RefControlTest.java
index c30803a..7890de8 100644
--- a/javatests/com/google/gerrit/server/permissions/RefControlTest.java
+++ b/javatests/com/google/gerrit/server/permissions/RefControlTest.java
@@ -47,6 +47,7 @@
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
+import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.CapabilityCollection;
import com.google.gerrit.server.account.GroupMembership;
import com.google.gerrit.server.account.ListGroupMembership;
@@ -202,6 +203,7 @@
@Inject private InMemoryDatabase schemaFactory;
@Inject private ThreadLocalRequestContext requestContext;
@Inject private DefaultRefFilter.Factory refFilterFactory;
+ @Inject private IdentifiedUser.GenericFactory identifiedUserFactory;
@Before
public void setUp() throws Exception {
@@ -986,6 +988,7 @@
changeControlFactory,
permissionBackend,
refFilterFactory,
+ identifiedUserFactory,
new MockUser(name, memberOf),
newProjectState(local));
}
diff --git a/javatests/com/google/gerrit/server/query/account/BUILD b/javatests/com/google/gerrit/server/query/account/BUILD
index c352f43..e6c631b 100644
--- a/javatests/com/google/gerrit/server/query/account/BUILD
+++ b/javatests/com/google/gerrit/server/query/account/BUILD
@@ -15,10 +15,11 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
- "//lib:truth-java8-extension",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
"//prolog:gerrit-prolog-common",
],
)
diff --git a/javatests/com/google/gerrit/server/query/change/BUILD b/javatests/com/google/gerrit/server/query/change/BUILD
index 66c825c..78ec176 100644
--- a/javatests/com/google/gerrit/server/query/change/BUILD
+++ b/javatests/com/google/gerrit/server/query/change/BUILD
@@ -19,11 +19,12 @@
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
@@ -41,10 +42,11 @@
"//java/com/google/gerrit/reviewdb:server",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/testing:gerrit-test-util",
+ "//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/query/group/BUILD b/javatests/com/google/gerrit/server/query/group/BUILD
index 01a54a3..0dd16cd 100644
--- a/javatests/com/google/gerrit/server/query/group/BUILD
+++ b/javatests/com/google/gerrit/server/query/group/BUILD
@@ -15,10 +15,11 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
- "//lib:truth-java8-extension",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/javatests/com/google/gerrit/server/query/project/BUILD b/javatests/com/google/gerrit/server/query/project/BUILD
index ac2692b..eaa3df3 100644
--- a/javatests/com/google/gerrit/server/query/project/BUILD
+++ b/javatests/com/google/gerrit/server/query/project/BUILD
@@ -14,9 +14,10 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/schema",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
+ "//lib:guava",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/server/rules/BUILD b/javatests/com/google/gerrit/server/rules/BUILD
index 04a6485..42452df 100644
--- a/javatests/com/google/gerrit/server/rules/BUILD
+++ b/javatests/com/google/gerrit/server/rules/BUILD
@@ -10,10 +10,10 @@
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/server/project/testing:project-test-util",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/prolog:runtime",
+ "//lib/truth",
"//prolog:gerrit-prolog-common",
],
)
diff --git a/javatests/com/google/gerrit/server/update/BUILD b/javatests/com/google/gerrit/server/update/BUILD
index 81e8b31..46820c7 100644
--- a/javatests/com/google/gerrit/server/update/BUILD
+++ b/javatests/com/google/gerrit/server/update/BUILD
@@ -12,9 +12,9 @@
"//java/com/google/gerrit/server",
"//lib:guava",
"//lib:junit",
- "//lib:truth",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
],
)
@@ -34,10 +34,10 @@
"//java/com/google/gerrit/testing:gerrit-test-util",
"//lib:guava",
"//lib:gwtorm",
- "//lib:truth",
- "//lib:truth-java8-extension",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
+ "//lib/truth",
+ "//lib/truth:truth-java8-extension",
],
)
diff --git a/javatests/com/google/gerrit/sshd/BUILD b/javatests/com/google/gerrit/sshd/BUILD
index c0eaedf..ad7d8a9 100644
--- a/javatests/com/google/gerrit/sshd/BUILD
+++ b/javatests/com/google/gerrit/sshd/BUILD
@@ -7,7 +7,7 @@
"//java/com/google/gerrit/extensions:api",
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/sshd",
- "//lib:truth",
"//lib/mina:sshd",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/testing/BUILD b/javatests/com/google/gerrit/testing/BUILD
index 191e98f..5774707 100644
--- a/javatests/com/google/gerrit/testing/BUILD
+++ b/javatests/com/google/gerrit/testing/BUILD
@@ -7,6 +7,6 @@
deps = [
"//java/com/google/gerrit/server",
"//java/com/google/gerrit/testing:gerrit-test-util",
- "//lib:truth",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gerrit/util/http/BUILD b/javatests/com/google/gerrit/util/http/BUILD
index 5755ca8..48b4339 100644
--- a/javatests/com/google/gerrit/util/http/BUILD
+++ b/javatests/com/google/gerrit/util/http/BUILD
@@ -8,7 +8,7 @@
"//javatests/com/google/gerrit/util/http/testutil",
"//lib:junit",
"//lib:servlet-api-3_1-without-neverlink",
- "//lib:truth",
"//lib/easymock",
+ "//lib/truth",
],
)
diff --git a/javatests/com/google/gwtexpui/safehtml/BUILD b/javatests/com/google/gwtexpui/safehtml/BUILD
index 4f75bdb..694f422 100644
--- a/javatests/com/google/gwtexpui/safehtml/BUILD
+++ b/javatests/com/google/gwtexpui/safehtml/BUILD
@@ -5,8 +5,9 @@
srcs = glob(["client/**/*.java"]),
deps = [
"//java/com/google/gwtexpui/safehtml",
- "//lib:truth",
+ "//lib:guava",
"//lib/gwt:dev",
"//lib/gwt:user",
+ "//lib/truth",
],
)
diff --git a/lib/BUILD b/lib/BUILD
index 5e391e9..c698afb 100644
--- a/lib/BUILD
+++ b/lib/BUILD
@@ -217,28 +217,6 @@
)
java_library(
- name = "truth",
- data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
- visibility = ["//visibility:public"],
- exports = [
- ":guava",
- ":junit",
- "@truth//jar",
- ],
-)
-
-java_library(
- name = "truth-java8-extension",
- data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
- visibility = ["//visibility:public"],
- exports = [
- ":guava",
- ":truth",
- "@truth-java8-extension//jar",
- ],
-)
-
-java_library(
name = "javassist",
data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
visibility = ["//visibility:public"],
diff --git a/lib/guava.bzl b/lib/guava.bzl
index db85900..e90c2b3 100644
--- a/lib/guava.bzl
+++ b/lib/guava.bzl
@@ -1,5 +1,5 @@
-GUAVA_VERSION = "24.1-jre"
+GUAVA_VERSION = "25.0-jre"
-GUAVA_BIN_SHA1 = "96c528475465aeb22cce60605d230a7e67cebd7b"
+GUAVA_BIN_SHA1 = "7319c34fa5866a85b6bad445adad69d402323129"
GUAVA_DOC_URL = "https://google.github.io/guava/releases/" + GUAVA_VERSION + "/api/docs/"
diff --git a/lib/truth/BUILD b/lib/truth/BUILD
new file mode 100644
index 0000000..82cd98a
--- /dev/null
+++ b/lib/truth/BUILD
@@ -0,0 +1,49 @@
+java_library(
+ name = "truth",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = ["@truth//jar"],
+ runtime_deps = [
+ "//lib:guava",
+ "//lib:junit",
+ ],
+)
+
+java_library(
+ name = "truth-java8-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = ["@truth-java8-extension//jar"],
+ runtime_deps = [
+ ":truth",
+ "//lib:guava",
+ ],
+)
+
+java_library(
+ name = "truth-liteproto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:private"],
+ exports = ["@truth-liteproto-extension//jar"],
+ runtime_deps = [
+ ":truth",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
+
+java_library(
+ name = "truth-proto-extension",
+ data = ["//lib:LICENSE-DO_NOT_DISTRIBUTE"],
+ visibility = ["//visibility:public"],
+ exports = [
+ ":truth-liteproto-extension",
+ "@truth-proto-extension//jar",
+ ],
+ runtime_deps = [
+ ":truth",
+ ":truth-liteproto-extension",
+ "//lib:guava",
+ "//lib:protobuf",
+ ],
+)
diff --git a/plugins/codemirror-editor b/plugins/codemirror-editor
index c97e280..ee50e45 160000
--- a/plugins/codemirror-editor
+++ b/plugins/codemirror-editor
@@ -1 +1 @@
-Subproject commit c97e2806532cff00fea6424cde0d440f9ea5016d
+Subproject commit ee50e45b449e282ed78917175daf8b359da8d943
diff --git a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
index 4d53631..04d8b6e 100644
--- a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
+++ b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
@@ -51,7 +51,6 @@
detached() {
this._handleHideTooltip();
- this.unlisten(window, 'scroll', '_handleWindowScroll');
},
_setupTooltipListeners() {
@@ -59,9 +58,6 @@
this._hasSetupTooltipListeners = true;
this.addEventListener('mouseenter', this._handleShowTooltip.bind(this));
- this.addEventListener('mouseleave', this._handleHideTooltip.bind(this));
- this.addEventListener('tap', this._handleHideTooltip.bind(this));
- this.listen(window, 'scroll', '_handleWindowScroll');
},
_handleShowTooltip(e) {
@@ -91,6 +87,9 @@
tooltip.style.visibility = null;
this._tooltip = tooltip;
+ this.listen(window, 'scroll', '_handleWindowScroll');
+ this.listen(this, 'mouseleave', '_handleHideTooltip');
+ this.listen(this, 'tap', '_handleHideTooltip');
},
_handleHideTooltip(e) {
@@ -100,6 +99,9 @@
return;
}
+ this.unlisten(window, 'scroll', '_handleWindowScroll');
+ this.unlisten(this, 'mouseleave', '_handleHideTooltip');
+ this.unlisten(this, 'tap', '_handleHideTooltip');
this.setAttribute('title', this._titleText);
if (this._tooltip && this._tooltip.parentNode) {
this._tooltip.parentNode.removeChild(this._tooltip);
diff --git a/polygerrit-ui/app/elements/admin/gr-group/gr-group.html b/polygerrit-ui/app/elements/admin/gr-group/gr-group.html
index d21247a..1e19107 100644
--- a/polygerrit-ui/app/elements/admin/gr-group/gr-group.html
+++ b/polygerrit-ui/app/elements/admin/gr-group/gr-group.html
@@ -32,7 +32,7 @@
<style include="shared-styles"></style>
<style include="gr-subpage-styles">
h3.edited:after {
- color: #444;
+ color: var(--deemphasized-text-color);
content: ' *';
}
.inputUpdateBtn {
diff --git a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
index feaadc7..8512a5d 100644
--- a/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
+++ b/polygerrit-ui/app/elements/admin/gr-repo-detail-list/gr-repo-detail-list.js
@@ -209,8 +209,7 @@
_handleDeleteItemConfirm() {
this.$.overlay.close();
if (this.detailType === DETAIL_TYPES.BRANCHES) {
- return this.$.restAPI.deleteRepoBranches(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoBranches(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
@@ -219,8 +218,7 @@
}
});
} else if (this.detailType === DETAIL_TYPES.TAGS) {
- return this.$.restAPI.deleteRepoTags(this._repo,
- this._refName)
+ return this.$.restAPI.deleteRepoTags(this._repo, this._refName)
.then(itemDeleted => {
if (itemDeleted.status === 204) {
this._getItems(
diff --git a/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html b/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
index 85dcdbe..ac12d71 100644
--- a/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
+++ b/polygerrit-ui/app/elements/admin/gr-repo/gr-repo.html
@@ -33,7 +33,7 @@
<style="shared-styles"></style>
<style include="gr-subpage-styles">
h2.edited:after {
- color: #444;
+ color: var(--deemphasized-text-color);
content: ' *';
}
.loading,
diff --git a/polygerrit-ui/app/elements/change-list/gr-change-list-item/gr-change-list-item.html b/polygerrit-ui/app/elements/change-list/gr-change-list-item/gr-change-list-item.html
index de39478..6ea7cf3 100644
--- a/polygerrit-ui/app/elements/change-list/gr-change-list-item/gr-change-list-item.html
+++ b/polygerrit-ui/app/elements/change-list/gr-change-list-item/gr-change-list-item.html
@@ -46,7 +46,7 @@
font-family: var(--font-family-bold);
}
:host([highlight]) {
- background-color: #fcfad6;
+ background-color: var(--assignee-highlight-color);
}
.container {
position: relative;
diff --git a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
index 3f967c8..cfdf88c 100644
--- a/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
+++ b/polygerrit-ui/app/elements/change/gr-change-actions/gr-change-actions.js
@@ -1188,7 +1188,7 @@
}
const patchNum = revisionAction ? this.latestPatchNum : null;
return this.$.restAPI.getChangeURLAndSend(this.changeNum, method,
- patchNum, actionEndpoint, payload, handleError, this)
+ patchNum, actionEndpoint, payload, handleError)
.then(response => {
cleanupFn.call(this);
return response;
diff --git a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.html b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.html
index 519b15b..b26d649 100644
--- a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.html
+++ b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.html
@@ -178,7 +178,7 @@
display: none;
}
.warning {
- color: #d14836;
+ color: var(--error-text-color);
}
hr {
border: 0;
diff --git a/polygerrit-ui/app/elements/change/gr-confirm-move-dialog/gr-confirm-move-dialog.html b/polygerrit-ui/app/elements/change/gr-confirm-move-dialog/gr-confirm-move-dialog.html
index 271dd3cc..350af900 100644
--- a/polygerrit-ui/app/elements/change/gr-confirm-move-dialog/gr-confirm-move-dialog.html
+++ b/polygerrit-ui/app/elements/change/gr-confirm-move-dialog/gr-confirm-move-dialog.html
@@ -55,7 +55,7 @@
width: 100%;
}
.warning {
- color: red;
+ color: var(--error-text-color);
}
</style>
<gr-confirm-dialog
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
index 487d8c8..3f26628 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.html
@@ -122,7 +122,7 @@
text-overflow: ellipsis;
}
.oldPath {
- color: #999;
+ color: var(--deemphasized-text-color);
}
.comments,
.stats {
@@ -283,6 +283,7 @@
as="file"
initial-count="[[fileListIncrement]]"
target-framerate="1">
+ [[_reportRenderedRow(index)]]
<div class="stickyArea">
<div class$="file-row row [[_computePathClass(file.__path, _expandedFilePaths.*)]]"
data-path$="[[file.__path]]" tabindex="-1">
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
index 0fa037c..83f1565 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list.js
@@ -26,6 +26,8 @@
const SIZE_BAR_GAP_WIDTH = 1;
const SIZE_BAR_MIN_WIDTH = 1.5;
+ const RENDER_TIME = 'FileListRenderTime';
+
const FileStatus = {
A: 'Added',
C: 'Copied',
@@ -429,17 +431,21 @@
return GrCountStringFormatter.computeShortString(commentCount, 'c');
},
- _reviewFile(path) {
+ /**
+ * @param {string} path
+ * @param {boolean=} opt_reviewed
+ */
+ _reviewFile(path, opt_reviewed) {
if (this.editMode) { return; }
const index = this._files.findIndex(file => file.__path === path);
- const reviewed = this._files[index].isReviewed;
+ const reviewed = opt_reviewed || !this._files[index].isReviewed;
- this.set(['_files', index, 'isReviewed'], !reviewed);
+ this.set(['_files', index, 'isReviewed'], reviewed);
if (index < this._shownFiles.length) {
- this.set(['_shownFiles', index, 'isReviewed'], !reviewed);
+ this.set(['_shownFiles', index, 'isReviewed'], reviewed);
}
- this._saveReviewedState(path, !reviewed);
+ this._saveReviewedState(path, reviewed);
},
_saveReviewedState(path, reviewed) {
@@ -797,6 +803,12 @@
_computeFilesShown(numFilesShown, files) {
const filesShown = files.base.slice(0, numFilesShown);
this.fire('files-shown-changed', {length: filesShown.length});
+
+ // Start the timer for the rendering work here because this is where the
+ // _shownFiles property is being set, and _shownFiles is used in the
+ // dom-repeat binding.
+ this.$.reporting.time(RENDER_TIME);
+
return filesShown;
},
@@ -953,7 +965,7 @@
path, this.patchRange, this.projectConfig);
const promises = [diffElem.reload()];
if (this._loggedIn && !this.diffPrefs.manual_review) {
- promises.push(this._reviewFile(path));
+ promises.push(this._reviewFile(path, true));
}
return Promise.all(promises);
}).then(() => {
@@ -1175,5 +1187,21 @@
_noDiffsExpanded() {
return this.filesExpanded === GrFileListConstants.FilesExpandedState.NONE;
},
+
+ /**
+ * Method to call via binding when each file list row is rendered. This
+ * allows approximate detection of when the dom-repeat has completed
+ * rendering.
+ * @param {number} index The index of the row being rendered.
+ * @return {string} an empty string.
+ */
+ _reportRenderedRow(index) {
+ if (index === this._shownFiles.length - 1) {
+ this.async(() => {
+ this.$.reporting.timeEnd(RENDER_TIME);
+ }, 1);
+ }
+ return '';
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
index 8541edf..3c90a1f 100644
--- a/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
+++ b/polygerrit-ui/app/elements/change/gr-file-list/gr-file-list_test.html
@@ -60,7 +60,6 @@
stub('gr-rest-api-interface', {
getLoggedIn() { return Promise.resolve(true); },
getPreferences() { return Promise.resolve({}); },
- fetchJSON() { return Promise.resolve({}); },
getDiffComments() { return Promise.resolve({}); },
getDiffRobotComments() { return Promise.resolve({}); },
getDiffDrafts() { return Promise.resolve({}); },
@@ -127,6 +126,19 @@
assert.isTrue(controlRow.classList.contains('invisible'));
});
+ test('rendering each row calls the _reportRenderedRow method', () => {
+ const renderedStub = sandbox.stub(element, '_reportRenderedRow');
+ element._filesByPath = _.range(10)
+ .reduce((_filesByPath, i) => {
+ _filesByPath['/file' + i] = {lines_inserted: 9};
+ return _filesByPath;
+ }, {});
+ flushAsynchronousOperations();
+ assert.equal(
+ Polymer.dom(element.root).querySelectorAll('.file-row').length, 10);
+ assert.equal(renderedStub.callCount, 10);
+ });
+
test('calculate totals for patch number', () => {
element._filesByPath = {
'/COMMIT_MSG': {
@@ -1023,6 +1035,7 @@
delete element.diffPrefs.manual_review;
return element._renderInOrder(['p'], diffs, 1).then(() => {
assert.isTrue(reviewStub.called);
+ assert.isTrue(reviewStub.calledWithExactly('p', true));
});
});
});
diff --git a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
index cf79a31..b824f1c 100644
--- a/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
+++ b/polygerrit-ui/app/elements/change/gr-included-in-dialog/gr-included-in-dialog.html
@@ -30,6 +30,7 @@
padding: 4.5em 1em 1em 1em;
}
header {
+ background-color: var(--dialog-background-color);
border-bottom: 1px solid var(--border-color);
left: 0;
padding: 1em;
diff --git a/polygerrit-ui/app/elements/change/gr-reviewer-list/gr-reviewer-list.html b/polygerrit-ui/app/elements/change/gr-reviewer-list/gr-reviewer-list.html
index da5ab1e..73e8bea 100644
--- a/polygerrit-ui/app/elements/change/gr-reviewer-list/gr-reviewer-list.html
+++ b/polygerrit-ui/app/elements/change/gr-reviewer-list/gr-reviewer-list.html
@@ -49,12 +49,6 @@
gr-account-chip {
margin-top: .3em;
}
- .remove {
- color: #999;
- }
- .remove {
- font-size: var(--font-size-small);
- }
gr-button {
--gr-button: {
padding-left: 0;
diff --git a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
index bbe2877..d1ae719 100644
--- a/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
+++ b/polygerrit-ui/app/elements/core/gr-account-dropdown/gr-account-dropdown.html
@@ -31,7 +31,7 @@
color: var(--header-text-color);
}
--gr-dropdown-item: {
- color: var(--header-text-color);
+ color: var(--primary-text-color);
}
}
gr-avatar {
diff --git a/polygerrit-ui/app/elements/core/gr-main-header/gr-main-header.html b/polygerrit-ui/app/elements/core/gr-main-header/gr-main-header.html
index fa0fe52..8ce59f2 100644
--- a/polygerrit-ui/app/elements/core/gr-main-header/gr-main-header.html
+++ b/polygerrit-ui/app/elements/core/gr-main-header/gr-main-header.html
@@ -52,7 +52,6 @@
content: "";
display: inline-block;
height: var(--header-icon-size);
- margin: 0 .25em 0 0;
vertical-align: text-bottom;
width: var(--header-icon-size);
}
@@ -99,7 +98,7 @@
}
gr-dropdown {
--gr-dropdown-item: {
- color: var(--header-text-color);
+ color: var(--primary-text-color);
}
}
.browse {
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js
new file mode 100644
index 0000000..28c46f4
--- /dev/null
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector.js
@@ -0,0 +1,61 @@
+/**
+ * @license
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+ 'use strict';
+
+ const JANK_SLEEP_TIME_MS = 1000;
+
+ const GrJankDetector = {
+ // Slowdowns counter.
+ jank: 0,
+ fps: 0,
+ _lastFrameTime: 0,
+
+ start() {
+ this._requestAnimationFrame(this._detect.bind(this));
+ },
+
+ _requestAnimationFrame(callback) {
+ window.requestAnimationFrame(callback);
+ },
+
+ _detect(now) {
+ if (this._lastFrameTime === 0) {
+ this._lastFrameTime = now;
+ this.fps = 0;
+ this._requestAnimationFrame(this._detect.bind(this));
+ return;
+ }
+ const fpsNow = 1000/(now - this._lastFrameTime);
+ this._lastFrameTime = now;
+ // Calculate moving average within last 3 measurements.
+ this.fps = this.fps === 0 ? fpsNow : ((this.fps * 2 + fpsNow) / 3);
+ if (this.fps > 10) {
+ this._requestAnimationFrame(this._detect.bind(this));
+ } else {
+ this.jank++;
+ console.warn('JANK', this.jank);
+ this._lastFrameTime = 0;
+ window.setTimeout(
+ () => this._requestAnimationFrame(this._detect.bind(this)),
+ JANK_SLEEP_TIME_MS);
+ }
+ },
+ };
+
+ window.GrJankDetector = GrJankDetector;
+})();
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html
new file mode 100644
index 0000000..6faeec1
--- /dev/null
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-jank-detector_test.html
@@ -0,0 +1,78 @@
+<!DOCTYPE html>
+<!--
+@license
+Copyright (C) 2018 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
+<title>gr-jank-detector</title>
+
+<script src="../../../bower_components/webcomponentsjs/webcomponents-lite.min.js"></script>
+<script src="../../../bower_components/web-component-tester/browser.js"></script>
+<link rel="import" href="../../../test/common-test-setup.html"/>
+
+<script src="gr-jank-detector.js"></script>
+
+<script>
+ suite('gr-jank-detector tests', () => {
+ let sandbox;
+ let clock;
+ let instance;
+
+ const NOW_TIME = 100;
+
+ setup(() => {
+ sandbox = sinon.sandbox.create();
+ clock = sinon.useFakeTimers(NOW_TIME);
+ instance = GrJankDetector;
+ instance._lastFrameTime = 0;
+ sandbox.stub(instance, '_requestAnimationFrame');
+ });
+
+ teardown(() => {
+ sandbox.restore();
+ });
+
+ test('start() installs frame callback', () => {
+ sandbox.stub(instance, '_detect');
+ instance._requestAnimationFrame.callsArg(0);
+ instance.start();
+ assert.isTrue(instance._detect.calledOnce);
+ });
+
+ test('measures fps', () => {
+ instance._detect(10);
+ instance._detect(30);
+ assert.equal(instance.fps, 50);
+ });
+
+ test('detects jank', () => {
+ let now = 10;
+ instance._detect(now);
+ const fastFrame = () => instance._detect(now += 20);
+ const slowFrame = () => instance._detect(now += 300);
+ fastFrame();
+ assert.equal(instance.jank, 0);
+ _.times(4, slowFrame);
+ assert.equal(instance.jank, 0);
+ instance._requestAnimationFrame.reset();
+ slowFrame();
+ assert.equal(instance.jank, 1);
+ assert.isFalse(instance._requestAnimationFrame.called);
+ clock.tick(1000);
+ assert.isTrue(instance._requestAnimationFrame.called);
+ });
+ });
+</script>
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
index 2970a26..cbb2c09 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.html
@@ -19,5 +19,6 @@
<link rel="import" href="../../shared/gr-js-api-interface/gr-js-api-interface.html">
<dom-module id="gr-reporting">
+ <script src="gr-jank-detector.js"></script>
<script src="gr-reporting.js"></script>
</dom-module>
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
index 0db442f..ae67dac 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting.js
@@ -48,6 +48,14 @@
STARTED_HIDDEN: 'hidden',
};
+ // Frame rate related constants.
+ const JANK = {
+ TYPE: 'lifecycle',
+ CATEGORY: 'UI Latency',
+ // Reported events - alphabetize below.
+ COUNT: 'Jank count',
+ };
+
// Navigation reporting constants.
const NAVIGATION = {
TYPE: 'nav-report',
@@ -118,6 +126,8 @@
};
catchErrors();
+ GrJankDetector.start();
+
const GrReporting = Polymer({
is: 'gr-reporting',
@@ -206,6 +216,11 @@
},
beforeLocationChanged() {
+ if (GrJankDetector.jank > 0) {
+ this.reporter(
+ JANK.TYPE, JANK.CATEGORY, JANK.COUNT, GrJankDetector.jank);
+ GrJankDetector.jank = 0;
+ }
for (const prop of Object.keys(this._baselines)) {
delete this._baselines[prop];
}
diff --git a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
index bfb45f6..e2bb83d 100644
--- a/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
+++ b/polygerrit-ui/app/elements/core/gr-reporting/gr-reporting_test.html
@@ -93,7 +93,11 @@
test('beforeLocationChanged', () => {
element._baselines['garbage'] = 'monster';
sandbox.stub(element, 'time');
+ GrJankDetector.jank = 42;
element.beforeLocationChanged();
+ assert.equal(GrJankDetector.jank, 0);
+ assert.isTrue(element.reporter.calledWithExactly(
+ 'lifecycle', 'UI Latency', 'Jank count', 42));
assert.isTrue(element.time.calledWithExactly('DashboardDisplayed'));
assert.isTrue(element.time.calledWithExactly('ChangeDisplayed'));
assert.isTrue(element.time.calledWithExactly('DiffViewDisplayed'));
diff --git a/polygerrit-ui/app/elements/diff/gr-diff-comment/gr-diff-comment.html b/polygerrit-ui/app/elements/diff/gr-diff-comment/gr-diff-comment.html
index 29b780a..79b23ca 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff-comment/gr-diff-comment.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff-comment/gr-diff-comment.html
@@ -212,7 +212,6 @@
font-size: var(--font-size-small);
}
gr-confirm-dialog .main {
- background-color: #fef;
display: flex;
flex-direction: column;
width: 100%;
diff --git a/polygerrit-ui/app/elements/diff/gr-diff-view/gr-diff-view.html b/polygerrit-ui/app/elements/diff/gr-diff-view/gr-diff-view.html
index 47c4651..edee1ae 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff-view/gr-diff-view.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff-view/gr-diff-view.html
@@ -77,7 +77,7 @@
}
.navLink:not([href]),
.downloadLink:not([href]) {
- color: #999;
+ color: var(--deemphasized-text-color);
}
.navLinks {
align-items: center;
@@ -189,7 +189,7 @@
text-decoration: none;
}
.mobileNavLink:not([href]) {
- color: #bbb;
+ color: var(--deemphasized-text-color);
}
.jumpToFileContainer {
display: block;
diff --git a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
index 81c6d99..540df98 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
@@ -108,7 +108,6 @@
cursor: pointer;
}
.content {
- overflow: hidden;
/* Set min width since setting width on table cells still
allows them to shrink. Do not set max width because
CJK (Chinese-Japanese-Korean) glyphs have variable width */
diff --git a/polygerrit-ui/app/elements/gr-app.html b/polygerrit-ui/app/elements/gr-app.html
index 7cfb3b0..e4b2577 100644
--- a/polygerrit-ui/app/elements/gr-app.html
+++ b/polygerrit-ui/app/elements/gr-app.html
@@ -19,6 +19,11 @@
if (localStorage.getItem('USE_SHADOW_DOM') === 'true') {
window.Polymer = {
dom: 'shadow',
+ passiveTouchGestures: true,
+ };
+ } else if (!window.Polymer) {
+ window.Polymer = {
+ passiveTouchGestures: true,
};
}
</script>
@@ -34,8 +39,8 @@
<link rel="import" href="../behaviors/base-url-behavior/base-url-behavior.html">
<link rel="import" href="../behaviors/keyboard-shortcut-behavior/keyboard-shortcut-behavior.html">
-<link rel="import" href="../styles/app-theme.html">
<link rel="import" href="../styles/shared-styles.html">
+<link rel="import" href="../styles/themes/app-theme.html">
<link rel="import" href="./admin/gr-admin-view/gr-admin-view.html">
<link rel="import" href="./change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="./change-list/gr-dashboard-view/gr-dashboard-view.html">
@@ -127,10 +132,10 @@
font-size: 1.2rem;
}
.errorMoreInfo {
- color: #999;
+ color: var(--deemphasized-text-color);
}
.feedback {
- color: #b71c1c;
+ color: var(--error-text-color);
}
</style>
<gr-fixed-panel id="header">
diff --git a/polygerrit-ui/app/elements/gr-app.js b/polygerrit-ui/app/elements/gr-app.js
index b866088..921415f 100644
--- a/polygerrit-ui/app/elements/gr-app.js
+++ b/polygerrit-ui/app/elements/gr-app.js
@@ -127,6 +127,10 @@
this._version = version;
});
+ if (window.localStorage.getItem('dark-theme')) {
+ this.importHref('../styles/themes/dark-theme.html');
+ }
+
// Note: this is evaluated here to ensure that it only happens after the
// router has been initialized. @see Issue 7837
this._settingsUrl = Gerrit.Nav.getUrlForSettings();
diff --git a/polygerrit-ui/app/elements/settings/gr-cla-view/gr-cla-view.html b/polygerrit-ui/app/elements/settings/gr-cla-view/gr-cla-view.html
index 963d2e3..233235e 100644
--- a/polygerrit-ui/app/elements/settings/gr-cla-view/gr-cla-view.html
+++ b/polygerrit-ui/app/elements/settings/gr-cla-view/gr-cla-view.html
@@ -52,7 +52,7 @@
font-family: var(--font-family-bold);
}
.alreadySubmittedText {
- color: red;
+ color: var(--error-text-color);
margin: 0 2em;
padding: .5em;
}
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
index 48b01f6..14e5e6f 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.html
@@ -16,7 +16,9 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
+
<link rel="import" href="../../../behaviors/docs-url-behavior/docs-url-behavior.html">
+<link rel="import" href="../../../bower_components/paper-toggle-button/paper-toggle-button.html">
<link rel="import" href="../../../styles/gr-form-styles.html">
<link rel="import" href="../../../styles/gr-menu-page-styles.html">
<link rel="import" href="../../../styles/gr-page-nav-styles.html">
@@ -52,12 +54,19 @@
#email {
margin-bottom: 1em;
}
- .filters p {
+ .filters p,
+ .darkToggle p {
margin-bottom: 1em;
}
.queryExample em {
color: violet;
}
+ .toggle {
+ align-items: center;
+ display: flex;
+ margin-bottom: 1rem;
+ margin-right: 1rem;
+ }
</style>
<style include="gr-form-styles"></style>
<style include="gr-menu-page-styles"></style>
@@ -95,6 +104,19 @@
</gr-page-nav>
<main class="gr-form-styles">
<h1>User Settings</h1>
+ <section class="darkToggle">
+ <div class="toggle">
+ <paper-toggle-button
+ checked="[[_isDark]]"
+ on-change="_handleToggleDark"></paper-toggle-button>
+ <div>Dark theme (alpha)</div>
+ </div>
+ <p>
+ Gerrit's dark theme is in early alpha, and almost definitely will
+ not play nicely with themes set by specific Gerrit hosts. Filing
+ feedback via the link in the app footer is strongly encouraged!
+ </p>
+ </section>
<h2
id="Profile"
class$="[[_computeHeaderClass(_accountInfoChanged)]]">Profile</h2>
diff --git a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
index 215aaa1..213ab65 100644
--- a/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
+++ b/polygerrit-ui/app/elements/settings/gr-settings-view/gr-settings-view.js
@@ -35,6 +35,8 @@
const ABSOLUTE_URL_PATTERN = /^https?:/;
const TRAILING_SLASH_PATTERN = /\/$/;
+ const RELOAD_MESSAGE = 'Reloading...';
+
Polymer({
is: 'gr-settings-view',
@@ -45,7 +47,7 @@
*/
/**
- * Fired with email confirmation text.
+ * Fired with email confirmation text, or when the page reloads.
*
* @event show-alert
*/
@@ -132,6 +134,11 @@
_loadingPromise: Object,
_showNumber: Boolean,
+
+ _isDark: {
+ type: Boolean,
+ value: false,
+ },
},
behaviors: [
@@ -149,6 +156,8 @@
attached() {
this.fire('title-change', {title: 'Settings'});
+ this._isDark = !!window.localStorage.getItem('dark-theme');
+
const promises = [
this.$.accountInfo.loadData(),
this.$.watchedProjectsEditor.loadData(),
@@ -410,5 +419,20 @@
return base + GERRIT_DOCS_FILTER_PATH;
},
+
+ _handleToggleDark() {
+ if (this._isDark) {
+ window.localStorage.removeItem('dark-theme');
+ } else {
+ window.localStorage.setItem('dark-theme', 'true');
+ }
+ this.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {message: RELOAD_MESSAGE},
+ bubbles: true,
+ }));
+ this.async(() => {
+ window.location.reload();
+ }, 1);
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete.html b/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete.html
index ef058a5..4b447d1 100644
--- a/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete.html
+++ b/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete.html
@@ -59,7 +59,7 @@
}
paper-input.warnUncommitted {
--paper-input-container-input: {
- color: #ff0000;
+ color: var(--error-text-color);
font-size: var(--font-size-normal);
}
}
diff --git a/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete_test.html b/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete_test.html
index a72511e..585b16f 100644
--- a/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-autocomplete/gr-autocomplete_test.html
@@ -519,12 +519,8 @@
element.text = 'blah blah blah';
MockInteractions.blur(element.$.input);
assert.isTrue(inputClassList.contains('warnUncommitted'));
- assert.equal(getComputedStyle(element.$.input.inputElement).color,
- 'rgb(255, 0, 0)');
MockInteractions.focus(element.$.input);
assert.isFalse(inputClassList.contains('warnUncommitted'));
- assert.notEqual(getComputedStyle(element.$.input.inputElement).color,
- 'rgb(255, 0, 0)ed');
});
test('disabled', () => {
diff --git a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.html b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.html
index 8a70b8b..f527aa3 100644
--- a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.html
+++ b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.html
@@ -69,7 +69,7 @@
@apply --gr-dropdown-item;
}
li .itemAction.disabled {
- color: #ccc;
+ color: var(--deemphasized-text-color);
cursor: default;
}
li .itemAction:link,
@@ -145,6 +145,7 @@
<a
class="itemAction"
href$="[[_computeLinkURL(link)]]"
+ download$="[[_computeIsDownload(link)]]"
rel$="[[_computeLinkRel(link)]]"
target$="[[link.target]]"
hidden$="[[!link.url]]"
diff --git a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.js b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.js
index 70534f0..dcb428f 100644
--- a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.js
+++ b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown.js
@@ -286,5 +286,9 @@
_computeHasTooltip(tooltip) {
return !!tooltip;
},
+
+ _computeIsDownload(link) {
+ return !!link.download;
+ },
});
})();
diff --git a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown_test.html b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown_test.html
index 89b6068..456f235 100644
--- a/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-dropdown/gr-dropdown_test.html
@@ -49,6 +49,11 @@
sandbox.restore();
});
+ test('_computeIsDownload', () => {
+ assert.isTrue(element._computeIsDownload({download: true}));
+ assert.isFalse(element._computeIsDownload({download: false}));
+ });
+
test('tap on trigger opens menu, then closes', () => {
sandbox.stub(element, '_open', () => { element.$.dropdown.open(); });
sandbox.stub(element, '_close', () => { element.$.dropdown.close(); });
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
index 84b7f0a..5ac8773 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context.js
@@ -93,7 +93,14 @@
}
this.plugin.restApi()
.send(this.action.method, this.action.__url, payload)
- .then(onSuccess);
+ .then(onSuccess)
+ .catch(error => {
+ document.dispatchEvent(new CustomEvent('show-alert', {
+ detail: {
+ message: `Plugin network error: ${error}`,
+ },
+ }));
+ });
};
window.GrPluginActionContext = GrPluginActionContext;
diff --git a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
index 7c18a99..bf6a046 100644
--- a/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-js-api-interface/gr-plugin-action-context_test.html
@@ -128,5 +128,26 @@
assert.isTrue(sendStub.calledWith(
'METHOD', '/changes/1/revisions/2/foo~bar', payload));
});
+
+ test('call error', done => {
+ instance.action = {
+ method: 'METHOD',
+ __key: 'key',
+ __url: '/changes/1/revisions/2/foo~bar',
+ };
+ const sendStub = sandbox.stub().returns(Promise.reject('boom'));
+ sandbox.stub(plugin, 'restApi').returns({
+ send: sendStub,
+ });
+ const errorStub = sandbox.stub();
+ document.addEventListener('network-error', errorStub);
+ instance.call();
+ flush(() => {
+ assert.isTrue(errorStub.calledOnce);
+ assert.equal(errorStub.args[0][0].detail.message,
+ 'Plugin network error: boom');
+ done();
+ });
+ });
});
</script>
diff --git a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
index 9dc51ba..c081b30 100644
--- a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
+++ b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface.js
@@ -27,6 +27,36 @@
*/
Defs.patchRange;
+ /**
+ * Object to describe a request for passing into _fetchJSON or _fetchRawJSON.
+ * - url is the URL for the request (excluding get params)
+ * - errFn is a function to invoke when the request fails.
+ * - cancelCondition is a function that, if provided and returns true, will
+ * cancel the response after it resolves.
+ * - params is a key-value hash to specify get params for the request URL.
+ * @typedef {{
+ * url: string,
+ * errFn: (function(?Response, string=)|null|undefined),
+ * cancelCondition: (function()|null|undefined),
+ * params: (Object|null|undefined),
+ * fetchOptions: (Object|null|undefined),
+ * }}
+ */
+ Defs.FetchJSONRequest;
+
+ /**
+ * @typedef {{
+ * changeNum: (string|number),
+ * endpoint: string,
+ * patchNum: (string|number|null|undefined),
+ * errFn: (function(?Response, string=)|null|undefined),
+ * cancelCondition: (function()|null|undefined),
+ * params: (Object|null|undefined),
+ * fetchOptions: (Object|null|undefined),
+ * }}
+ */
+ Defs.ChangeFetchRequest;
+
const DiffViewMode = {
SIDE_BY_SIDE: 'SIDE_BY_SIDE',
UNIFIED: 'UNIFIED_DIFF',
@@ -112,23 +142,17 @@
* Returns a Promise that resolves to a native Response.
* Doesn't do error checking. Supports cancel condition. Performs auth.
* Validates auth expiry errors.
- * @param {string} url
- * @param {?function(?Response, string=)=} opt_errFn
- * passed as null sometimes.
- * @param {?function()=} opt_cancelCondition
- * passed as null sometimes.
- * @param {?Object=} opt_params URL params, key-value hash.
- * @param {?Object=} opt_options Fetch options.
+ * @param {Defs.FetchJSONRequest} req
+ * @return {Promise}
*/
- _fetchRawJSON(url, opt_errFn, opt_cancelCondition, opt_params,
- opt_options) {
- const urlWithParams = this._urlWithParams(url, opt_params);
- return this._auth.fetch(urlWithParams, opt_options).then(response => {
- if (opt_cancelCondition && opt_cancelCondition()) {
- response.body.cancel();
+ _fetchRawJSON(req) {
+ const urlWithParams = this._urlWithParams(req.url, req.params);
+ return this._auth.fetch(urlWithParams, req.fetchOptions).then(res => {
+ if (req.cancelCondition && req.cancelCondition()) {
+ res.body.cancel();
return;
}
- return response;
+ return res;
}).catch(err => {
const isLoggedIn = !!this._cache['/accounts/self/detail'];
if (isLoggedIn && err && err.message === FAILED_TO_FETCH_ERROR) {
@@ -139,8 +163,8 @@
CHECK_SIGN_IN_DEBOUNCE_MS);
return;
}
- if (opt_errFn) {
- opt_errFn.call(undefined, null, err);
+ if (req.errFn) {
+ req.errFn.call(undefined, null, err);
} else {
this.fire('network-error', {error: err});
}
@@ -152,31 +176,23 @@
* Fetch JSON from url provided.
* Returns a Promise that resolves to a parsed response.
* Same as {@link _fetchRawJSON}, plus error handling.
- * @param {string} url
- * @param {?function(?Response, string=)=} opt_errFn
- * passed as null sometimes.
- * @param {?function()=} opt_cancelCondition
- * passed as null sometimes.
- * @param {?Object=} opt_params URL params, key-value hash.
- * @param {?Object=} opt_options Fetch options.
+ * @param {Defs.FetchJSONRequest} req
*/
- fetchJSON(url, opt_errFn, opt_cancelCondition, opt_params, opt_options) {
- return this._fetchRawJSON(
- url, opt_errFn, opt_cancelCondition, opt_params, opt_options)
- .then(response => {
- if (!response) {
- return;
- }
- if (!response.ok) {
- if (opt_errFn) {
- opt_errFn.call(null, response);
- return;
- }
- this.fire('server-error', {response});
- return;
- }
- return response && this.getResponseObject(response);
- });
+ _fetchJSON(req) {
+ return this._fetchRawJSON(req).then(response => {
+ if (!response) {
+ return;
+ }
+ if (!response.ok) {
+ if (req.errFn) {
+ req.errFn.call(null, response);
+ return;
+ }
+ this.fire('server-error', {response});
+ return;
+ }
+ return response && this.getResponseObject(response);
+ });
},
/**
@@ -236,121 +252,120 @@
getConfig(noCache) {
if (!noCache) {
- return this._fetchSharedCacheURL('/config/server/info');
+ return this._fetchSharedCacheURL({url: '/config/server/info'});
}
- return this.fetchJSON('/config/server/info');
+ return this._fetchJSON({url: '/config/server/info'});
},
getRepo(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/projects/' + encodeURIComponent(repo), opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/projects/' + encodeURIComponent(repo),
+ errFn: opt_errFn,
+ });
},
getProjectConfig(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/projects/' + encodeURIComponent(repo) + '/config', opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/projects/' + encodeURIComponent(repo) + '/config',
+ errFn: opt_errFn,
+ });
},
getRepoAccess(repo) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- '/access/?project=' + encodeURIComponent(repo));
+ return this._fetchSharedCacheURL({
+ url: '/access/?project=' + encodeURIComponent(repo),
+ });
},
getRepoDashboards(repo, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- `/projects/${encodeURIComponent(repo)}/dashboards?inherited`,
- opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: `/projects/${encodeURIComponent(repo)}/dashboards?inherited`,
+ errFn: opt_errFn,
+ });
},
- saveRepoConfig(repo, config, opt_errFn, opt_ctx) {
+ saveRepoConfig(repo, config, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
return this.send('PUT', `/projects/${encodeName}/config`, config,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
- runRepoGC(repo, opt_errFn, opt_ctx) {
+ runRepoGC(repo, opt_errFn) {
if (!repo) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
- return this.send('POST', `/projects/${encodeName}/gc`, '',
- opt_errFn, opt_ctx);
+ return this.send('POST', `/projects/${encodeName}/gc`, '', opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepo(config, opt_errFn, opt_ctx) {
+ createRepo(config, opt_errFn) {
if (!config.name) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/projects/${encodeName}`, config, opt_errFn);
},
/**
* @param {?Object} config
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createGroup(config, opt_errFn, opt_ctx) {
+ createGroup(config, opt_errFn) {
if (!config.name) { return ''; }
const encodeName = encodeURIComponent(config.name);
- return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn,
- opt_ctx);
+ return this.send('PUT', `/groups/${encodeName}`, config, opt_errFn);
},
getGroupConfig(group, opt_errFn) {
- const encodeName = encodeURIComponent(group);
- return this.fetchJSON(`/groups/${encodeName}/detail`, opt_errFn);
+ return this._fetchJSON({
+ url: `/groups/${encodeURIComponent(group)}/detail`,
+ errFn: opt_errFn,
+ });
},
/**
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoBranches(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoBranches(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/branches/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/branches/${encodeRef}`, '', opt_errFn);
},
/**
* @param {string} repo
* @param {string} ref
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteRepoTags(repo, ref, opt_errFn, opt_ctx) {
+ deleteRepoTags(repo, ref, opt_errFn) {
if (!repo || !ref) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(repo);
const encodeRef = encodeURIComponent(ref);
return this.send('DELETE',
- `/projects/${encodeName}/tags/${encodeRef}`, '',
- opt_errFn, opt_ctx);
+ `/projects/${encodeName}/tags/${encodeRef}`, '', opt_errFn);
},
/**
@@ -358,9 +373,8 @@
* @param {string} branch
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoBranch(name, branch, revision, opt_errFn, opt_ctx) {
+ createRepoBranch(name, branch, revision, opt_errFn) {
if (!name || !branch || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
@@ -368,7 +382,7 @@
const encodeBranch = encodeURIComponent(branch);
return this.send('PUT',
`/projects/${encodeName}/branches/${encodeBranch}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -376,16 +390,15 @@
* @param {string} tag
* @param {string} revision
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- createRepoTag(name, tag, revision, opt_errFn, opt_ctx) {
+ createRepoTag(name, tag, revision, opt_errFn) {
if (!name || !tag || !revision) { return ''; }
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
const encodeName = encodeURIComponent(name);
const encodeTag = encodeURIComponent(tag);
return this.send('PUT', `/projects/${encodeName}/tags/${encodeTag}`,
- revision, opt_errFn, opt_ctx);
+ revision, opt_errFn);
},
/**
@@ -394,7 +407,7 @@
*/
getIsGroupOwner(groupName) {
const encodeName = encodeURIComponent(groupName);
- return this._fetchSharedCacheURL(`/groups/?owned&q=${encodeName}`)
+ return this._fetchSharedCacheURL({url: `/groups/?owned&q=${encodeName}`})
.then(configs => configs.hasOwnProperty(groupName));
},
@@ -432,8 +445,10 @@
},
getGroupAuditLog(group, opt_errFn) {
- return this._fetchSharedCacheURL(
- '/groups/' + group + '/log.audit', opt_errFn);
+ return this._fetchSharedCacheURL({
+ url: '/groups/' + group + '/log.audit',
+ errFn: opt_errFn,
+ });
},
saveGroupMembers(groupName, groupMembers) {
@@ -470,13 +485,15 @@
},
getVersion() {
- return this._fetchSharedCacheURL('/config/server/version');
+ return this._fetchSharedCacheURL({url: '/config/server/version'});
},
getDiffPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences.diff');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/preferences.diff',
+ });
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/DiffPreferencesInfo.java
@@ -504,7 +521,9 @@
getEditPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences.edit');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/preferences.edit',
+ });
}
// These defaults should match the defaults in
// java/com/google/gerrit/extensions/client/EditPreferencesInfo.java
@@ -532,53 +551,52 @@
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- savePreferences(prefs, opt_errFn, opt_ctx) {
+ savePreferences(prefs, opt_errFn) {
// Note (Issue 5142): normalize the download scheme with lower case before
// saving.
if (prefs.download_scheme) {
prefs.download_scheme = prefs.download_scheme.toLowerCase();
}
- return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn,
- opt_ctx);
+ return this.send('PUT', '/accounts/self/preferences', prefs, opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveDiffPreferences(prefs, opt_errFn, opt_ctx) {
+ saveDiffPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.diff'] = undefined;
return this.send('PUT', '/accounts/self/preferences.diff', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
/**
* @param {?Object} prefs
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveEditPreferences(prefs, opt_errFn, opt_ctx) {
+ saveEditPreferences(prefs, opt_errFn) {
// Invalidate the cache.
this._cache['/accounts/self/preferences.edit'] = undefined;
return this.send('PUT', '/accounts/self/preferences.edit', prefs,
- opt_errFn, opt_ctx);
+ opt_errFn);
},
getAccount() {
- return this._fetchSharedCacheURL('/accounts/self/detail', resp => {
- if (!resp || resp.status === 403) {
- this._cache['/accounts/self/detail'] = null;
- }
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/detail',
+ errFn: resp => {
+ if (!resp || resp.status === 403) {
+ this._cache['/accounts/self/detail'] = null;
+ }
+ },
});
},
getExternalIds() {
- return this.fetchJSON('/accounts/self/external.ids');
+ return this._fetchJSON({url: '/accounts/self/external.ids'});
},
deleteAccountIdentity(id) {
@@ -591,56 +609,55 @@
* @return {!Promise<!Object>}
*/
getAccountDetails(userId) {
- return this.fetchJSON(`/accounts/${encodeURIComponent(userId)}/detail`);
+ return this._fetchJSON({
+ url: `/accounts/${encodeURIComponent(userId)}/detail`,
+ });
},
getAccountEmails() {
- return this._fetchSharedCacheURL('/accounts/self/emails');
+ return this._fetchSharedCacheURL({url: '/accounts/self/emails'});
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- addAccountEmail(email, opt_errFn, opt_ctx) {
+ addAccountEmail(email, opt_errFn) {
return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteAccountEmail(email, opt_errFn, opt_ctx) {
+ deleteAccountEmail(email, opt_errFn) {
return this.send('DELETE', '/accounts/self/emails/' +
- encodeURIComponent(email), null, opt_errFn, opt_ctx);
+ encodeURIComponent(email), null, opt_errFn);
},
/**
* @param {string} email
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setPreferredAccountEmail(email, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/emails/' +
- encodeURIComponent(email) + '/preferred', null,
- opt_errFn, opt_ctx).then(() => {
- // If result of getAccountEmails is in cache, update it in the cache
- // so we don't have to invalidate it.
- const cachedEmails = this._cache['/accounts/self/emails'];
- if (cachedEmails) {
- const emails = cachedEmails.map(entry => {
- if (entry.email === email) {
- return {email, preferred: true};
- } else {
- return {email};
- }
- });
- this._cache['/accounts/self/emails'] = emails;
+ setPreferredAccountEmail(email, opt_errFn) {
+ const encodedEmail = encodeURIComponent(email);
+ const url = `/accounts/self/emails/${encodedEmail}/preferred`;
+ return this.send('PUT', url, null, opt_errFn).then(() => {
+ // If result of getAccountEmails is in cache, update it in the cache
+ // so we don't have to invalidate it.
+ const cachedEmails = this._cache['/accounts/self/emails'];
+ if (cachedEmails) {
+ const emails = cachedEmails.map(entry => {
+ if (entry.email === email) {
+ return {email, preferred: true};
+ } else {
+ return {email};
}
});
+ this._cache['/accounts/self/emails'] = emails;
+ }
+ });
},
/**
@@ -660,47 +677,45 @@
/**
* @param {string} name
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountName(name, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/name', {name}, opt_errFn, opt_ctx)
- .then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({name: newName})));
+ setAccountName(name, opt_errFn) {
+ return this.send('PUT', '/accounts/self/name', {name}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({name: newName}));
},
/**
* @param {string} username
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountUsername(username, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/username', {username}, opt_errFn,
- opt_ctx).then(response => this.getResponseObject(response)
- .then(newName => this._updateCachedAccount({username: newName})));
+ setAccountUsername(username, opt_errFn) {
+ return this.send('PUT', '/accounts/self/username', {username}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newName => this._updateCachedAccount({username: newName}));
},
/**
* @param {string} status
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- setAccountStatus(status, opt_errFn, opt_ctx) {
- return this.send('PUT', '/accounts/self/status', {status},
- opt_errFn, opt_ctx).then(response => this.getResponseObject(response)
- .then(newStatus => this._updateCachedAccount(
- {status: newStatus})));
+ setAccountStatus(status, opt_errFn) {
+ return this.send('PUT', '/accounts/self/status', {status}, opt_errFn)
+ .then(response => this.getResponseObject(response))
+ .then(newStatus => this._updateCachedAccount({status: newStatus}));
},
getAccountStatus(userId) {
- return this.fetchJSON(`/accounts/${encodeURIComponent(userId)}/status`);
+ return this._fetchJSON({
+ url: `/accounts/${encodeURIComponent(userId)}/status`,
+ });
},
getAccountGroups() {
- return this.fetchJSON('/accounts/self/groups');
+ return this._fetchJSON({url: '/accounts/self/groups'});
},
getAccountAgreements() {
- return this.fetchJSON('/accounts/self/agreements');
+ return this._fetchJSON({url: '/accounts/self/agreements'});
},
saveAccountAgreement(name) {
@@ -717,8 +732,9 @@
.map(param => { return encodeURIComponent(param); })
.join('&q=');
}
- return this._fetchSharedCacheURL('/accounts/self/capabilities' +
- queryString);
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/capabilities' + queryString,
+ });
},
getLoggedIn() {
@@ -741,31 +757,31 @@
checkCredentials() {
// Skip the REST response cache.
- return this._fetchRawJSON('/accounts/self/detail').then(response => {
- if (!response) { return; }
- if (response.status === 403) {
+ return this._fetchRawJSON({url: '/accounts/self/detail'}).then(res => {
+ if (!res) { return; }
+ if (res.status === 403) {
this.fire('auth-error');
this._cache['/accounts/self/detail'] = null;
- } else if (response.ok) {
- return this.getResponseObject(response);
+ } else if (res.ok) {
+ return this.getResponseObject(res);
}
- }).then(response => {
- if (response) {
- this._cache['/accounts/self/detail'] = response;
+ }).then(res => {
+ if (res) {
+ this._cache['/accounts/self/detail'] = res;
}
- return response;
+ return res;
});
},
getDefaultPreferences() {
- return this._fetchSharedCacheURL('/config/server/preferences');
+ return this._fetchSharedCacheURL({url: '/config/server/preferences'});
},
getPreferences() {
return this.getLoggedIn().then(loggedIn => {
if (loggedIn) {
- return this._fetchSharedCacheURL('/accounts/self/preferences').then(
- res => {
+ return this._fetchSharedCacheURL({url: '/accounts/self/preferences'})
+ .then(res => {
if (this._isNarrowScreen()) {
res.default_diff_view = DiffViewMode.UNIFIED;
} else {
@@ -786,56 +802,53 @@
},
getWatchedProjects() {
- return this._fetchSharedCacheURL('/accounts/self/watched.projects');
+ return this._fetchSharedCacheURL({
+ url: '/accounts/self/watched.projects',
+ });
},
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveWatchedProjects(projects, opt_errFn, opt_ctx) {
- return this.send('POST', '/accounts/self/watched.projects', projects,
- opt_errFn, opt_ctx)
- .then(response => {
- return this.getResponseObject(response);
- });
+ saveWatchedProjects(projects, opt_errFn) {
+ const url = '/accounts/self/watched.projects';
+ return this.send('POST', url, projects, opt_errFn)
+ .then(response => this.getResponseObject(response));
},
/**
* @param {string} projects
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- deleteWatchedProjects(projects, opt_errFn, opt_ctx) {
+ deleteWatchedProjects(projects, opt_errFn) {
return this.send('POST', '/accounts/self/watched.projects:delete',
- projects, opt_errFn, opt_ctx);
+ projects, opt_errFn);
},
/**
- * @param {string} url
- * @param {function(?Response, string=)=} opt_errFn
+ * @param {Defs.FetchJSONRequest} req
*/
- _fetchSharedCacheURL(url, opt_errFn) {
- if (this._sharedFetchPromises[url]) {
- return this._sharedFetchPromises[url];
+ _fetchSharedCacheURL(req) {
+ if (this._sharedFetchPromises[req.url]) {
+ return this._sharedFetchPromises[req.url];
}
// TODO(andybons): Periodic cache invalidation.
- if (this._cache[url] !== undefined) {
- return Promise.resolve(this._cache[url]);
+ if (this._cache[req.url] !== undefined) {
+ return Promise.resolve(this._cache[req.url]);
}
- this._sharedFetchPromises[url] = this.fetchJSON(url, opt_errFn)
+ this._sharedFetchPromises[req.url] = this._fetchJSON(req)
.then(response => {
if (response !== undefined) {
- this._cache[url] = response;
+ this._cache[req.url] = response;
}
- this._sharedFetchPromises[url] = undefined;
+ this._sharedFetchPromises[req.url] = undefined;
return response;
}).catch(err => {
- this._sharedFetchPromises[url] = undefined;
+ this._sharedFetchPromises[req.url] = undefined;
throw err;
});
- return this._sharedFetchPromises[url];
+ return this._sharedFetchPromises[req.url];
},
_isNarrowScreen() {
@@ -848,8 +861,8 @@
* @param {number|string=} opt_offset
* @param {!Object=} opt_options
* @return {?Array<!Object>|?Array<!Array<!Object>>} If opt_query is an
- * array, fetchJSON will return an array of arrays of changeInfos. If it
- * is unspecified or a string, fetchJSON will return an array of
+ * array, _fetchJSON will return an array of arrays of changeInfos. If it
+ * is unspecified or a string, _fetchJSON will return an array of
* changeInfos.
*/
getChanges(opt_changesPerPage, opt_query, opt_offset, opt_options) {
@@ -874,7 +887,7 @@
this._maybeInsertInLookup(change);
}
};
- return this.fetchJSON('/changes/', null, null, params).then(response => {
+ return this._fetchJSON({url: '/changes/', params}).then(response => {
// Response may be an array of changes OR an array of arrays of
// changes.
if (opt_query instanceof Array) {
@@ -959,43 +972,43 @@
* @param {function(?Response, string=)=} opt_errFn
* @param {function()=} opt_cancelCondition
*/
- _getChangeDetail(changeNum, params, opt_errFn,
- opt_cancelCondition) {
+ _getChangeDetail(changeNum, params, opt_errFn, opt_cancelCondition) {
return this.getChangeActionURL(changeNum, null, '/detail').then(url => {
const urlWithParams = this._urlWithParams(url, params);
- return this._fetchRawJSON(
- url,
- opt_errFn,
- opt_cancelCondition,
- {O: params},
- this._etags.getOptions(urlWithParams))
- .then(response => {
- if (response && response.status === 304) {
- return Promise.resolve(this._parsePrefixedJSON(
- this._etags.getCachedPayload(urlWithParams)));
- }
+ const req = {
+ url,
+ errFn: opt_errFn,
+ cancelCondition: opt_cancelCondition,
+ params: {O: params},
+ fetchOptions: this._etags.getOptions(urlWithParams),
+ };
+ return this._fetchRawJSON(req).then(response => {
+ if (response && response.status === 304) {
+ return Promise.resolve(this._parsePrefixedJSON(
+ this._etags.getCachedPayload(urlWithParams)));
+ }
- if (response && !response.ok) {
- if (opt_errFn) {
- opt_errFn.call(null, response);
- } else {
- this.fire('server-error', {response});
- }
- return;
- }
+ if (response && !response.ok) {
+ if (opt_errFn) {
+ opt_errFn.call(null, response);
+ } else {
+ this.fire('server-error', {response});
+ }
+ return;
+ }
- const payloadPromise = response ?
- this._readResponsePayload(response) :
- Promise.resolve(null);
+ const payloadPromise = response ?
+ this._readResponsePayload(response) :
+ Promise.resolve(null);
- return payloadPromise.then(payload => {
- if (!payload) { return null; }
- this._etags.collect(urlWithParams, response, payload.raw);
- this._maybeInsertInLookup(payload.parsed);
+ return payloadPromise.then(payload => {
+ if (!payload) { return null; }
+ this._etags.collect(urlWithParams, response, payload.raw);
+ this._maybeInsertInLookup(payload.parsed);
- return payload.parsed;
- });
- });
+ return payload.parsed;
+ });
+ });
});
},
@@ -1004,7 +1017,11 @@
* @param {number|string} patchNum
*/
getChangeCommitInfo(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/commit?links', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/commit?links',
+ patchNum,
+ });
},
/**
@@ -1019,8 +1036,12 @@
} else if (!this.patchNumEquals(patchRange.basePatchNum, 'PARENT')) {
params = {base: patchRange.basePatchNum};
}
- return this._getChangeURLAndFetch(changeNum, '/files',
- patchRange.patchNum, undefined, undefined, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/files',
+ patchNum: patchRange.patchNum,
+ params,
+ });
},
/**
@@ -1032,7 +1053,7 @@
if (patchRange.basePatchNum !== 'PARENT') {
endpoint += '&base=' + encodeURIComponent(patchRange.basePatchNum + '');
}
- return this._getChangeURLAndFetch(changeNum, endpoint);
+ return this._getChangeURLAndFetch({changeNum, endpoint});
},
/**
@@ -1042,8 +1063,11 @@
* @return {!Promise<!Object>}
*/
queryChangeFiles(changeNum, patchNum, query) {
- return this._getChangeURLAndFetch(changeNum,
- `/files?q=${encodeURIComponent(query)}`, patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: `/files?q=${encodeURIComponent(query)}`,
+ patchNum,
+ });
},
/**
@@ -1071,16 +1095,16 @@
},
getChangeRevisionActions(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/actions', patchNum)
- .then(revisionActions => {
- // The rebase button on change screen is always enabled.
- if (revisionActions.rebase) {
- revisionActions.rebase.rebaseOnCurrent =
- !!revisionActions.rebase.enabled;
- revisionActions.rebase.enabled = true;
- }
- return revisionActions;
- });
+ const req = {changeNum, endpoint: '/actions', patchNum};
+ return this._getChangeURLAndFetch(req).then(revisionActions => {
+ // The rebase button on change screen is always enabled.
+ if (revisionActions.rebase) {
+ revisionActions.rebase.rebaseOnCurrent =
+ !!revisionActions.rebase.enabled;
+ revisionActions.rebase.enabled = true;
+ }
+ return revisionActions;
+ });
},
/**
@@ -1091,15 +1115,19 @@
getChangeSuggestedReviewers(changeNum, inputVal, opt_errFn) {
const params = {n: 10};
if (inputVal) { params.q = inputVal; }
- return this._getChangeURLAndFetch(changeNum, '/suggest_reviewers', null,
- opt_errFn, null, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/suggest_reviewers',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {number|string} changeNum
*/
getChangeIncludedIn(changeNum) {
- return this._getChangeURLAndFetch(changeNum, '/in', null);
+ return this._getChangeURLAndFetch({changeNum, endpoint: '/in'});
},
_computeFilter(filter) {
@@ -1122,10 +1150,10 @@
getGroups(filter, groupsPerPage, opt_offset) {
const offset = opt_offset || 0;
- return this._fetchSharedCacheURL(
- `/groups/?n=${groupsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter)
- );
+ return this._fetchSharedCacheURL({
+ url: `/groups/?n=${groupsPerPage + 1}&S=${offset}` +
+ this._computeFilter(filter),
+ });
},
/**
@@ -1139,10 +1167,10 @@
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this._fetchSharedCacheURL(
- `/projects/?d&n=${reposPerPage + 1}&S=${offset}` +
- this._computeFilter(filter)
- );
+ return this._fetchSharedCacheURL({
+ url: `/projects/?d&n=${reposPerPage + 1}&S=${offset}` +
+ this._computeFilter(filter),
+ });
},
setRepoHead(repo, ref) {
@@ -1162,15 +1190,13 @@
*/
getRepoBranches(filter, repo, reposBranchesPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
+ const count = reposBranchesPerPage + 1;
+ filter = this._computeFilter(filter);
+ repo = encodeURIComponent(repo);
+ const url = `/projects/${repo}/branches?n=${count}&S=${offset}${filter}`;
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repo)}/branches` +
- `?n=${reposBranchesPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ return this._fetchJSON({url, errFn: opt_errFn});
},
/**
@@ -1183,15 +1209,14 @@
*/
getRepoTags(filter, repo, reposTagsPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
+ const encodedRepo = encodeURIComponent(repo);
+ const n = reposTagsPerPage + 1;
+ const encodedFilter = this._computeFilter(filter);
+ const url = `/projects/${encodedRepo}/tags` + `?n=${n}&S=${offset}` +
+ encodedFilter;
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repo)}/tags` +
- `?n=${reposTagsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ return this._fetchJSON({url, errFn: opt_errFn});
},
/**
@@ -1203,21 +1228,19 @@
*/
getPlugins(filter, pluginsPerPage, opt_offset, opt_errFn) {
const offset = opt_offset || 0;
-
- return this.fetchJSON(
- `/plugins/?all&n=${pluginsPerPage + 1}&S=${offset}` +
- this._computeFilter(filter),
- opt_errFn
- );
+ const encodedFilter = this._computeFilter(filter);
+ const n = pluginsPerPage + 1;
+ const url = `/plugins/?all&n=${n}&S=${offset}${encodedFilter}`;
+ return this._fetchJSON({url, errFn: opt_errFn});
},
getRepoAccessRights(repoName, opt_errFn) {
// TODO(kaspern): Rename rest api from /projects/ to /repos/ once backend
// supports it.
- return this.fetchJSON(
- `/projects/${encodeURIComponent(repoName)}/access`,
- opt_errFn
- );
+ return this._fetchJSON({
+ url: `/projects/${encodeURIComponent(repoName)}/access`,
+ errFn: opt_errFn,
+ });
},
setRepoAccessRights(repoName, repoInfo) {
@@ -1238,43 +1261,52 @@
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedGroups(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedGroups(inputVal, opt_n, opt_errFn) {
const params = {s: inputVal};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/groups/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/groups/',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedProjects(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedProjects(inputVal, opt_n, opt_errFn) {
const params = {
m: inputVal,
n: MAX_PROJECT_RESULTS,
type: 'ALL',
};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/projects/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/projects/',
+ errFn: opt_errFn,
+ params,
+ });
},
/**
* @param {string} inputVal
* @param {number} opt_n
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- getSuggestedAccounts(inputVal, opt_n, opt_errFn, opt_ctx) {
+ getSuggestedAccounts(inputVal, opt_n, opt_errFn) {
if (!inputVal) {
return Promise.resolve([]);
}
const params = {suggest: null, q: inputVal};
if (opt_n) { params.n = opt_n; }
- return this.fetchJSON('/accounts/', opt_errFn, opt_ctx, params);
+ return this._fetchJSON({
+ url: '/accounts/',
+ errFn: opt_errFn,
+ params,
+ });
},
addChangeReviewer(changeNum, reviewerID) {
@@ -1305,11 +1337,18 @@
},
getRelatedChanges(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/related', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/related',
+ patchNum,
+ });
},
getChangesSubmittedTogether(changeNum) {
- return this._getChangeURLAndFetch(changeNum, '/submitted_together', null);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/submitted_together',
+ });
},
getChangeConflicts(changeNum) {
@@ -1321,7 +1360,7 @@
O: options,
q: 'status:open is:mergeable conflicts:' + changeNum,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getChangeCherryPicks(project, changeID, changeNum) {
@@ -1339,7 +1378,7 @@
O: options,
q: query,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getChangesWithSameTopic(topic) {
@@ -1353,11 +1392,15 @@
O: options,
q: 'status:open topic:' + topic,
};
- return this.fetchJSON('/changes/', null, null, params);
+ return this._fetchJSON({url: '/changes/', params});
},
getReviewedFiles(changeNum, patchNum) {
- return this._getChangeURLAndFetch(changeNum, '/files?reviewed', patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/files?reviewed',
+ patchNum,
+ });
},
/**
@@ -1366,13 +1409,12 @@
* @param {string} path
* @param {boolean} reviewed
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn, opt_ctx) {
+ saveFileReviewed(changeNum, patchNum, path, reviewed, opt_errFn) {
const method = reviewed ? 'PUT' : 'DELETE';
- const e = `/files/${encodeURIComponent(path)}/reviewed`;
- return this.getChangeURLAndSend(changeNum, method, patchNum, e, null,
- opt_errFn, opt_ctx);
+ const endpoint = `/files/${encodeURIComponent(path)}/reviewed`;
+ return this.getChangeURLAndSend(changeNum, method, patchNum, endpoint,
+ null, opt_errFn);
},
/**
@@ -1380,25 +1422,26 @@
* @param {number|string} patchNum
* @param {!Object} review
* @param {function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
*/
- saveChangeReview(changeNum, patchNum, review, opt_errFn, opt_ctx) {
+ saveChangeReview(changeNum, patchNum, review, opt_errFn) {
const promises = [
this.awaitPendingDiffDrafts(),
this.getChangeActionURL(changeNum, patchNum, '/review'),
];
return Promise.all(promises).then(([, url]) => {
- return this.send('POST', url, review, opt_errFn, opt_ctx);
+ return this.send('POST', url, review, opt_errFn);
});
},
getChangeEdit(changeNum, opt_download_commands) {
const params = opt_download_commands ? {'download-commands': true} : null;
return this.getLoggedIn().then(loggedIn => {
- return loggedIn ?
- this._getChangeURLAndFetch(changeNum, '/edit/', null, null, null,
- params) :
- false;
+ if (!loggedIn) { return false; }
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: '/edit/',
+ params,
+ });
});
},
@@ -1465,7 +1508,7 @@
const e = `/files/${encodeURIComponent(path)}/content`;
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', patchNum, e, null,
- opt_errFn, null, null, headers);
+ opt_errFn, null, headers);
},
/**
@@ -1477,7 +1520,7 @@
const e = '/edit/' + encodeURIComponent(path);
const headers = {Accept: 'application/json'};
return this.getChangeURLAndSend(changeNum, 'GET', null, e, null, null,
- null, null, headers);
+ null, headers);
},
rebaseChangeEdit(changeNum) {
@@ -1506,7 +1549,7 @@
saveChangeEdit(changeNum, path, contents) {
const e = '/edit/' + encodeURIComponent(path);
return this.getChangeURLAndSend(changeNum, 'PUT', null, e, contents, null,
- null, 'text/plain');
+ 'text/plain');
},
// Deprecated, prefer to use putChangeCommitMessage instead.
@@ -1540,12 +1583,10 @@
* number at least.
* @param {?function(?Response, string=)=} opt_errFn
* passed as null sometimes.
- * @param {?=} opt_ctx
* @param {?string=} opt_contentType
* @param {Object=} opt_headers
*/
- send(method, url, opt_body, opt_errFn, opt_ctx, opt_contentType,
- opt_headers) {
+ send(method, url, opt_body, opt_errFn, opt_contentType, opt_headers) {
const options = {method};
if (opt_body) {
options.headers = new Headers();
@@ -1569,7 +1610,7 @@
return this._auth.fetch(url, options).then(response => {
if (!response.ok) {
if (opt_errFn) {
- return opt_errFn.call(opt_ctx || null, response);
+ return opt_errFn.call(null, response);
}
this.fire('server-error', {response});
}
@@ -1577,7 +1618,7 @@
}).catch(err => {
this.fire('network-error', {error: err});
if (opt_errFn) {
- return opt_errFn.call(opt_ctx, null, err);
+ return opt_errFn.call(null, null, err);
} else {
throw err;
}
@@ -1607,8 +1648,14 @@
}
const endpoint = `/files/${encodeURIComponent(path)}/diff`;
- return this._getChangeURLAndFetch(changeNum, endpoint, patchNum,
- opt_errFn, opt_cancelCondition, params);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint,
+ patchNum,
+ errFn: opt_errFn,
+ cancelCondition: opt_cancelCondition,
+ params,
+ });
},
/**
@@ -1695,7 +1742,11 @@
* @return {!Promise<!Object>} Diff comments response.
*/
const fetchComments = opt_patchNum => {
- return this._getChangeURLAndFetch(changeNum, endpoint, opt_patchNum);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint,
+ patchNum: opt_patchNum,
+ });
};
if (!opt_basePatchNum && !opt_patchNum && !opt_path) {
@@ -1809,9 +1860,10 @@
},
getCommitInfo(project, commit) {
- return this.fetchJSON(
- '/projects/' + encodeURIComponent(project) +
- '/commits/' + encodeURIComponent(commit));
+ return this._fetchJSON({
+ url: '/projects/' + encodeURIComponent(project) +
+ '/commits/' + encodeURIComponent(commit),
+ });
},
_fetchB64File(url) {
@@ -1940,11 +1992,11 @@
},
getAccountSSHKeys() {
- return this._fetchSharedCacheURL('/accounts/self/sshkeys');
+ return this._fetchSharedCacheURL({url: '/accounts/self/sshkeys'});
},
addAccountSSHKey(key) {
- return this.send('POST', '/accounts/self/sshkeys', key, null, null,
+ return this.send('POST', '/accounts/self/sshkeys', key, null,
'plain/text')
.then(response => {
if (response.status < 200 && response.status >= 300) {
@@ -1963,7 +2015,7 @@
},
getAccountGPGKeys() {
- return this.fetchJSON('/accounts/self/gpgkeys');
+ return this._fetchJSON({url: '/accounts/self/gpgkeys'});
},
addAccountGPGKey(key) {
@@ -2006,7 +2058,10 @@
},
getCapabilities(token, opt_errFn) {
- return this.fetchJSON('/config/server/capabilities', opt_errFn);
+ return this._fetchJSON({
+ url: '/config/server/capabilities',
+ errFn: opt_errFn,
+ });
},
setAssignee(changeNum, assignee) {
@@ -2073,11 +2128,13 @@
*/
getChange(changeNum, opt_errFn) {
// Cannot use _changeBaseURL, as this function is used by _projectLookup.
- return this.fetchJSON(`/changes/?q=change:${changeNum}`, opt_errFn)
- .then(res => {
- if (!res || !res.length) { return null; }
- return res[0];
- });
+ return this._fetchJSON({
+ url: `/changes/?q=change:${changeNum}`,
+ errFn: opt_errFn,
+ }).then(res => {
+ if (!res || !res.length) { return null; }
+ return res[0];
+ });
},
/**
@@ -2127,36 +2184,31 @@
* @param {?Object|number|string=} opt_payload gets passed as null, string,
* Object, or number.
* @param {?function(?Response, string=)=} opt_errFn
- * @param {?=} opt_ctx
* @param {?=} opt_contentType
* @param {Object=} opt_headers
* @return {!Promise<!Object>}
*/
getChangeURLAndSend(changeNum, method, patchNum, endpoint, opt_payload,
- opt_errFn, opt_ctx, opt_contentType, opt_headers) {
- return this._changeBaseURL(changeNum, patchNum).then(url => {
- return this.send(method, url + endpoint, opt_payload, opt_errFn,
- opt_ctx, opt_contentType, opt_headers);
- });
+ opt_errFn, opt_contentType, opt_headers) {
+ return this._changeBaseURL(changeNum, patchNum).then(url =>
+ this.send(method, url + endpoint, opt_payload, opt_errFn,
+ opt_contentType, opt_headers));
},
- /**
- * Alias for _changeBaseURL.then(fetchJSON).
- * @todo(beckysiegel) clean up comments
- * @param {string|number} changeNum
- * @param {string} endpoint
- * @param {?string|number=} opt_patchNum gets passed as null.
- * @param {?function(?Response, string=)=} opt_errFn gets passed as null.
- * @param {?function()=} opt_cancelCondition gets passed as null.
- * @param {?Object=} opt_params gets passed as null.
- * @param {!Object=} opt_options
- * @return {!Promise<!Object>}
- */
- _getChangeURLAndFetch(changeNum, endpoint, opt_patchNum, opt_errFn,
- opt_cancelCondition, opt_params, opt_options) {
- return this._changeBaseURL(changeNum, opt_patchNum).then(url => {
- return this.fetchJSON(url + endpoint, opt_errFn, opt_cancelCondition,
- opt_params, opt_options);
+ /**
+ * Alias for _changeBaseURL.then(_fetchJSON).
+ * @param {Defs.ChangeFetchRequest} req
+ * @return {!Promise<!Object>}
+ */
+ _getChangeURLAndFetch(req) {
+ return this._changeBaseURL(req.changeNum, req.patchNum).then(url => {
+ return this._fetchJSON({
+ url: url + req.endpoint,
+ errFn: req.errFn,
+ cancelCondition: req.cancelCondition,
+ params: req.params,
+ fetchOptions: req.fetchOptions,
+ });
});
},
@@ -2171,9 +2223,12 @@
*/
getBlame(changeNum, patchNum, path, opt_base) {
const encodedPath = encodeURIComponent(path);
- return this._getChangeURLAndFetch(changeNum,
- `/files/${encodedPath}/blame`, patchNum, undefined, undefined,
- opt_base ? {base: 't'} : undefined);
+ return this._getChangeURLAndFetch({
+ changeNum,
+ endpoint: `/files/${encodedPath}/blame`,
+ patchNum,
+ params: opt_base ? {base: 't'} : undefined,
+ });
},
/**
@@ -2217,7 +2272,7 @@
getDashboard(project, dashboard, opt_errFn) {
const url = '/projects/' + encodeURIComponent(project) + '/dashboards/' +
encodeURIComponent(dashboard);
- return this._fetchSharedCacheURL(url, opt_errFn);
+ return this._fetchSharedCacheURL({url, errFn: opt_errFn});
},
getMergeable(changeNum) {
diff --git a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
index fb20da4..7e71efa 100644
--- a/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-rest-api-interface/gr-rest-api-interface_test.html
@@ -58,7 +58,7 @@
});
test('JSON prefix is properly removed', done => {
- element.fetchJSON('/dummy/url').then(obj => {
+ element._fetchJSON('/dummy/url').then(obj => {
assert.deepEqual(obj, {hello: 'bonjour'});
done();
});
@@ -66,7 +66,7 @@
test('cached results', done => {
let n = 0;
- sandbox.stub(element, 'fetchJSON', () => {
+ sandbox.stub(element, '_fetchJSON', () => {
return Promise.resolve(++n);
});
const promises = [];
@@ -86,7 +86,7 @@
test('cached promise', done => {
const promise = Promise.reject('foo');
element._cache['/foo'] = promise;
- element._fetchSharedCacheURL('/foo').catch(p => {
+ element._fetchSharedCacheURL({url: '/foo'}).catch(p => {
assert.equal(p, 'foo');
done();
});
@@ -120,7 +120,8 @@
cancel() { cancelCalled = true; },
},
}));
- element.fetchJSON('/dummy/url', null, () => { return true; }).then(
+ const cancelCondition = () => { return true; };
+ element._fetchJSON({url: '/dummy/url', cancelCondition}).then(
obj => {
assert.isUndefined(obj);
assert.isTrue(cancelCalled);
@@ -129,7 +130,7 @@
});
test('parent diff comments are properly grouped', done => {
- sandbox.stub(element, 'fetchJSON', () => {
+ sandbox.stub(element, '_fetchJSON', () => {
return Promise.resolve({
'/COMMIT_MSG': [],
'sieve.go': [
@@ -272,7 +273,8 @@
test('differing patch diff comments are properly grouped', done => {
sandbox.stub(element, 'getFromProjectLookup')
.returns(Promise.resolve('test'));
- sandbox.stub(element, 'fetchJSON', url => {
+ sandbox.stub(element, '_fetchJSON', request => {
+ const url = request.url;
if (url === '/changes/test~42/revisions/1') {
return Promise.resolve({
'/COMMIT_MSG': [],
@@ -386,11 +388,11 @@
});
suite('rebase action', () => {
- let resolveFetchJSON;
+ let resolve_fetchJSON;
setup(() => {
- sandbox.stub(element, 'fetchJSON').returns(
+ sandbox.stub(element, '_fetchJSON').returns(
new Promise(resolve => {
- resolveFetchJSON = resolve;
+ resolve_fetchJSON = resolve;
}));
});
@@ -401,7 +403,7 @@
assert.isFalse(response.rebase.rebaseOnCurrent);
done();
});
- resolveFetchJSON({rebase: {}});
+ resolve_fetchJSON({rebase: {}});
});
test('rebase on current', done => {
@@ -411,7 +413,7 @@
assert.isTrue(response.rebase.rebaseOnCurrent);
done();
});
- resolveFetchJSON({rebase: {enabled: true}});
+ resolve_fetchJSON({rebase: {enabled: true}});
});
});
@@ -423,7 +425,7 @@
element.addEventListener('server-error', resolve);
});
- element.fetchJSON().then(response => {
+ element._fetchJSON({}).then(response => {
assert.isUndefined(response);
assert.isTrue(getResponseObjectStub.notCalled);
serverErrorEventPromise.then(() => done());
@@ -444,7 +446,7 @@
element.addEventListener('server-error', serverErrorStub);
const authErrorStub = sandbox.stub();
element.addEventListener('auth-error', authErrorStub);
- element.fetchJSON('/bar').then(r => {
+ element._fetchJSON('/bar').then(r => {
flush(() => {
assert.isTrue(authErrorStub.called);
assert.isFalse(serverErrorStub.called);
@@ -484,10 +486,10 @@
});
test('legacy n,z key in change url is replaced', () => {
- const stub = sandbox.stub(element, 'fetchJSON')
+ const stub = sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([]));
element.getChanges(1, null, 'n,z');
- assert.equal(stub.args[0][3].S, 0);
+ assert.equal(stub.lastCall.args[0].params.S, 0);
});
test('saveDiffPreferences invalidates cache line', () => {
@@ -512,7 +514,7 @@
});
element._cache[cacheKey] = 'fake cache';
- stub.callArg(1);
+ stub.lastCall.args[0].errFn();
});
test('getAccount does not add to the cache when resp.status is 403',
@@ -527,7 +529,7 @@
done();
});
element._cache[cacheKey] = 'fake cache';
- stub.callArgWith(1, {status: 403});
+ stub.lastCall.args[0].errFn({status: 403});
});
test('getAccount when resp is successful', done => {
@@ -541,7 +543,8 @@
done();
});
element._cache[cacheKey] = 'fake cache';
- stub.callArg(1, {});
+
+ stub.lastCall.args[0].errFn({});
});
const preferenceSetup = function(testJSON, loggedIn, smallScreen) {
@@ -872,66 +875,69 @@
const fetchStub = sandbox.stub(element, '_getChangeURLAndFetch')
.returns(Promise.resolve());
return element.queryChangeFiles('42', 'edit', 'test/path.js').then(() => {
- assert.deepEqual(fetchStub.lastCall.args,
- ['42', '/files?q=test%2Fpath.js', 'edit']);
+ assert.deepEqual(fetchStub.lastCall.args[0], {
+ changeNum: '42',
+ endpoint: '/files?q=test%2Fpath.js',
+ patchNum: 'edit',
+ });
});
});
test('getRepos', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('test', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&m=test'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&m=test');
element.getRepos(null, 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0');
element.getRepos('test', 25, 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=25&m=test'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=25&m=test');
});
test('getRepos filter', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('test/test/test', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&m=test%2Ftest%2Ftest'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&m=test%2Ftest%2Ftest');
});
test('getRepos filter regex', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getRepos('^test.*', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/projects/?d&n=26&S=0&r=%5Etest.*'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/projects/?d&n=26&S=0&r=%5Etest.*');
});
test('getGroups filter regex', () => {
sandbox.stub(element, '_fetchSharedCacheURL');
element.getGroups('^test.*', 25);
- assert.isTrue(element._fetchSharedCacheURL.lastCall
- .calledWithExactly('/groups/?n=26&S=0&r=%5Etest.*'));
+ assert.equal(element._fetchSharedCacheURL.lastCall.args[0].url,
+ '/groups/?n=26&S=0&r=%5Etest.*');
});
test('gerrit auth is used', () => {
sandbox.stub(Gerrit.Auth, 'fetch').returns(Promise.resolve());
- element.fetchJSON('foo');
+ element._fetchJSON('foo');
assert(Gerrit.Auth.fetch.called);
});
- test('getSuggestedAccounts does not return fetchJSON', () => {
- const fetchJSONSpy = sandbox.spy(element, 'fetchJSON');
+ test('getSuggestedAccounts does not return _fetchJSON', () => {
+ const _fetchJSONSpy = sandbox.spy(element, '_fetchJSON');
return element.getSuggestedAccounts().then(accts => {
- assert.isFalse(fetchJSONSpy.called);
+ assert.isFalse(_fetchJSONSpy.called);
assert.equal(accts.length, 0);
});
});
- test('fetchJSON gets called by getSuggestedAccounts', () => {
- const fetchJSONStub = sandbox.stub(element, 'fetchJSON',
+ test('_fetchJSON gets called by getSuggestedAccounts', () => {
+ const _fetchJSONStub = sandbox.stub(element, '_fetchJSON',
() => Promise.resolve());
return element.getSuggestedAccounts('own').then(() => {
- assert.deepEqual(fetchJSONStub.lastCall.args[3], {
+ assert.deepEqual(_fetchJSONStub.lastCall.args[0].params, {
q: 'own',
suggest: null,
});
@@ -1064,7 +1070,7 @@
suite('getChanges populates _projectLookup', () => {
test('multiple queries', () => {
- sandbox.stub(element, 'fetchJSON')
+ sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([
[
{_number: 1, project: 'test'},
@@ -1073,7 +1079,7 @@
{_number: 3, project: 'test/test'},
],
]));
- // When opt_query instanceof Array, fetchJSON returns
+ // When opt_query instanceof Array, _fetchJSON returns
// Array<Array<Object>>.
return element.getChanges(null, []).then(() => {
assert.equal(Object.keys(element._projectLookup).length, 3);
@@ -1084,14 +1090,14 @@
});
test('no query', () => {
- sandbox.stub(element, 'fetchJSON')
+ sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve([
{_number: 1, project: 'test'},
{_number: 2, project: 'test'},
{_number: 3, project: 'test/test'},
]));
- // When opt_query !instanceof Array, fetchJSON returns
+ // When opt_query !instanceof Array, _fetchJSON returns
// Array<Object>.
return element.getChanges().then(() => {
assert.equal(Object.keys(element._projectLookup).length, 3);
@@ -1104,10 +1110,12 @@
test('_getChangeURLAndFetch', () => {
element._projectLookup = {1: 'test'};
- const fetchStub = sandbox.stub(element, 'fetchJSON')
+ const fetchStub = sandbox.stub(element, '_fetchJSON')
.returns(Promise.resolve());
- return element._getChangeURLAndFetch(1, '/test', 1).then(() => {
- assert.isTrue(fetchStub.calledWith('/changes/test~1/revisions/1/test'));
+ const req = {changeNum: 1, endpoint: '/test', patchNum: 1};
+ return element._getChangeURLAndFetch(req).then(() => {
+ assert.equal(fetchStub.lastCall.args[0].url,
+ '/changes/test~1/revisions/1/test');
});
});
@@ -1170,8 +1178,8 @@
const range = {basePatchNum: 'PARENT', patchNum: 2};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 2);
- assert.isNotOk(fetchStub.lastCall.args[5]);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 2);
+ assert.isNotOk(fetchStub.lastCall.args[0].params);
});
});
@@ -1181,10 +1189,10 @@
const range = {basePatchNum: 4, patchNum: 5};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.equal(fetchStub.lastCall.args[5].base, 4);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.equal(fetchStub.lastCall.args[0].params.base, 4);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
});
});
@@ -1194,10 +1202,10 @@
const range = {basePatchNum: -3, patchNum: 5};
return element.getChangeFiles(123, range).then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
- assert.equal(fetchStub.lastCall.args[5].parent, 3);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
+ assert.equal(fetchStub.lastCall.args[0].params.parent, 3);
});
});
});
@@ -1208,10 +1216,10 @@
.returns(Promise.resolve());
return element.getDiff(123, 'PARENT', 2, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 2);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 2);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
});
});
@@ -1220,10 +1228,10 @@
.returns(Promise.resolve());
return element.getDiff(123, 4, 5, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].parent);
- assert.equal(fetchStub.lastCall.args[5].base, 4);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.parent);
+ assert.equal(fetchStub.lastCall.args[0].params.base, 4);
});
});
@@ -1232,10 +1240,10 @@
.returns(Promise.resolve());
return element.getDiff(123, -3, 5, 'foo/bar.baz').then(() => {
assert.isTrue(fetchStub.calledOnce);
- assert.equal(fetchStub.lastCall.args[2], 5);
- assert.isOk(fetchStub.lastCall.args[5]);
- assert.isNotOk(fetchStub.lastCall.args[5].base);
- assert.equal(fetchStub.lastCall.args[5].parent, 3);
+ assert.equal(fetchStub.lastCall.args[0].patchNum, 5);
+ assert.isOk(fetchStub.lastCall.args[0].params);
+ assert.isNotOk(fetchStub.lastCall.args[0].params.base);
+ assert.equal(fetchStub.lastCall.args[0].params.parent, 3);
});
});
});
@@ -1245,7 +1253,7 @@
element.getDashboard('gerrit/project', 'default:main');
assert.isTrue(fetchStub.calledOnce);
assert.equal(
- fetchStub.lastCall.args[0],
+ fetchStub.lastCall.args[0].url,
'/projects/gerrit%2Fproject/dashboards/default%3Amain');
});
diff --git a/polygerrit-ui/app/embed/embed.html b/polygerrit-ui/app/embed/embed.html
index f3c727e..9fb5c23 100644
--- a/polygerrit-ui/app/embed/embed.html
+++ b/polygerrit-ui/app/embed/embed.html
@@ -21,4 +21,4 @@
<link rel="import" href="../elements/change-list/gr-change-list-view/gr-change-list-view.html">
<link rel="import" href="../elements/change-list/gr-change-list/gr-change-list.html">
<link rel="import" href="../elements/change-list/gr-dashboard-view/gr-dashboard-view.html">
-<link rel="import" href="../styles/app-theme.html">
+<link rel="import" href="../styles/themes/app-theme.html">
diff --git a/polygerrit-ui/app/rules.bzl b/polygerrit-ui/app/rules.bzl
index b60aa22..199a947 100644
--- a/polygerrit-ui/app/rules.bzl
+++ b/polygerrit-ui/app/rules.bzl
@@ -62,6 +62,15 @@
)
native.filegroup(
+ name = name + "_theme_sources",
+ srcs = native.glob(
+ ["styles/themes/*.html"],
+ # app-theme.html already included via an import in gr-app.html.
+ exclude = ["styles/themes/app-theme.html"],
+ ),
+ )
+
+ native.filegroup(
name = name + "_top_sources",
srcs = [
"favicon.ico",
@@ -73,6 +82,7 @@
srcs = [
name + "_app_sources",
name + "_css_sources",
+ name + "_theme_sources",
name + "_top_sources",
"//lib/fonts:robotofonts",
"//lib/js:highlightjs_files",
@@ -82,11 +92,12 @@
],
outs = outs,
cmd = " && ".join([
- "mkdir -p $$TMP/polygerrit_ui/{styles,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
+ "mkdir -p $$TMP/polygerrit_ui/{styles/themes,fonts,bower_components/{highlightjs,webcomponentsjs},elements}",
"for f in $(locations " + name + "_app_sources); do ext=$${f##*.}; cp -p $$f $$TMP/polygerrit_ui/elements/" + appName + ".$$ext; done",
"cp $(locations //lib/fonts:robotofonts) $$TMP/polygerrit_ui/fonts/",
"for f in $(locations " + name + "_top_sources); do cp $$f $$TMP/polygerrit_ui/; done",
"for f in $(locations "+ name + "_css_sources); do cp $$f $$TMP/polygerrit_ui/styles; done",
+ "for f in $(locations "+ name + "_theme_sources); do cp $$f $$TMP/polygerrit_ui/styles/themes; done",
"for f in $(locations //lib/js:highlightjs_files); do cp $$f $$TMP/polygerrit_ui/bower_components/highlightjs/ ; done",
"unzip -qd $$TMP/polygerrit_ui/bower_components $(location @webcomponentsjs//:zipfile) webcomponentsjs/webcomponents-lite.js",
"cd $$TMP",
diff --git a/polygerrit-ui/app/styles/gr-change-list-styles.html b/polygerrit-ui/app/styles/gr-change-list-styles.html
index 4f92039..7379b9c 100644
--- a/polygerrit-ui/app/styles/gr-change-list-styles.html
+++ b/polygerrit-ui/app/styles/gr-change-list-styles.html
@@ -124,7 +124,7 @@
vertical-align: middle;
}
.leftPadding {
- width: 20px;
+ width: var(--default-horizontal-margin);
}
.star {
width: 30px;
diff --git a/polygerrit-ui/app/styles/gr-form-styles.html b/polygerrit-ui/app/styles/gr-form-styles.html
index f4b367b..3cfd1d5c 100644
--- a/polygerrit-ui/app/styles/gr-form-styles.html
+++ b/polygerrit-ui/app/styles/gr-form-styles.html
@@ -17,6 +17,14 @@
<dom-module id="gr-form-styles">
<template>
<style>
+ .gr-form-styles input {
+ background-color: var(--view-background-color);
+ color: var(--primary-text-color);
+ }
+ .gr-form-styles select {
+ background-color: var(--select-background-color);
+ color: var(--primary-text-color);
+ }
.gr-form-styles h1,
.gr-form-styles h2 {
margin-bottom: .3em;
diff --git a/polygerrit-ui/app/styles/gr-menu-page-styles.html b/polygerrit-ui/app/styles/gr-menu-page-styles.html
index 4adbeda..48ca396 100644
--- a/polygerrit-ui/app/styles/gr-menu-page-styles.html
+++ b/polygerrit-ui/app/styles/gr-menu-page-styles.html
@@ -37,7 +37,7 @@
max-width: none;
}
h2.edited:after {
- color: #444;
+ color: var(--deemphasized-text-color);
content: ' *';
}
.loading {
diff --git a/polygerrit-ui/app/styles/gr-page-nav-styles.html b/polygerrit-ui/app/styles/gr-page-nav-styles.html
index 6eee5a8..49aa033 100644
--- a/polygerrit-ui/app/styles/gr-page-nav-styles.html
+++ b/polygerrit-ui/app/styles/gr-page-nav-styles.html
@@ -24,7 +24,7 @@
border-bottom: 1px solid transparent;
border-top: 1px solid transparent;
display: block;
- padding: 0 2em;
+ padding: 0 calc(var(--default-horizontal-margin) + 0.5em);
}
.navStyles li a {
display: block;
@@ -33,13 +33,13 @@
white-space: nowrap;
}
.navStyles .subsectionItem {
- padding-left: 3em;
+ padding-left: calc(var(--default-horizontal-margin) + 1.5em);
}
.navStyles .hideSubsection {
display: none;
}
.navStyles li.sectionTitle {
- padding: 0 2em 0 1.5em;
+ padding: 0 2em 0 var(--default-horizontal-margin);
}
.navStyles li.sectionTitle:not(:first-child) {
margin-top: 1em;
diff --git a/polygerrit-ui/app/styles/app-theme.html b/polygerrit-ui/app/styles/themes/app-theme.html
similarity index 97%
rename from polygerrit-ui/app/styles/app-theme.html
rename to polygerrit-ui/app/styles/themes/app-theme.html
index 6112035..4500e10 100644
--- a/polygerrit-ui/app/styles/app-theme.html
+++ b/polygerrit-ui/app/styles/themes/app-theme.html
@@ -42,10 +42,14 @@
--table-header-background-color: #fafafa;
--table-subheader-background-color: #eaeaea;
- --chip-background-color: var(--header-background-color);
+ --chip-background-color: #eee;
--dropdown-background-color: #fff;
+ --select-background-color: rgb(248, 248, 248);
+
+ --assignee-highlight-color: #fcfad6;
+
/* Font sizes */
--font-size-normal: 1rem;
--font-size-small: .92rem;
diff --git a/polygerrit-ui/app/styles/themes/dark-theme.html b/polygerrit-ui/app/styles/themes/dark-theme.html
new file mode 100644
index 0000000..1f473da
--- /dev/null
+++ b/polygerrit-ui/app/styles/themes/dark-theme.html
@@ -0,0 +1,83 @@
+<dom-module id="dark-theme">
+ <style is="custom-style">
+ html {
+ --primary-text-color: #e2e2e2;
+ --view-background-color: #212121;
+ --border-color: #555555;
+ --table-header-background-color: #353637;
+ --table-subheader-background-color: rgb(23, 27, 31);
+ --header-background-color: #5487E5;
+ --header-text-color: var(--primary-text-color);
+ --deemphasized-text-color: #9a9a9a;
+ --footer-background-color: var(--table-header-background-color);
+ --expanded-background-color: #26282b;
+ --link-color: #5487E5;
+ --primary-button-background-color: var(--link-color);
+ --primary-button-text-color: var(--primary-text-color);
+ --secondary-button-background-color: var(--primary-text-color);
+ --secondary-button-text-color: var(--deemphasized-text-color);
+ --default-button-text-color: var(--link-color);
+ --default-button-background-color: var(--table-subheader-background-color);
+ --dropdown-background-color: var(--table-header-background-color);
+ --dialog-background-color: var(--view-background-color);
+ --chip-background-color: var(--table-header-background-color);
+
+ --select-background-color: var(--table-subheader-background-color);
+
+ --assignee-highlight-color: rgb(58, 54, 28);
+
+ --diff-selection-background-color: #3A71D8;
+ --light-remove-highlight-color: rgb(53, 27, 27);
+ --light-add-highlight-color: rgb(24, 45, 24);
+ --light-rebased-remove-highlight-color: rgb(60, 37, 8);
+ --light-rebased-add-highlight-color: rgb(72, 113, 101);
+ --dark-remove-highlight-color: rgba(255, 0, 0, 0.15);
+ --dark-add-highlight-color: rgba(0, 255, 0, 0.15);
+ --dark-rebased-remove-highlight-color: rgba(255, 139, 6, 0.15);
+ --dark-rebased-add-highlight-color: rgba(11, 255, 155, 0.15);
+ --diff-context-control-color: var(--table-header-background-color);
+ --diff-context-control-border-color: var(--border-color);
+ --diff-highlight-range-color: rgba(0, 100, 200, 0.5);
+ --diff-highlight-range-hover-color: rgba(0, 150, 255, 0.5);
+ --comment-text-color: var(--primary-text-color);
+ --comment-background-color: #0B162B;
+ --unresolved-comment-background-color: rgb(56, 90, 154);
+
+ --vote-color-approved: rgb(127, 182, 107);
+ --vote-color-recommended: rgb(63, 103, 50);
+ --vote-color-rejected: #ac2d3e;
+ --vote-color-disliked: #bf6874;
+ --vote-color-neutral: #597280;
+
+ --edit-mode-background-color: rgb(92, 10, 54);
+ --emphasis-color: #383f4a;
+
+ --tooltip-background-color: #111;
+
+ --syntax-default-color: var(--primary-text-color);
+ --syntax-meta-color: #6D7EEE;
+ --syntax-keyword-color: #CD4CF0;
+ --syntax-number-color: #00998A;
+ --syntax-selector-class-color: #FFCB68;
+ --syntax-variable-color: #F77669;
+ --syntax-template-variable-color: #F77669;
+ --syntax-comment-color: var(--deemphasized-text-color);
+ --syntax-string-color: #C3E88D;
+ --syntax-selector-id-color: #F77669;
+ --syntax-built_in-color: rgb(247, 195, 105);
+ --syntax-tag-color: #F77669;
+ --syntax-link-color: #C792EA;
+ --syntax-meta-keyword-color: #EEFFF7;
+ --syntax-type-color: #DD5F5F;
+ --syntax-title-color: #75A5FF;
+ --syntax-attr-color: #80CBBF;
+ --syntax-literal-color: #EEFFF7;
+ --syntax-selector-pseudo-color: #C792EA;
+ --syntax-regexp-color: #F77669;
+ --syntax-selector-attr-color: #80CBBF;
+ --syntax-template-tag-color: #C792EA;
+
+ background-color: var(--view-background-color);
+ }
+ </style>
+</dom-module>
\ No newline at end of file
diff --git a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
index 3a5cd83b..579e783 100644
--- a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
+++ b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
@@ -1,5 +1,6 @@
-import os, re, json
-from shutil import copyfile, rmtree
+import json
+import os
+import re
polymerRegex = r"Polymer\({"
polymerCompiledRegex = re.compile(polymerRegex)
@@ -10,103 +11,119 @@
regexBehavior = r"<script>(.+)<\/script>"
behaviorCompiledRegex = re.compile(regexBehavior, re.DOTALL)
+
def _open(filename, mode="r"):
- try:
- return open(filename, mode, encoding="utf-8")
- except TypeError:
- return open(filename, mode)
+ try:
+ return open(filename, mode, encoding="utf-8")
+ except TypeError:
+ return open(filename, mode)
-def replaceBehaviorLikeHTML (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- match = behaviorCompiledRegex.search(file_str)
- if (match):
- with _open("polygerrit-ui/temp/behaviors/" + fileOut.replace("html", "js") , "w+") as f:
- f.write(match.group(1))
-def replaceBehaviorLikeJS (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- with _open("polygerrit-ui/temp/behaviors/" + fileOut , "w+") as f:
- f.write(file_str)
+def replaceBehaviorLikeHTML(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ match = behaviorCompiledRegex.search(file_str)
+ if match:
+ with _open("polygerrit-ui/temp/behaviors/" +
+ fileOut.replace("html", "js"), "w+") as f:
+ f.write(match.group(1))
+
+
+def replaceBehaviorLikeJS(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ with _open("polygerrit-ui/temp/behaviors/" + fileOut, "w+") as f:
+ f.write(file_str)
+
def generateStubBehavior(behaviorName):
- with _open("polygerrit-ui/temp/behaviors/" + behaviorName + ".js", "w+") as f:
- f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
+ with _open("polygerrit-ui/temp/behaviors/" +
+ behaviorName + ".js", "w+") as f:
+ f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
-def replacePolymerElement (fileIn, fileOut, root):
- with _open(fileIn) as f:
- key = fileOut.split('.')[0]
- # Removed self invoked function
- file_str = f.read()
- file_str_no_fn = fnCompiledRegex.search(file_str)
- if file_str_no_fn:
- package = root.replace("/", ".") + "." + fileOut
+def replacePolymerElement(fileIn, fileOut, root):
+ with _open(fileIn) as f:
+ key = fileOut.split('.')[0]
+        # Removed self-invoked function
+ file_str = f.read()
+ file_str_no_fn = fnCompiledRegex.search(file_str)
- with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
- mainFileContents = re.sub(polymerCompiledRegex, "exports = Polymer({", file_str_no_fn.group(1)).replace("'use strict';", "")
- f.write("/** \n" \
- "* @fileoverview \n" \
- "* @suppress {missingProperties} \n" \
- "*/ \n\n" \
- "goog.module('polygerrit." + package + "')\n\n" + mainFileContents)
+ if file_str_no_fn:
+ package = root.replace("/", ".") + "." + fileOut
- # Add package and javascript to files object.
- elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
- elements[key]["package"] = package
+ with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
+ mainFileContents = re.sub(
+ polymerCompiledRegex,
+ "exports = Polymer({",
+ file_str_no_fn.group(1)).replace("'use strict';", "")
+ f.write("/** \n"
+ "* @fileoverview \n"
+ "* @suppress {missingProperties} \n"
+ "*/ \n\n"
+ "goog.module('polygerrit." + package + "')\n\n" +
+ mainFileContents)
+
+            # Add package and javascript to the elements object.
+ elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
+ elements[key]["package"] = package
+
def writeTempFile(file, root):
- # This is included in an extern because it is directly on the window object.
- # (for now at least).
- if "gr-reporting" in file:
- return
- key = file.split('.')[0]
- if not key in elements:
- # gr-app doesn't have an additional level
- elements[key] = {"directory": 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]}
- if file.endswith(".html") and not file.endswith("_test.html"):
- # gr-navigation is treated like a behavior rather than a standard element
- # because of the way it added to the Gerrit object.
- if file.endswith("gr-navigation.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- else:
- elements[key]["html"] = os.path.join(root, file)
- if file.endswith(".js"):
- replacePolymerElement(os.path.join(root, file), file, root)
+ # This is included in an extern because it is directly on the window object
+ # (for now at least).
+ if "gr-reporting" in file:
+ return
+ key = file.split('.')[0]
+ if key not in elements:
+ # gr-app doesn't have an additional level
+ elements[key] = {
+ "directory":
+ 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]
+ }
+ if file.endswith(".html") and not file.endswith("_test.html"):
+ # gr-navigation is treated like a behavior rather than a standard
+        # element because of the way it is added to the Gerrit object.
+ if file.endswith("gr-navigation.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ else:
+ elements[key]["html"] = os.path.join(root, file)
+ if file.endswith(".js"):
+ replacePolymerElement(os.path.join(root, file), file, root)
if __name__ == "__main__":
- # Create temp directory.
- if not os.path.exists("polygerrit-ui/temp"):
- os.makedirs("polygerrit-ui/temp")
+ # Create temp directory.
+ if not os.path.exists("polygerrit-ui/temp"):
+ os.makedirs("polygerrit-ui/temp")
- # Within temp directory create behavior directory.
- if not os.path.exists("polygerrit-ui/temp/behaviors"):
- os.makedirs("polygerrit-ui/temp/behaviors")
+ # Within temp directory create behavior directory.
+ if not os.path.exists("polygerrit-ui/temp/behaviors"):
+ os.makedirs("polygerrit-ui/temp/behaviors")
- elements = {}
+ elements = {}
- # Go through every file in app/elements, and re-write accordingly to temp
- # directory, and also added to elements object, which is used to generate a
- # map of html files, package names, and javascript files.
- for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
- for file in files:
- writeTempFile(file, root)
+ # Go through every file in app/elements, and re-write accordingly to temp
+    # directory, and also add to the elements object, which is used to generate a
+ # map of html files, package names, and javascript files.
+ for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
+ for file in files:
+ writeTempFile(file, root)
- # Special case for polymer behaviors we are using.
- replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
- generateStubBehavior("Polymer.IronOverlayBehavior")
- generateStubBehavior("Polymer.IronFitBehavior")
+ # Special case for polymer behaviors we are using.
+ replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
+ generateStubBehavior("Polymer.IronOverlayBehavior")
+ generateStubBehavior("Polymer.IronFitBehavior")
- #TODO figure out something to do with iron-overlay-behavior. it is hard-coded reformatted.
+ # TODO figure out something to do with iron-overlay-behavior.
+    # it is hard-coded and reformatted.
- with _open("polygerrit-ui/temp/map.json", "w+") as f:
- f.write(json.dumps(elements))
+ with _open("polygerrit-ui/temp/map.json", "w+") as f:
+ f.write(json.dumps(elements))
- for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
- for file in files:
- if file.endswith("behavior.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- elif file.endswith("behavior.js"):
- replaceBehaviorLikeJS(os.path.join(root, file), file)
+ for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
+ for file in files:
+ if file.endswith("behavior.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ elif file.endswith("behavior.js"):
+ replaceBehaviorLikeJS(os.path.join(root, file), file)
diff --git a/polygerrit-ui/app/test/index.html b/polygerrit-ui/app/test/index.html
index 6cf674a..6a562fc 100644
--- a/polygerrit-ui/app/test/index.html
+++ b/polygerrit-ui/app/test/index.html
@@ -88,6 +88,7 @@
'core/gr-error-manager/gr-error-manager_test.html',
'core/gr-main-header/gr-main-header_test.html',
'core/gr-navigation/gr-navigation_test.html',
+ 'core/gr-reporting/gr-jank-detector_test.html',
'core/gr-reporting/gr-reporting_test.html',
'core/gr-router/gr-router_test.html',
'core/gr-search-bar/gr-search-bar_test.html',
diff --git a/proto/cache.proto b/proto/cache.proto
index 4a84ab1..7e2e75a 100644
--- a/proto/cache.proto
+++ b/proto/cache.proto
@@ -25,3 +25,162 @@
bytes next = 2;
string strategy_name = 3;
}
+
+// Serialized form of
+// com.google.gerrit.server.change.MergeabilityCacheImpl.EntryKey.
+// Next ID: 5
+message MergeabilityKeyProto {
+ bytes commit = 1;
+ bytes into = 2;
+ string submit_type = 3;
+ string merge_strategy = 4;
+}
+
+// Serialized form of com.google.gerrit.extensions.auth.oauth.OAuthToken.
+// Next ID: 6
+message OAuthTokenProto {
+ string token = 1;
+ string secret = 2;
+ string raw = 3;
+ int64 expires_at = 4;
+ string provider_id = 5;
+}
+
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesCache.Key.
+// Next ID: 4
+message ChangeNotesKeyProto {
+ string project = 1;
+ int32 change_id = 2;
+ bytes id = 3;
+}
+
+// Serialized form of com.google.gerrit.server.notedb.ChangeNotesState.
+//
+// Note on embedded protos: this is just for storing in a cache, so some formats
+// were chosen for ease of coding the initial implementation. In particular, where
+// there already exists another serialization mechanism in Gerrit for
+// serializing a particular field, we use that rather than defining a new proto
+// type. This includes ReviewDb types that can be serialized to proto using
+// ProtobufCodec as well as NoteDb and indexed types that are serialized using
+// JSON. We can always revisit this decision later, particularly when we
+// eliminate the ReviewDb types; it just requires bumping the cache version.
+//
+// Note on nullability: there are a lot of nullable fields in ChangeNotesState
+// and its dependencies. It's likely we could make some of them non-nullable,
+// but each one of those would be a potentially significant amount of cleanup,
+// and there's no guarantee we'd be able to eliminate all of them. (For a less
+// complex class, it's likely the cleanup would be more feasible.)
+//
+// Instead, we just take the tedious yet simple approach of having a "has_foo"
+// field for each nullable field "foo", indicating whether or not foo is null.
+//
+// Next ID: 19
+message ChangeNotesStateProto {
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ bytes meta_id = 1;
+
+ int32 change_id = 2;
+
+ // Next ID: 24
+ message ChangeColumnsProto {
+ string change_key = 1;
+
+ int64 created_on = 2;
+
+ int64 last_updated_on = 3;
+
+ int32 owner = 4;
+
+ string branch = 5;
+
+ int32 current_patch_set_id = 6;
+ bool has_current_patch_set_id = 7;
+
+ string subject = 8;
+
+ string topic = 9;
+ bool has_topic = 10;
+
+ string original_subject = 11;
+ bool has_original_subject = 12;
+
+ string submission_id = 13;
+ bool has_submission_id = 14;
+
+ int32 assignee = 15;
+ bool has_assignee = 16;
+
+ string status = 17;
+ bool has_status = 18;
+
+ bool is_private = 19;
+
+ bool work_in_progress = 20;
+
+ bool review_started = 21;
+
+ int32 revert_of = 22;
+ bool has_revert_of = 23;
+ }
+ // Effectively required, even though the corresponding ChangeNotesState field
+ // is optional, since the field is only absent when NoteDb is disabled, in
+ // which case attempting to use the ChangeNotesCache is programmer error.
+ ChangeColumnsProto columns = 3;
+
+ repeated int32 past_assignee = 4;
+
+ repeated string hashtag = 5;
+
+ // Raw PatchSet proto as produced by ProtobufCodec.
+ repeated bytes patch_set = 6;
+
+ // Raw PatchSetApproval proto as produced by ProtobufCodec.
+ repeated bytes approval = 7;
+
+ // Next ID: 4
+ message ReviewerSetEntryProto {
+ string state = 1;
+ int32 account_id = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerSetEntryProto reviewer = 8;
+
+ // Next ID: 4
+ message ReviewerByEmailSetEntryProto {
+ string state = 1;
+ string address = 2;
+ int64 timestamp = 3;
+ }
+ repeated ReviewerByEmailSetEntryProto reviewer_by_email = 9;
+
+ repeated ReviewerSetEntryProto pending_reviewer = 10;
+
+ repeated ReviewerByEmailSetEntryProto pending_reviewer_by_email = 11;
+
+ repeated int32 past_reviewer = 12;
+
+ // Next ID: 5
+ message ReviewerStatusUpdateProto {
+ int64 date = 1;
+ int32 updated_by = 2;
+ int32 reviewer = 3;
+ string state = 4;
+ }
+ repeated ReviewerStatusUpdateProto reviewer_update = 13;
+
+ // JSON produced from
+ // com.google.gerrit.server.index.change.ChangeField.StoredSubmitRecord.
+ repeated string submit_record = 14;
+
+ // Raw ChangeMessage proto as produced by ProtobufCodec.
+ repeated bytes change_message = 15;
+
+ // JSON produced from com.google.gerrit.reviewdb.client.Comment.
+ repeated string published_comment = 16;
+
+ int64 read_only_until = 17;
+ bool has_read_only_until = 18;
+}
diff --git a/resources/com/google/gerrit/pgm/Startup.py b/resources/com/google/gerrit/pgm/Startup.py
index 469d5df..ec18f42 100644
--- a/resources/com/google/gerrit/pgm/Startup.py
+++ b/resources/com/google/gerrit/pgm/Startup.py
@@ -19,14 +19,16 @@
from __future__ import print_function
import sys
+
def print_help():
- for (n, v) in vars(sys.modules['__main__']).items():
- if not n.startswith("__") and not n in ['help', 'reload'] \
- and str(type(v)) != "<type 'javapackage'>" \
- and not str(v).startswith("<module"):
- print("\"%s\" is \"%s\"" % (n, v))
- print()
- print("Welcome to the Gerrit Inspector")
- print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+ for (n, v) in vars(sys.modules['__main__']).items():
+ if not n.startswith("__") and n not in ['help', 'reload'] \
+ and str(type(v)) != "<type 'javapackage'>" \
+ and not str(v).startswith("<module"):
+ print("\"%s\" is \"%s\"" % (n, v))
+ print()
+ print("Welcome to the Gerrit Inspector")
+ print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+
print_help()
diff --git a/tools/bzl/license-map.py b/tools/bzl/license-map.py
index 74a84cc..476ccb9 100644
--- a/tools/bzl/license-map.py
+++ b/tools/bzl/license-map.py
@@ -25,35 +25,34 @@
handled_rules = []
for xml in args.xmls:
- tree = ET.parse(xml)
- root = tree.getroot()
+ tree = ET.parse(xml)
+ root = tree.getroot()
- for child in root:
- rule_name = child.attrib["name"]
- if rule_name in handled_rules:
- # already handled in other xml files
- continue
+ for child in root:
+ rule_name = child.attrib["name"]
+ if rule_name in handled_rules:
+ # already handled in other xml files
+ continue
- handled_rules.append(rule_name)
- for c in child.getchildren():
- if c.tag != "rule-input":
- continue
+ handled_rules.append(rule_name)
+ for c in child.getchildren():
+ if c.tag != "rule-input":
+ continue
- license_name = c.attrib["name"]
- if LICENSE_PREFIX in license_name:
- entries[rule_name].append(license_name)
- graph[license_name].append(rule_name)
+ license_name = c.attrib["name"]
+ if LICENSE_PREFIX in license_name:
+ entries[rule_name].append(license_name)
+ graph[license_name].append(rule_name)
if len(graph[DO_NOT_DISTRIBUTE]):
- print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
- for target in graph[DO_NOT_DISTRIBUTE]:
- print(target, file=stderr)
- exit(1)
+ print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
+ for target in graph[DO_NOT_DISTRIBUTE]:
+ print(target, file=stderr)
+ exit(1)
if args.asciidoctor:
- print(
-# We don't want any blank line before "= Gerrit Code Review - Licenses"
-"""= Gerrit Code Review - Licenses
+ # We don't want any blank line before "= Gerrit Code Review - Licenses"
+ print("""= Gerrit Code Review - Licenses
Gerrit open source software is licensed under the <<Apache2_0,Apache
License 2.0>>. Executable distributions also include other software
@@ -93,40 +92,39 @@
""")
for n in sorted(graph.keys()):
- if len(graph[n]) == 0:
- continue
+ if len(graph[n]) == 0:
+ continue
- name = n[len(LICENSE_PREFIX):]
- safename = name.replace(".", "_")
- print()
- print("[[%s]]" % safename)
- print(name)
- print()
- for d in sorted(graph[n]):
- if d.startswith("//lib:") or d.startswith("//lib/"):
- p = d[len("//lib:"):]
- else:
- p = d[d.index(":")+1:].lower()
- if "__" in p:
- p = p[:p.index("__")]
- print("* " + p)
- print()
- print("[[%s_license]]" % safename)
- print("----")
- filename = n[2:].replace(":", "/")
- try:
- with open(filename, errors='ignore') as fd:
- copyfileobj(fd, stdout)
- except TypeError:
- with open(filename) as fd:
- copyfileobj(fd, stdout)
- print()
- print("----")
- print()
+ name = n[len(LICENSE_PREFIX):]
+ safename = name.replace(".", "_")
+ print()
+ print("[[%s]]" % safename)
+ print(name)
+ print()
+ for d in sorted(graph[n]):
+ if d.startswith("//lib:") or d.startswith("//lib/"):
+ p = d[len("//lib:"):]
+ else:
+ p = d[d.index(":")+1:].lower()
+ if "__" in p:
+ p = p[:p.index("__")]
+ print("* " + p)
+ print()
+ print("[[%s_license]]" % safename)
+ print("----")
+ filename = n[2:].replace(":", "/")
+ try:
+ with open(filename, errors='ignore') as fd:
+ copyfileobj(fd, stdout)
+ except TypeError:
+ with open(filename) as fd:
+ copyfileobj(fd, stdout)
+ print()
+ print("----")
+ print()
if args.asciidoctor:
- print(
-"""
+ print("""
GERRIT
------
Part of link:index.html[Gerrit Code Review]
diff --git a/tools/download_file.py b/tools/download_file.py
index 26671f0..29398e6 100755
--- a/tools/download_file.py
+++ b/tools/download_file.py
@@ -30,49 +30,50 @@
def safe_mkdirs(d):
- if path.isdir(d):
- return
- try:
- makedirs(d)
- except OSError as err:
- if not path.isdir(d):
- raise err
+ if path.isdir(d):
+ return
+ try:
+ makedirs(d)
+ except OSError as err:
+ if not path.isdir(d):
+ raise err
def download_properties(root_dir):
- """ Get the download properties.
+ """ Get the download properties.
- First tries to find the properties file in the given root directory,
- and if not found there, tries in the Gerrit settings folder in the
- user's home directory.
+ First tries to find the properties file in the given root directory,
+ and if not found there, tries in the Gerrit settings folder in the
+ user's home directory.
- Returns a set of download properties, which may be empty.
+ Returns a set of download properties, which may be empty.
- """
- p = {}
- local_prop = path.join(root_dir, LOCAL_PROPERTIES)
- if not path.isfile(local_prop):
- local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
- if path.isfile(local_prop):
- try:
- with open(local_prop) as fd:
- for line in fd:
- if line.startswith('download.'):
- d = [e.strip() for e in line.split('=', 1)]
- name, url = d[0], d[1]
- p[name[len('download.'):]] = url
- except OSError:
- pass
- return p
+ """
+ p = {}
+ local_prop = path.join(root_dir, LOCAL_PROPERTIES)
+ if not path.isfile(local_prop):
+ local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
+ if path.isfile(local_prop):
+ try:
+ with open(local_prop) as fd:
+ for line in fd:
+ if line.startswith('download.'):
+ d = [e.strip() for e in line.split('=', 1)]
+ name, url = d[0], d[1]
+ p[name[len('download.'):]] = url
+ except OSError:
+ pass
+ return p
def cache_entry(args):
- if args.v:
- h = args.v
- else:
- h = sha1(args.u.encode('utf-8')).hexdigest()
- name = '%s-%s' % (path.basename(args.o), h)
- return path.join(CACHE_DIR, name)
+ if args.v:
+ h = args.v
+ else:
+ h = sha1(args.u.encode('utf-8')).hexdigest()
+ name = '%s-%s' % (path.basename(args.o), h)
+ return path.join(CACHE_DIR, name)
+
opts = OptionParser()
opts.add_option('-o', help='local output file')
@@ -85,89 +86,90 @@
root_dir = args.o
while root_dir and path.dirname(root_dir) != root_dir:
- root_dir, n = path.split(root_dir)
- if n == 'WORKSPACE':
- break
+ root_dir, n = path.split(root_dir)
+ if n == 'WORKSPACE':
+ break
redirects = download_properties(root_dir)
cache_ent = cache_entry(args)
src_url = resolve_url(args.u, redirects)
if not path.exists(cache_ent):
- try:
- safe_mkdirs(path.dirname(cache_ent))
- except OSError as err:
- print('error creating directory %s: %s' %
- (path.dirname(cache_ent), err), file=stderr)
- exit(1)
+ try:
+ safe_mkdirs(path.dirname(cache_ent))
+ except OSError as err:
+ print('error creating directory %s: %s' %
+ (path.dirname(cache_ent), err), file=stderr)
+ exit(1)
- print('Download %s' % src_url, file=stderr)
- try:
- check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
- except OSError as err:
- print('could not invoke curl: %s\nis curl installed?' % err, file=stderr)
- exit(1)
- except CalledProcessError as err:
- print('error using curl: %s' % err, file=stderr)
- exit(1)
+ print('Download %s' % src_url, file=stderr)
+ try:
+ check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
+ except OSError as err:
+ print('could not invoke curl: %s\nis curl installed?' % err,
+ file=stderr)
+ exit(1)
+ except CalledProcessError as err:
+ print('error using curl: %s' % err, file=stderr)
+ exit(1)
if args.v:
- have = hash_file(sha1(), cache_ent).hexdigest()
- if args.v != have:
- print((
- '%s:\n' +
- 'expected %s\n' +
- 'received %s\n') % (src_url, args.v, have), file=stderr)
- try:
- remove(cache_ent)
- except OSError as err:
- if path.exists(cache_ent):
- print('error removing %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ have = hash_file(sha1(), cache_ent).hexdigest()
+ if args.v != have:
+ print((
+ '%s:\n' +
+ 'expected %s\n' +
+ 'received %s\n') % (src_url, args.v, have), file=stderr)
+ try:
+ remove(cache_ent)
+ except OSError as err:
+ if path.exists(cache_ent):
+ print('error removing %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
exclude = []
if args.x:
- exclude += args.x
+ exclude += args.x
if args.exclude_java_sources:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if n.endswith('.java'):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if n.endswith('.java'):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
if args.unsign:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if (n.endswith('.RSA')
- or n.endswith('.SF')
- or n.endswith('.LIST')):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if (n.endswith('.RSA')
+ or n.endswith('.SF')
+ or n.endswith('.LIST')):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
safe_mkdirs(path.dirname(args.o))
if exclude:
- try:
- shutil.copyfile(cache_ent, args.o)
- except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
- try:
- check_call(['zip', '-d', args.o] + exclude)
- except CalledProcessError as err:
- print('error removing files from zip: %s' % err, file=stderr)
- exit(1)
-else:
- try:
- link(cache_ent, args.o)
- except OSError as err:
try:
- shutil.copyfile(cache_ent, args.o)
+ shutil.copyfile(cache_ent, args.o)
except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
+ try:
+ check_call(['zip', '-d', args.o] + exclude)
+ except CalledProcessError as err:
+ print('error removing files from zip: %s' % err, file=stderr)
+ exit(1)
+else:
+ try:
+ link(cache_ent, args.o)
+ except OSError as err:
+ try:
+ shutil.copyfile(cache_ent, args.o)
+ except (shutil.Error, IOError) as err:
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
diff --git a/tools/eclipse/project.py b/tools/eclipse/project.py
index a6b0964..b99c04e 100755
--- a/tools/eclipse/project.py
+++ b/tools/eclipse/project.py
@@ -30,20 +30,20 @@
GWT = '//gerrit-gwtui:ui_module'
AUTO = '//lib/auto:auto-value'
JRE = '/'.join([
- 'org.eclipse.jdt.launching.JRE_CONTAINER',
- 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
- 'JavaSE-1.8',
+ 'org.eclipse.jdt.launching.JRE_CONTAINER',
+ 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
+ 'JavaSE-1.8',
])
# Map of targets to corresponding classpath collector rules
cp_targets = {
- AUTO: '//tools/eclipse:autovalue_classpath_collect',
- GWT: '//tools/eclipse:gwt_classpath_collect',
- MAIN: '//tools/eclipse:main_classpath_collect',
+ AUTO: '//tools/eclipse:autovalue_classpath_collect',
+ GWT: '//tools/eclipse:gwt_classpath_collect',
+ MAIN: '//tools/eclipse:main_classpath_collect',
}
ROOT = path.abspath(__file__)
while not path.exists(path.join(ROOT, 'WORKSPACE')):
- ROOT = path.dirname(ROOT)
+ ROOT = path.dirname(ROOT)
opts = OptionParser()
opts.add_option('--plugins', help='create eclipse projects for plugins',
@@ -56,38 +56,43 @@
batch_option = '--batch' if args.batch else None
+
def _build_bazel_cmd(*args):
- cmd = ['bazel']
- if batch_option:
- cmd.append('--batch')
- for arg in args:
- cmd.append(arg)
- return cmd
+ cmd = ['bazel']
+ if batch_option:
+ cmd.append('--batch')
+ for arg in args:
+ cmd.append(arg)
+ return cmd
+
def retrieve_ext_location():
- return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+ return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+
def gen_bazel_path():
- bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
- with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
- fd.write("bazel=%s\n" % bazel)
- fd.write("PATH=%s\n" % environ["PATH"])
+ bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
+ with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
+ fd.write("bazel=%s\n" % bazel)
+ fd.write("PATH=%s\n" % environ["PATH"])
+
def _query_classpath(target):
- deps = []
- t = cp_targets[target]
- try:
- check_call(_build_bazel_cmd('build', t))
- except CalledProcessError:
- exit(1)
- name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
- deps = [line.rstrip('\n') for line in open(name)]
- return deps
+ deps = []
+ t = cp_targets[target]
+ try:
+ check_call(_build_bazel_cmd('build', t))
+ except CalledProcessError:
+ exit(1)
+ name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
+ deps = [line.rstrip('\n') for line in open(name)]
+ return deps
+
def gen_project(name='gerrit', root=ROOT):
- p = path.join(root, '.project')
- with open(p, 'w') as fd:
- print("""\
+ p = path.join(root, '.project')
+ with open(p, 'w') as fd:
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>%(name)s</name>
@@ -102,16 +107,17 @@
</projectDescription>\
""" % {"name": name}, file=fd)
+
def gen_plugin_classpath(root):
- p = path.join(root, '.classpath')
- with open(p, 'w') as fd:
- if path.exists(path.join(root, 'src', 'test', 'java')):
- testpath = """
+ p = path.join(root, '.classpath')
+ with open(p, 'w') as fd:
+ if path.exists(path.join(root, 'src', 'test', 'java')):
+ testpath = """
<classpathentry excluding="**/BUILD" kind="src" path="src/test/java"\
out="eclipse-out/test"/>"""
- else:
- testpath = ""
- print("""\
+ else:
+ testpath = ""
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="**/BUILD" kind="src" path="src/main/java"/>%(testpath)s
@@ -120,186 +126,193 @@
<classpathentry kind="output" path="eclipse-out/classes"/>
</classpath>""" % {"testpath": testpath}, file=fd)
+
def gen_classpath(ext):
- def make_classpath():
- impl = minidom.getDOMImplementation()
- return impl.createDocument(None, 'classpath', None)
+ def make_classpath():
+ impl = minidom.getDOMImplementation()
+ return impl.createDocument(None, 'classpath', None)
- def classpathentry(kind, path, src=None, out=None, exported=None):
- e = doc.createElement('classpathentry')
- e.setAttribute('kind', kind)
- # TODO(davido): Remove this and other exclude BUILD files hack
- # when this Bazel bug is fixed:
- # https://github.com/bazelbuild/bazel/issues/1083
- if kind == 'src':
- e.setAttribute('excluding', '**/BUILD')
- e.setAttribute('path', path)
- if src:
- e.setAttribute('sourcepath', src)
- if out:
- e.setAttribute('output', out)
- if exported:
- e.setAttribute('exported', 'true')
- doc.documentElement.appendChild(e)
+ def classpathentry(kind, path, src=None, out=None, exported=None):
+ e = doc.createElement('classpathentry')
+ e.setAttribute('kind', kind)
+ # TODO(davido): Remove this and other exclude BUILD files hack
+ # when this Bazel bug is fixed:
+ # https://github.com/bazelbuild/bazel/issues/1083
+ if kind == 'src':
+ e.setAttribute('excluding', '**/BUILD')
+ e.setAttribute('path', path)
+ if src:
+ e.setAttribute('sourcepath', src)
+ if out:
+ e.setAttribute('output', out)
+ if exported:
+ e.setAttribute('exported', 'true')
+ doc.documentElement.appendChild(e)
- doc = make_classpath()
- src = set()
- lib = set()
- proto = set()
- gwt_src = set()
- gwt_lib = set()
- plugins = set()
+ doc = make_classpath()
+ src = set()
+ lib = set()
+ proto = set()
+ gwt_src = set()
+ gwt_lib = set()
+ plugins = set()
- # Classpath entries are absolute for cross-cell support
- java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
- srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
- for p in _query_classpath(MAIN):
- if p.endswith('-src.jar'):
- # gwt_module() depends on -src.jar for Java to JavaScript compiles.
- if p.startswith("external"):
- p = path.join(ext, p)
- gwt_lib.add(p)
- continue
-
- m = java_library.match(p)
- if m:
- src.add(m.group(1))
- # Exceptions: both source and lib
- if p.endswith('libquery_parser.jar') or \
- p.endswith('libgerrit-prolog-common.jar'):
- lib.add(p)
- # JGit dependency from external repository
- if 'gerrit-' not in p and 'jgit' in p:
- lib.add(p)
- # Assume any jars in /proto/ are from java_proto_library rules
- if '/bin/proto/' in p:
- proto.add(p)
- else:
- # Don't mess up with Bazel internal test runner dependencies.
- # When we use Eclipse we rely on it for running the tests
- if p.endswith("external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
- continue
- if p.startswith("external"):
- p = path.join(ext, p)
- lib.add(p)
-
- for p in _query_classpath(GWT):
- m = java_library.match(p)
- if m:
- gwt_src.add(m.group(1))
-
- classpathentry('src', 'java')
- classpathentry('src', 'javatests', out='eclipse-out/test')
- classpathentry('src', 'resources')
- for s in sorted(src):
- out = None
-
- if s.startswith('lib/'):
- out = 'eclipse-out/lib'
- elif s.startswith('plugins/'):
- if args.plugins:
- plugins.add(s)
- continue
- out = 'eclipse-out/' + s
-
- p = path.join(s, 'java')
- if path.exists(p):
- classpathentry('src', p, out=out)
- continue
-
- for env in ['main', 'test']:
- o = None
- if out:
- o = out + '/' + env
- elif env == 'test':
- o = 'eclipse-out/test'
-
- for srctype in ['java', 'resources']:
- p = path.join(s, 'src', env, srctype)
- if path.exists(p):
- classpathentry('src', p, out=o)
-
- for libs in [lib, gwt_lib]:
- for j in sorted(libs):
- s = None
- m = srcs.match(j)
- if m:
- prefix = m.group(1)
- suffix = m.group(2)
- p = path.join(prefix, "jar", "%s-src.jar" % suffix)
- if path.exists(p):
- s = p
- if args.plugins:
- classpathentry('lib', j, s, exported=True)
- else:
- # Filter out the source JARs that we pull through transitive closure of
- # GWT plugin API (we add source directories themself). Exception is
- # libEdit-src.jar, that is needed for GWT SDM to work.
- m = java_library.match(j)
- if m:
- if m.group(1).startswith("gerrit-") and \
- j.endswith("-src.jar") and \
- not j.endswith("libEdit-src.jar"):
+ # Classpath entries are absolute for cross-cell support
+ java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
+ srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
+ for p in _query_classpath(MAIN):
+ if p.endswith('-src.jar'):
+ # gwt_module() depends on -src.jar for Java to JavaScript compiles.
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ gwt_lib.add(p)
continue
- classpathentry('lib', j, s)
- for p in sorted(proto):
- s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
- s = s.replace('.jar', '-src.jar')
- classpathentry('lib', p, s)
+ m = java_library.match(p)
+ if m:
+ src.add(m.group(1))
+ # Exceptions: both source and lib
+ if p.endswith('libquery_parser.jar') or \
+ p.endswith('libgerrit-prolog-common.jar'):
+ lib.add(p)
+ # JGit dependency from external repository
+ if 'gerrit-' not in p and 'jgit' in p:
+ lib.add(p)
+ # Assume any jars in /proto/ are from java_proto_library rules
+ if '/bin/proto/' in p:
+ proto.add(p)
+ else:
+ # Don't mess up with Bazel internal test runner dependencies.
+ # When we use Eclipse we rely on it for running the tests
+ if p.endswith(
+ "external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
+ continue
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ lib.add(p)
- for s in sorted(gwt_src):
- p = path.join(ROOT, s, 'src', 'main', 'java')
- if path.exists(p):
- classpathentry('lib', p, out='eclipse-out/gwtsrc')
+ for p in _query_classpath(GWT):
+ m = java_library.match(p)
+ if m:
+ gwt_src.add(m.group(1))
- classpathentry('con', JRE)
- classpathentry('output', 'eclipse-out/classes')
+ classpathentry('src', 'java')
+ classpathentry('src', 'javatests', out='eclipse-out/test')
+ classpathentry('src', 'resources')
+ for s in sorted(src):
+ out = None
- p = path.join(ROOT, '.classpath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ if s.startswith('lib/'):
+ out = 'eclipse-out/lib'
+ elif s.startswith('plugins/'):
+ if args.plugins:
+ plugins.add(s)
+ continue
+ out = 'eclipse-out/' + s
- if args.plugins:
- for plugin in plugins:
- plugindir = path.join(ROOT, plugin)
- try:
- gen_project(plugin.replace('plugins/', ""), plugindir)
- gen_plugin_classpath(plugindir)
- except (IOError, OSError) as err:
- print('error generating project for %s: %s' % (plugin, err),
- file=sys.stderr)
+ p = path.join(s, 'java')
+ if path.exists(p):
+ classpathentry('src', p, out=out)
+ continue
+
+ for env in ['main', 'test']:
+ o = None
+ if out:
+ o = out + '/' + env
+ elif env == 'test':
+ o = 'eclipse-out/test'
+
+ for srctype in ['java', 'resources']:
+ p = path.join(s, 'src', env, srctype)
+ if path.exists(p):
+ classpathentry('src', p, out=o)
+
+ for libs in [lib, gwt_lib]:
+ for j in sorted(libs):
+ s = None
+ m = srcs.match(j)
+ if m:
+ prefix = m.group(1)
+ suffix = m.group(2)
+ p = path.join(prefix, "jar", "%s-src.jar" % suffix)
+ if path.exists(p):
+ s = p
+ if args.plugins:
+ classpathentry('lib', j, s, exported=True)
+ else:
+ # Filter out the source JARs that we pull through transitive
+ # closure of GWT plugin API (we add source directories
+ # themselves). Exception is libEdit-src.jar, that is needed
+ # for GWT SDM to work.
+ m = java_library.match(j)
+ if m:
+ if m.group(1).startswith("gerrit-") and \
+ j.endswith("-src.jar") and \
+ not j.endswith("libEdit-src.jar"):
+ continue
+ classpathentry('lib', j, s)
+
+ for p in sorted(proto):
+ s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
+ s = s.replace('.jar', '-src.jar')
+ classpathentry('lib', p, s)
+
+ for s in sorted(gwt_src):
+ p = path.join(ROOT, s, 'src', 'main', 'java')
+ if path.exists(p):
+ classpathentry('lib', p, out='eclipse-out/gwtsrc')
+
+ classpathentry('con', JRE)
+ classpathentry('output', 'eclipse-out/classes')
+
+ p = path.join(ROOT, '.classpath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
+ if args.plugins:
+ for plugin in plugins:
+ plugindir = path.join(ROOT, plugin)
+ try:
+ gen_project(plugin.replace('plugins/', ""), plugindir)
+ gen_plugin_classpath(plugindir)
+ except (IOError, OSError) as err:
+ print('error generating project for %s: %s' % (plugin, err),
+ file=sys.stderr)
+
def gen_factorypath(ext):
- doc = minidom.getDOMImplementation().createDocument(None, 'factorypath', None)
- for jar in _query_classpath(AUTO):
- e = doc.createElement('factorypathentry')
- e.setAttribute('kind', 'EXTJAR')
- e.setAttribute('id', path.join(ext, jar))
- e.setAttribute('enabled', 'true')
- e.setAttribute('runInBatchMode', 'false')
- doc.documentElement.appendChild(e)
+ doc = minidom.getDOMImplementation().createDocument(None, 'factorypath',
+ None)
+ for jar in _query_classpath(AUTO):
+ e = doc.createElement('factorypathentry')
+ e.setAttribute('kind', 'EXTJAR')
+ e.setAttribute('id', path.join(ext, jar))
+ e.setAttribute('enabled', 'true')
+ e.setAttribute('runInBatchMode', 'false')
+ doc.documentElement.appendChild(e)
- p = path.join(ROOT, '.factorypath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ p = path.join(ROOT, '.factorypath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
try:
- ext_location = retrieve_ext_location().decode("utf-8")
- gen_project(args.project_name)
- gen_classpath(ext_location)
- gen_factorypath(ext_location)
- gen_bazel_path()
+ ext_location = retrieve_ext_location().decode("utf-8")
+ gen_project(args.project_name)
+ gen_classpath(ext_location)
+ gen_factorypath(ext_location)
+ gen_bazel_path()
- # TODO(davido): Remove this when GWT gone
- gwt_working_dir = ".gwt_work_dir"
- if not path.isdir(gwt_working_dir):
- makedirs(path.join(ROOT, gwt_working_dir))
+ # TODO(davido): Remove this when GWT gone
+ gwt_working_dir = ".gwt_work_dir"
+ if not path.isdir(gwt_working_dir):
+ makedirs(path.join(ROOT, gwt_working_dir))
- try:
- check_call(_build_bazel_cmd('build', MAIN, GWT, '//java/org/eclipse/jgit:libEdit-src.jar'))
- except CalledProcessError:
- exit(1)
+ try:
+ check_call(_build_bazel_cmd('build', MAIN, GWT,
+ '//java/org/eclipse/jgit:libEdit-src.jar'))
+ except CalledProcessError:
+ exit(1)
except KeyboardInterrupt:
- print('Interrupted by user', file=sys.stderr)
- exit(1)
+ print('Interrupted by user', file=sys.stderr)
+ exit(1)
diff --git a/tools/js/bower2bazel.py b/tools/js/bower2bazel.py
index 171ab55..7b24524 100755
--- a/tools/js/bower2bazel.py
+++ b/tools/js/bower2bazel.py
@@ -13,9 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Suggested call sequence:
+"""
+Suggested call sequence:
-python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl -b lib/js/bower_components.bzl
+python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl \
+ -b lib/js/bower_components.bzl
"""
from __future__ import print_function
@@ -31,139 +33,147 @@
import glob
import bowerutil
-# list of licenses for packages that don't specify one in their bower.json file.
+# list of licenses for packages that don't specify one in their bower.json file
package_licenses = {
- "codemirror-minified": "codemirror-minified",
- "es6-promise": "es6-promise",
- "fetch": "fetch",
- "font-roboto": "polymer",
- "iron-a11y-announcer": "polymer",
- "iron-a11y-keys-behavior": "polymer",
- "iron-autogrow-textarea": "polymer",
- "iron-behaviors": "polymer",
- "iron-dropdown": "polymer",
- "iron-fit-behavior": "polymer",
- "iron-flex-layout": "polymer",
- "iron-form-element-behavior": "polymer",
- "iron-icon": "polymer",
- "iron-iconset-svg": "polymer",
- "iron-input": "polymer",
- "iron-menu-behavior": "polymer",
- "iron-meta": "polymer",
- "iron-overlay-behavior": "polymer",
- "iron-resizable-behavior": "polymer",
- "iron-selector": "polymer",
- "iron-validatable-behavior": "polymer",
- "moment": "moment",
- "neon-animation": "polymer",
- "page": "page.js",
- "paper-button": "polymer",
- "paper-icon-button": "polymer",
- "paper-input": "polymer",
- "paper-item": "polymer",
- "paper-listbox": "polymer",
- "paper-toggle-button": "polymer",
- "paper-styles": "polymer",
- "paper-tabs": "polymer",
- "polymer": "polymer",
- "polymer-resin": "polymer",
- "promise-polyfill": "promise-polyfill",
- "web-animations-js": "Apache2.0",
- "webcomponentsjs": "polymer",
- "paper-material": "polymer",
- "paper-styles": "polymer",
- "paper-behaviors": "polymer",
- "paper-ripple": "polymer",
- "iron-checked-element-behavior": "polymer",
- "font-roboto": "polymer",
+ "codemirror-minified": "codemirror-minified",
+ "es6-promise": "es6-promise",
+ "fetch": "fetch",
+ "font-roboto": "polymer",
+ "iron-a11y-announcer": "polymer",
+ "iron-a11y-keys-behavior": "polymer",
+ "iron-autogrow-textarea": "polymer",
+ "iron-behaviors": "polymer",
+ "iron-dropdown": "polymer",
+ "iron-fit-behavior": "polymer",
+ "iron-flex-layout": "polymer",
+ "iron-form-element-behavior": "polymer",
+ "iron-icon": "polymer",
+ "iron-iconset-svg": "polymer",
+ "iron-input": "polymer",
+ "iron-menu-behavior": "polymer",
+ "iron-meta": "polymer",
+ "iron-overlay-behavior": "polymer",
+ "iron-resizable-behavior": "polymer",
+ "iron-selector": "polymer",
+ "iron-validatable-behavior": "polymer",
+ "moment": "moment",
+ "neon-animation": "polymer",
+ "page": "page.js",
+ "paper-button": "polymer",
+ "paper-icon-button": "polymer",
+ "paper-input": "polymer",
+ "paper-item": "polymer",
+ "paper-listbox": "polymer",
+ "paper-toggle-button": "polymer",
+ "paper-styles": "polymer",
+ "paper-tabs": "polymer",
+ "polymer": "polymer",
+ "polymer-resin": "polymer",
+ "promise-polyfill": "promise-polyfill",
+ "web-animations-js": "Apache2.0",
+ "webcomponentsjs": "polymer",
+ "paper-material": "polymer",
+ "paper-styles": "polymer",
+ "paper-behaviors": "polymer",
+ "paper-ripple": "polymer",
+ "iron-checked-element-behavior": "polymer",
+ "font-roboto": "polymer",
}
def build_bower_json(version_targets, seeds):
- """Generate bower JSON file, return its path.
+ """Generate bower JSON file, return its path.
- Args:
- version_targets: bazel target names of the versions.json file.
- seeds: an iterable of bower package names of the seed packages, ie.
- the packages whose versions we control manually.
- """
- bower_json = collections.OrderedDict()
- bower_json['name'] = 'bower2bazel-output'
- bower_json['version'] = '0.0.0'
- bower_json['description'] = 'Auto-generated bower.json for dependency management'
- bower_json['private'] = True
- bower_json['dependencies'] = {}
+ Args:
+ version_targets: bazel target names of the versions.json file.
+ seeds: an iterable of bower package names of the seed packages, ie.
+ the packages whose versions we control manually.
+ """
+ bower_json = collections.OrderedDict()
+ bower_json['name'] = 'bower2bazel-output'
+ bower_json['version'] = '0.0.0'
+ bower_json['description'] = 'Auto-generated bower.json for dependency ' + \
+ 'management'
+ bower_json['private'] = True
+ bower_json['dependencies'] = {}
- seeds = set(seeds)
- for v in version_targets:
- path = os.path.join("bazel-out/*-fastbuild/bin", v.lstrip("/").replace(":", "/"))
- fs = glob.glob(path)
- assert len(fs) == 1, '%s: file not found or multiple files found: %s' % (path, fs)
- with open(fs[0]) as f:
- j = json.load(f)
- if "" in j:
- # drop dummy entries.
- del j[""]
+ seeds = set(seeds)
+ for v in version_targets:
+ path = os.path.join("bazel-out/*-fastbuild/bin",
+ v.lstrip("/").replace(":", "/"))
+ fs = glob.glob(path)
+ err_msg = '%s: file not found or multiple files found: %s' % (path, fs)
+ assert len(fs) == 1, err_msg
+ with open(fs[0]) as f:
+ j = json.load(f)
+ if "" in j:
+ # drop dummy entries.
+ del j[""]
- trimmed = {}
- for k, v in j.items():
- if k in seeds:
- trimmed[k] = v
+ trimmed = {}
+ for k, v in j.items():
+ if k in seeds:
+ trimmed[k] = v
- bower_json['dependencies'].update(trimmed)
+ bower_json['dependencies'].update(trimmed)
- tmpdir = tempfile.mkdtemp()
- ret = os.path.join(tmpdir, 'bower.json')
- with open(ret, 'w') as f:
- json.dump(bower_json, f, indent=2)
- return ret
+ tmpdir = tempfile.mkdtemp()
+ ret = os.path.join(tmpdir, 'bower.json')
+ with open(ret, 'w') as f:
+ json.dump(bower_json, f, indent=2)
+ return ret
+
def decode(input):
- try:
- return input.decode("utf-8")
- except TypeError:
- return input
+ try:
+ return input.decode("utf-8")
+ except TypeError:
+ return input
+
def bower_command(args):
- base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
- exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
- fs = sorted(glob.glob(exp))
- assert len(fs) == 1, "bower tarball not found or have multiple versions %s" % fs
- return ["python", os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
+ base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
+ exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
+ fs = sorted(glob.glob(exp))
+ err_msg = "bower tarball not found or have multiple versions %s" % fs
+ assert len(fs) == 1, err_msg
+ return ["python",
+ os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-w', help='.bzl output for WORKSPACE')
- opts.add_option('-b', help='.bzl output for //lib:BUILD')
- opts, args = opts.parse_args()
+ opts = optparse.OptionParser()
+ opts.add_option('-w', help='.bzl output for WORKSPACE')
+ opts.add_option('-b', help='.bzl output for //lib:BUILD')
+ opts, args = opts.parse_args()
- target_str = subprocess.check_output([
- "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
- seed_str = subprocess.check_output([
- "bazel", "query", "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
- targets = [s for s in decode(target_str).split('\n') if s]
- seeds = [s for s in decode(seed_str).split('\n') if s]
- prefix = "//lib/js:"
- non_seeds = [s for s in seeds if not s.startswith(prefix)]
- assert not non_seeds, non_seeds
- seeds = set([s[len(prefix):] for s in seeds])
+ target_str = subprocess.check_output([
+ "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
+ seed_str = subprocess.check_output(
+ ["bazel", "query",
+ "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
+ targets = [s for s in decode(target_str).split('\n') if s]
+ seeds = [s for s in decode(seed_str).split('\n') if s]
+ prefix = "//lib/js:"
+ non_seeds = [s for s in seeds if not s.startswith(prefix)]
+ assert not non_seeds, non_seeds
+ seeds = set([s[len(prefix):] for s in seeds])
- version_targets = [t + "-versions.json" for t in targets]
- subprocess.check_call(['bazel', 'build'] + version_targets)
- bower_json_path = build_bower_json(version_targets, seeds)
- dir = os.path.dirname(bower_json_path)
- cmd = bower_command(["install"])
+ version_targets = [t + "-versions.json" for t in targets]
+ subprocess.check_call(['bazel', 'build'] + version_targets)
+ bower_json_path = build_bower_json(version_targets, seeds)
+ dir = os.path.dirname(bower_json_path)
+ cmd = bower_command(["install"])
- build_out = sys.stdout
- if opts.b:
- build_out = open(opts.b + ".tmp", 'w')
+ build_out = sys.stdout
+ if opts.b:
+ build_out = open(opts.b + ".tmp", 'w')
- ws_out = sys.stdout
- if opts.b:
- ws_out = open(opts.w + ".tmp", 'w')
+ ws_out = sys.stdout
+ if opts.b:
+ ws_out = open(opts.w + ".tmp", 'w')
- header = """# DO NOT EDIT
+ header = """# DO NOT EDIT
# generated with the following command:
#
# %s
@@ -171,30 +181,30 @@
""" % ' '.join(sys.argv)
- ws_out.write(header)
- build_out.write(header)
+ ws_out.write(header)
+ build_out.write(header)
- oldwd = os.getcwd()
- os.chdir(dir)
- subprocess.check_call(cmd)
+ oldwd = os.getcwd()
+ os.chdir(dir)
+ subprocess.check_call(cmd)
- interpret_bower_json(seeds, ws_out, build_out)
- ws_out.close()
- build_out.close()
+ interpret_bower_json(seeds, ws_out, build_out)
+ ws_out.close()
+ build_out.close()
- os.chdir(oldwd)
- os.rename(opts.w + ".tmp", opts.w)
- os.rename(opts.b + ".tmp", opts.b)
+ os.chdir(oldwd)
+ os.rename(opts.w + ".tmp", opts.w)
+ os.rename(opts.b + ".tmp", opts.b)
def dump_workspace(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
- out.write('def load_bower_archives():\n')
+ out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
+ out.write('def load_bower_archives():\n')
- for d in data:
- if d["name"] in seeds:
- continue
- out.write(""" bower_archive(
+ for d in data:
+ if d["name"] in seeds:
+ continue
+ out.write(""" bower_archive(
name = "%(name)s",
package = "%(normalized-name)s",
version = "%(version)s",
@@ -203,48 +213,49 @@
def dump_build(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
- out.write('def define_bower_components():\n')
- for d in data:
- out.write(" bower_component(\n")
- out.write(" name = \"%s\",\n" % d["name"])
- out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
- deps = sorted(d.get("dependencies", {}).keys())
- if deps:
- if len(deps) == 1:
- out.write(" deps = [ \":%s\" ],\n" % deps[0])
- else:
- out.write(" deps = [\n")
- for dep in deps:
- out.write(" \":%s\",\n" % dep)
- out.write(" ],\n")
- if d["name"] in seeds:
- out.write(" seed = True,\n")
- out.write(" )\n")
- # done
+ out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
+ out.write('def define_bower_components():\n')
+ for d in data:
+ out.write(" bower_component(\n")
+ out.write(" name = \"%s\",\n" % d["name"])
+ out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
+ deps = sorted(d.get("dependencies", {}).keys())
+ if deps:
+ if len(deps) == 1:
+ out.write(" deps = [ \":%s\" ],\n" % deps[0])
+ else:
+ out.write(" deps = [\n")
+ for dep in deps:
+ out.write(" \":%s\",\n" % dep)
+ out.write(" ],\n")
+ if d["name"] in seeds:
+ out.write(" seed = True,\n")
+ out.write(" )\n")
+ # done
def interpret_bower_json(seeds, ws_out, build_out):
- out = subprocess.check_output(["find", "bower_components/", "-name", ".bower.json"])
+ out = subprocess.check_output(["find", "bower_components/", "-name",
+ ".bower.json"])
- data = []
- for f in sorted(decode(out).split('\n')):
- if not f:
- continue
- pkg = json.load(open(f))
- pkg_name = pkg["name"]
+ data = []
+ for f in sorted(decode(out).split('\n')):
+ if not f:
+ continue
+ pkg = json.load(open(f))
+ pkg_name = pkg["name"]
- pkg["bazel-sha1"] = bowerutil.hash_bower_component(
- hashlib.sha1(), os.path.dirname(f)).hexdigest()
- license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
+ pkg["bazel-sha1"] = bowerutil.hash_bower_component(
+ hashlib.sha1(), os.path.dirname(f)).hexdigest()
+ license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
- pkg["bazel-license"] = license
- pkg["normalized-name"] = pkg["_originalSource"]
- data.append(pkg)
+ pkg["bazel-license"] = license
+ pkg["normalized-name"] = pkg["_originalSource"]
+ data.append(pkg)
- dump_workspace(data, seeds, ws_out)
- dump_build(data, seeds, build_out)
+ dump_workspace(data, seeds, ws_out)
+ dump_build(data, seeds, build_out)
if __name__ == '__main__':
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/tools/js/bowerutil.py b/tools/js/bowerutil.py
index c2e11cd..9fb82af 100644
--- a/tools/js/bowerutil.py
+++ b/tools/js/bowerutil.py
@@ -16,31 +16,31 @@
def hash_bower_component(hash_obj, path):
- """Hash the contents of a bower component directory.
+ """Hash the contents of a bower component directory.
- This is a stable hash of a directory downloaded with `bower install`, minus
- the .bower.json file, which is autogenerated each time by bower. Used in lieu
- of hashing a zipfile of the contents, since zipfiles are difficult to hash in
- a stable manner.
+ This is a stable hash of a directory downloaded with `bower install`, minus
+ the .bower.json file, which is autogenerated each time by bower. Used in
+ lieu of hashing a zipfile of the contents, since zipfiles are difficult to
+ hash in a stable manner.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the directory to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the directory to hash.
- Returns:
- The passed-in hash_obj.
- """
- if not os.path.isdir(path):
- raise ValueError('Not a directory: %s' % path)
+ Returns:
+ The passed-in hash_obj.
+ """
+ if not os.path.isdir(path):
+ raise ValueError('Not a directory: %s' % path)
- path = os.path.abspath(path)
- for root, dirs, files in os.walk(path):
- dirs.sort()
- for f in sorted(files):
- if f == '.bower.json':
- continue
- p = os.path.join(root, f)
- hash_obj.update(p[len(path)+1:].encode("utf-8"))
- hash_obj.update(open(p, "rb").read())
+ path = os.path.abspath(path)
+ for root, dirs, files in os.walk(path):
+ dirs.sort()
+ for f in sorted(files):
+ if f == '.bower.json':
+ continue
+ p = os.path.join(root, f)
+ hash_obj.update(p[len(path)+1:].encode("utf-8"))
+ hash_obj.update(open(p, "rb").read())
- return hash_obj
+ return hash_obj
diff --git a/tools/js/download_bower.py b/tools/js/download_bower.py
index 3db39d5..c9a5df6 100755
--- a/tools/js/download_bower.py
+++ b/tools/js/download_bower.py
@@ -30,99 +30,105 @@
def bower_cmd(bower, *args):
- cmd = bower.split(' ')
- cmd.extend(args)
- return cmd
+ cmd = bower.split(' ')
+ cmd.extend(args)
+ return cmd
def bower_info(bower, name, package, version):
- cmd = bower_cmd(bower, '-l=error', '-j',
- 'info', '%s#%s' % (package, version))
- try:
- p = subprocess.Popen(cmd , stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except:
- sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
- raise
- out, err = p.communicate()
- if p.returncode:
- sys.stderr.write(err)
- raise OSError('Command failed: %s' % ' '.join(cmd))
+ cmd = bower_cmd(bower, '-l=error', '-j',
+ 'info', '%s#%s' % (package, version))
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except:
+ sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
+ raise
+ out, err = p.communicate()
+ if p.returncode:
+ sys.stderr.write(err)
+ raise OSError('Command failed: %s' % ' '.join(cmd))
- try:
- info = json.loads(out)
- except ValueError:
- raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
- info_name = info.get('name')
- if info_name != name:
- raise ValueError('expected package name %s, got: %s' % (name, info_name))
- return info
+ try:
+ info = json.loads(out)
+ except ValueError:
+ raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
+ info_name = info.get('name')
+ if info_name != name:
+ raise ValueError(
+ 'expected package name %s, got: %s' % (name, info_name))
+ return info
def ignore_deps(info):
- # Tell bower to ignore dependencies so we just download this component. This
- # is just an optimization, since we only pick out the component we need, but
- # it's important when downloading sizable dependency trees.
- #
- # As of 1.6.5 I don't think ignoredDependencies can be specified on the
- # command line with --config, so we have to create .bowerrc.
- deps = info.get('dependencies')
- if deps:
- with open(os.path.join('.bowerrc'), 'w') as f:
- json.dump({'ignoredDependencies': list(deps.keys())}, f)
+ # Tell bower to ignore dependencies so we just download this component.
+ # This is just an optimization, since we only pick out the component we
+ # need, but it's important when downloading sizable dependency trees.
+ #
+ # As of 1.6.5 I don't think ignoredDependencies can be specified on the
+ # command line with --config, so we have to create .bowerrc.
+ deps = info.get('dependencies')
+ if deps:
+ with open(os.path.join('.bowerrc'), 'w') as f:
+ json.dump({'ignoredDependencies': list(deps.keys())}, f)
def cache_entry(name, package, version, sha1):
- if not sha1:
- sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
- return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
+ if not sha1:
+ sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
+ return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-n', help='short name of component')
- opts.add_option('-b', help='bower command')
- opts.add_option('-p', help='full package name of component')
- opts.add_option('-v', help='version number')
- opts.add_option('-s', help='expected content sha1')
- opts.add_option('-o', help='output file location')
- opts, args_ = opts.parse_args(args)
+ opts = optparse.OptionParser()
+ opts.add_option('-n', help='short name of component')
+ opts.add_option('-b', help='bower command')
+ opts.add_option('-p', help='full package name of component')
+ opts.add_option('-v', help='version number')
+ opts.add_option('-s', help='expected content sha1')
+ opts.add_option('-o', help='output file location')
+ opts, args_ = opts.parse_args(args)
- assert opts.p
- assert opts.v
- assert opts.n
+ assert opts.p
+ assert opts.v
+ assert opts.n
- cwd = os.getcwd()
- outzip = os.path.join(cwd, opts.o)
- cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
+ cwd = os.getcwd()
+ outzip = os.path.join(cwd, opts.o)
+ cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
- if not os.path.exists(cached):
- info = bower_info(opts.b, opts.n, opts.p, opts.v)
- ignore_deps(info)
- subprocess.check_call(
- bower_cmd(opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
- bc = os.path.join(cwd, 'bower_components')
- subprocess.check_call(
- ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
- cwd=bc)
+ if not os.path.exists(cached):
+ info = bower_info(opts.b, opts.n, opts.p, opts.v)
+ ignore_deps(info)
+ subprocess.check_call(
+ bower_cmd(
+ opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
+ bc = os.path.join(cwd, 'bower_components')
+ subprocess.check_call(
+ ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
+ cwd=bc)
- if opts.s:
- path = os.path.join(bc, opts.n)
- sha1 = bowerutil.hash_bower_component(hashlib.sha1(), path).hexdigest()
- if opts.s != sha1:
- print((
- '%s#%s:\n'
- 'expected %s\n'
- 'received %s\n') % (opts.p, opts.v, opts.s, sha1), file=sys.stderr)
- try:
- os.remove(cached)
- except OSError as err:
- if path.exists(cached):
- print('error removing %s: %s' % (cached, err), file=sys.stderr)
- return 1
+ if opts.s:
+ path = os.path.join(bc, opts.n)
+ sha1 = bowerutil.hash_bower_component(
+ hashlib.sha1(), path).hexdigest()
+ if opts.s != sha1:
+ print((
+ '%s#%s:\n'
+ 'expected %s\n'
+ 'received %s\n') % (opts.p, opts.v, opts.s, sha1),
+ file=sys.stderr)
+ try:
+ os.remove(cached)
+ except OSError as err:
+ if path.exists(cached):
+ print('error removing %s: %s' % (cached, err),
+ file=sys.stderr)
+ return 1
- shutil.copyfile(cached, outzip)
- return 0
+ shutil.copyfile(cached, outzip)
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/npm_pack.py b/tools/js/npm_pack.py
index de45083..d817701 100755
--- a/tools/js/npm_pack.py
+++ b/tools/js/npm_pack.py
@@ -32,49 +32,49 @@
def is_bundled(tar):
- # No entries for directories, so scan for a matching prefix.
- for entry in tar.getmembers():
- if entry.name.startswith('package/node_modules/'):
- return True
- return False
+ # No entries for directories, so scan for a matching prefix.
+ for entry in tar.getmembers():
+ if entry.name.startswith('package/node_modules/'):
+ return True
+ return False
def bundle_dependencies():
- with open('package.json') as f:
- package = json.load(f)
- package['bundledDependencies'] = list(package['dependencies'].keys())
- with open('package.json', 'w') as f:
- json.dump(package, f)
+ with open('package.json') as f:
+ package = json.load(f)
+ package['bundledDependencies'] = list(package['dependencies'].keys())
+ with open('package.json', 'w') as f:
+ json.dump(package, f)
def main(args):
- if len(args) != 2:
- print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
- return 1
+ if len(args) != 2:
+ print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, version = args
- filename = '%s-%s.tgz' % (name, version)
- url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
+ name, version = args
+ filename = '%s-%s.tgz' % (name, version)
+ url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
- tmpdir = tempfile.mkdtemp();
- tgz = os.path.join(tmpdir, filename)
- atexit.register(lambda: shutil.rmtree(tmpdir))
+ tmpdir = tempfile.mkdtemp()
+ tgz = os.path.join(tmpdir, filename)
+ atexit.register(lambda: shutil.rmtree(tmpdir))
- subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
- with tarfile.open(tgz, 'r:gz') as tar:
- if is_bundled(tar):
- print('%s already has bundled node_modules' % filename)
- return 1
- tar.extractall(path=tmpdir)
+ subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
+ with tarfile.open(tgz, 'r:gz') as tar:
+ if is_bundled(tar):
+ print('%s already has bundled node_modules' % filename)
+ return 1
+ tar.extractall(path=tmpdir)
- oldpwd = os.getcwd()
- os.chdir(os.path.join(tmpdir, 'package'))
- bundle_dependencies()
- subprocess.check_call(['npm', 'install'])
- subprocess.check_call(['npm', 'pack'])
- shutil.copy(filename, os.path.join(oldpwd, filename))
- return 0
+ oldpwd = os.getcwd()
+ os.chdir(os.path.join(tmpdir, 'package'))
+ bundle_dependencies()
+ subprocess.check_call(['npm', 'install'])
+ subprocess.check_call(['npm', 'pack'])
+ shutil.copy(filename, os.path.join(oldpwd, filename))
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/run_npm_binary.py b/tools/js/run_npm_binary.py
index d769b98..dfcdaca 100644
--- a/tools/js/run_npm_binary.py
+++ b/tools/js/run_npm_binary.py
@@ -27,65 +27,68 @@
def extract(path, outdir, bin):
- if os.path.exists(os.path.join(outdir, bin)):
- return # Another process finished extracting, ignore.
+ if os.path.exists(os.path.join(outdir, bin)):
+ return # Another process finished extracting, ignore.
- # Use a temp directory adjacent to outdir so shutil.move can use the same
- # device atomically.
- tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def cleanup():
- try:
- shutil.rmtree(tmpdir)
- except OSError:
- pass # Too late now
- atexit.register(cleanup)
+ # Use a temp directory adjacent to outdir so shutil.move can use the same
+ # device atomically.
+ tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def extract_one(mem):
- dest = os.path.join(outdir, mem.name)
- tar.extract(mem, path=tmpdir)
- try:
- os.makedirs(os.path.dirname(dest))
- except OSError:
- pass # Either exists, or will fail on the next line.
- shutil.move(os.path.join(tmpdir, mem.name), dest)
+ def cleanup():
+ try:
+ shutil.rmtree(tmpdir)
+ except OSError:
+ pass # Too late now
+ atexit.register(cleanup)
- with tarfile.open(path, 'r:gz') as tar:
- for mem in tar.getmembers():
- if mem.name != bin:
- extract_one(mem)
- # Extract bin last so other processes only short circuit when extraction is
- # finished.
- extract_one(tar.getmember(bin))
+ def extract_one(mem):
+ dest = os.path.join(outdir, mem.name)
+ tar.extract(mem, path=tmpdir)
+ try:
+ os.makedirs(os.path.dirname(dest))
+ except OSError:
+ pass # Either exists, or will fail on the next line.
+ shutil.move(os.path.join(tmpdir, mem.name), dest)
+
+ with tarfile.open(path, 'r:gz') as tar:
+ for mem in tar.getmembers():
+ if mem.name != bin:
+ extract_one(mem)
+ # Extract bin last so other processes only short circuit when
+ # extraction is finished.
+ extract_one(tar.getmember(bin))
+
def main(args):
- path = args[0]
- suffix = '.npm_binary.tgz'
- tgz = os.path.basename(path)
+ path = args[0]
+ suffix = '.npm_binary.tgz'
+ tgz = os.path.basename(path)
- parts = tgz[:-len(suffix)].split('@')
+ parts = tgz[:-len(suffix)].split('@')
- if not tgz.endswith(suffix) or len(parts) != 2:
- print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
- return 1
+ if not tgz.endswith(suffix) or len(parts) != 2:
+ print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, _ = parts
+ name, _ = parts
- # Avoid importing from gerrit because we don't want to depend on the right CWD.
- sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
- outdir = '%s-%s' % (path[:-len(suffix)], sha1)
- rel_bin = os.path.join('package', 'bin', name)
- bin = os.path.join(outdir, rel_bin)
- if not os.path.isfile(bin):
- extract(path, outdir, rel_bin)
+ # Avoid importing from gerrit because we don't want to depend on the right
+ # working directory
+ sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
+ outdir = '%s-%s' % (path[:-len(suffix)], sha1)
+ rel_bin = os.path.join('package', 'bin', name)
+ bin = os.path.join(outdir, rel_bin)
+ if not os.path.isfile(bin):
+ extract(path, outdir, rel_bin)
- nodejs = spawn.find_executable('nodejs')
- if nodejs:
- # Debian installs Node.js as 'nodejs', due to a conflict with another
- # package.
- subprocess.check_call([nodejs, bin] + args[1:])
- else:
- subprocess.check_call([bin] + args[1:])
+ nodejs = spawn.find_executable('nodejs')
+ if nodejs:
+ # Debian installs Node.js as 'nodejs', due to a conflict with another
+ # package.
+ subprocess.check_call([nodejs, bin] + args[1:])
+ else:
+ subprocess.check_call([bin] + args[1:])
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/maven/mvn.py b/tools/maven/mvn.py
index 50c4ac6..d47d027 100755
--- a/tools/maven/mvn.py
+++ b/tools/maven/mvn.py
@@ -29,56 +29,57 @@
args, ctx = opts.parse_args()
if not args.v:
- print('version is empty', file=stderr)
- exit(1)
+ print('version is empty', file=stderr)
+ exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
- root = path.dirname(root)
+ root = path.dirname(root)
if 'install' == args.a:
- cmd = [
- 'mvn',
- 'install:install-file',
- '-Dversion=%s' % args.v,
- ]
+ cmd = [
+ 'mvn',
+ 'install:install-file',
+ '-Dversion=%s' % args.v,
+ ]
elif 'deploy' == args.a:
- cmd = [
- 'mvn',
- 'gpg:sign-and-deploy-file',
- '-DrepositoryId=%s' % args.repository,
- '-Durl=%s' % args.url,
- ]
+ cmd = [
+ 'mvn',
+ 'gpg:sign-and-deploy-file',
+ '-DrepositoryId=%s' % args.repository,
+ '-Durl=%s' % args.url,
+ ]
else:
- print("unknown action -a %s" % args.a, file=stderr)
- exit(1)
+ print("unknown action -a %s" % args.a, file=stderr)
+ exit(1)
for spec in args.s:
- artifact, packaging_type, src = spec.split(':')
- exe = cmd + [
- '-DpomFile=%s' % path.join(root, 'tools', 'maven', '%s_pom.xml' % artifact),
- '-Dpackaging=%s' % packaging_type,
- '-Dfile=%s' % src,
- ]
- try:
- if environ.get('VERBOSE'):
- print(' '.join(exe), file=stderr)
- check_output(exe)
- except Exception as e:
- print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
- file=stderr)
- if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
- print('Command output\n%s' % e.output, file=stderr)
- exit(1)
+ artifact, packaging_type, src = spec.split(':')
+ exe = cmd + [
+ '-DpomFile=%s' % path.join(root, 'tools', 'maven',
+ '%s_pom.xml' % artifact),
+ '-Dpackaging=%s' % packaging_type,
+ '-Dfile=%s' % src,
+ ]
+ try:
+ if environ.get('VERBOSE'):
+ print(' '.join(exe), file=stderr)
+ check_output(exe)
+ except Exception as e:
+ print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
+ file=stderr)
+ if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
+ print('Command output\n%s' % e.output, file=stderr)
+ exit(1)
out = stderr
if args.o:
- out = open(args.o, 'w')
+ out = open(args.o, 'w')
with out as fd:
- if args.repository:
- print('Repository: %s' % args.repository, file=fd)
- if args.url:
- print('URL: %s' % args.url, file=fd)
- print('Version: %s' % args.v, file=fd)
+ if args.repository:
+ print('Repository: %s' % args.repository, file=fd)
+ if args.url:
+ print('URL: %s' % args.url, file=fd)
+ print('Version: %s' % args.v, file=fd)
diff --git a/tools/merge_jars.py b/tools/merge_jars.py
index 97a87c4..6b46069 100755
--- a/tools/merge_jars.py
+++ b/tools/merge_jars.py
@@ -17,11 +17,10 @@
import collections
import sys
import zipfile
-import io
if len(sys.argv) < 3:
- print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
- exit(1)
+ print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
+ exit(1)
outfile = sys.argv[1]
infiles = sys.argv[2:]
@@ -29,22 +28,22 @@
SERVICES = 'META-INF/services/'
try:
- with zipfile.ZipFile(outfile, 'w') as outzip:
- services = collections.defaultdict(lambda: '')
- for infile in infiles:
- with zipfile.ZipFile(infile) as inzip:
- for info in inzip.infolist():
- n = info.filename
- if n in seen:
- continue
- elif n.startswith(SERVICES):
- # Concatenate all provider configuration files.
- services[n] += inzip.read(n).decode("UTF-8")
- continue
- outzip.writestr(info, inzip.read(n))
- seen.add(n)
+ with zipfile.ZipFile(outfile, 'w') as outzip:
+ services = collections.defaultdict(lambda: '')
+ for infile in infiles:
+ with zipfile.ZipFile(infile) as inzip:
+ for info in inzip.infolist():
+ n = info.filename
+ if n in seen:
+ continue
+ elif n.startswith(SERVICES):
+ # Concatenate all provider configuration files.
+ services[n] += inzip.read(n).decode("UTF-8")
+ continue
+ outzip.writestr(info, inzip.read(n))
+ seen.add(n)
- for n, v in list(services.items()):
- outzip.writestr(n, v)
+ for n, v in list(services.items()):
+ outzip.writestr(n, v)
except Exception as err:
- exit('Failed to merge jars: %s' % err)
+ exit('Failed to merge jars: %s' % err)
diff --git a/tools/release-announcement.py b/tools/release-announcement.py
index f700185..a25a340 100755
--- a/tools/release-announcement.py
+++ b/tools/release-announcement.py
@@ -101,9 +101,9 @@
summary = summary + "."
data = {
- "version": Version(options.version),
- "previous": options.previous,
- "summary": summary
+ "version": Version(options.version),
+ "previous": options.previous,
+ "summary": summary
}
war = os.path.join(
diff --git a/tools/util.py b/tools/util.py
index e8182ed..45d0541 100644
--- a/tools/util.py
+++ b/tools/util.py
@@ -15,57 +15,59 @@
from os import path
REPO_ROOTS = {
- 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
- 'GERRIT_API': 'https://gerrit-api.commondatastorage.googleapis.com/release',
- 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
- 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
- 'MAVEN_SNAPSHOT': 'https://oss.sonatype.org/content/repositories/snapshots',
+ 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
+ 'GERRIT_API':
+ 'https://gerrit-api.commondatastorage.googleapis.com/release',
+ 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
+ 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
+ 'MAVEN_SNAPSHOT':
+ 'https://oss.sonatype.org/content/repositories/snapshots',
}
def resolve_url(url, redirects):
- """ Resolve URL of a Maven artifact.
+ """ Resolve URL of a Maven artifact.
- prefix:path is passed as URL. prefix identifies known or custom
- repositories that can be rewritten in redirects set, passed as
- second arguments.
+ prefix:path is passed as URL. prefix identifies known or custom
+ repositories that can be rewritten in redirects set, passed as
+    second argument.
- A special case is supported, when prefix neither exists in
- REPO_ROOTS, no in redirects set: the url is returned as is.
- This enables plugins to pass custom maven_repository URL as is
- directly to maven_jar().
+ A special case is supported, when prefix neither exists in
+    REPO_ROOTS, nor in redirects set: the url is returned as is.
+ This enables plugins to pass custom maven_repository URL as is
+ directly to maven_jar().
- Returns a resolved path for Maven artifact.
- """
- s = url.find(':')
- if s < 0:
- return url
- scheme, rest = url[:s], url[s+1:]
- if scheme in redirects:
- root = redirects[scheme]
- elif scheme in REPO_ROOTS:
- root = REPO_ROOTS[scheme]
- else:
- return url
- root = root.rstrip('/')
- rest = rest.lstrip('/')
- return '/'.join([root, rest])
+ Returns a resolved path for Maven artifact.
+ """
+ s = url.find(':')
+ if s < 0:
+ return url
+ scheme, rest = url[:s], url[s+1:]
+ if scheme in redirects:
+ root = redirects[scheme]
+ elif scheme in REPO_ROOTS:
+ root = REPO_ROOTS[scheme]
+ else:
+ return url
+ root = root.rstrip('/')
+ rest = rest.lstrip('/')
+ return '/'.join([root, rest])
def hash_file(hash_obj, path):
- """Hash the contents of a file.
+ """Hash the contents of a file.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the file to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the file to hash.
- Returns:
- The passed-in hash_obj.
- """
- with open(path, 'rb') as f:
- while True:
- b = f.read(8192)
- if not b:
- break
- hash_obj.update(b)
- return hash_obj
+ Returns:
+ The passed-in hash_obj.
+ """
+ with open(path, 'rb') as f:
+ while True:
+ b = f.read(8192)
+ if not b:
+ break
+ hash_obj.update(b)
+ return hash_obj
diff --git a/tools/util_test.py b/tools/util_test.py
index 30647ba..fa67696 100644
--- a/tools/util_test.py
+++ b/tools/util_test.py
@@ -16,28 +16,32 @@
import unittest
from util import resolve_url
+
class TestResolveUrl(unittest.TestCase):
- """ run to test:
- python -m unittest -v util_test
- """
+ """ run to test:
+ python -m unittest -v util_test
+ """
- def testKnown(self):
- url = resolve_url('GERRIT:foo.jar', {})
- self.assertEqual(url, 'http://gerrit-maven.storage.googleapis.com/foo.jar')
+ def testKnown(self):
+ url = resolve_url('GERRIT:foo.jar', {})
+ self.assertEqual(url,
+ 'http://gerrit-maven.storage.googleapis.com/foo.jar')
- def testKnownRedirect(self):
- url = resolve_url('MAVEN_CENTRAL:foo.jar',
- {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
- self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
+ def testKnownRedirect(self):
+ url = resolve_url('MAVEN_CENTRAL:foo.jar',
+ {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
+ self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
- def testCustom(self):
- url = resolve_url('http://maven.example.com/release/foo.jar', {})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustom(self):
+ url = resolve_url('http://maven.example.com/release/foo.jar', {})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
- def testCustomRedirect(self):
- url = resolve_url('MAVEN_EXAMPLE:foo.jar',
- {'MAVEN_EXAMPLE': 'http://maven.example.com/release'})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustomRedirect(self):
+ url = resolve_url('MAVEN_EXAMPLE:foo.jar',
+ {'MAVEN_EXAMPLE':
+ 'http://maven.example.com/release'})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+
if __name__ == '__main__':
- unittest.main()
+ unittest.main()
diff --git a/tools/version.py b/tools/version.py
index 72b0134..4aafcb0 100755
--- a/tools/version.py
+++ b/tools/version.py
@@ -23,24 +23,24 @@
opts, args = parser.parse_args()
if not len(args):
- parser.error('not enough arguments')
+ parser.error('not enough arguments')
elif len(args) > 1:
- parser.error('too many arguments')
+ parser.error('too many arguments')
DEST_PATTERN = r'\g<1>%s\g<3>' % args[0]
def replace_in_file(filename, src_pattern):
- try:
- f = open(filename, "r")
- s = f.read()
- f.close()
- s = re.sub(src_pattern, DEST_PATTERN, s)
- f = open(filename, "w")
- f.write(s)
- f.close()
- except IOError as err:
- print('error updating %s: %s' % (filename, err), file=sys.stderr)
+ try:
+ f = open(filename, "r")
+ s = f.read()
+ f.close()
+ s = re.sub(src_pattern, DEST_PATTERN, s)
+ f = open(filename, "w")
+ f.write(s)
+ f.close()
+ except IOError as err:
+ print('error updating %s: %s' % (filename, err), file=sys.stderr)
src_pattern = re.compile(r'^(\s*<version>)([-.\w]+)(</version>\s*)$',
@@ -48,8 +48,8 @@
for project in ['gerrit-acceptance-framework', 'gerrit-extension-api',
'gerrit-plugin-api', 'gerrit-plugin-gwtui',
'gerrit-war']:
- pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
- replace_in_file(pom, src_pattern)
+ pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
+ replace_in_file(pom, src_pattern)
src_pattern = re.compile(r'^(GERRIT_VERSION = ")([-.\w]+)(")$', re.MULTILINE)
replace_in_file('version.bzl', src_pattern)