Merge changes I5a13a9fc,Ife10635b
* changes:
Upgrade protobuf-java to 3.5.1
Upgrade gson to 2.8.4
diff --git a/.bazelproject b/.bazelproject
index e3a7a9c..8a726eb 100644
--- a/.bazelproject
+++ b/.bazelproject
@@ -4,6 +4,7 @@
directories:
.
+ -bin
-eclipse-out
-contrib
-gerrit-package-plugins
diff --git a/Documentation/replace_macros.py b/Documentation/replace_macros.py
index c76d133..6f90697 100755
--- a/Documentation/replace_macros.py
+++ b/Documentation/replace_macros.py
@@ -183,7 +183,8 @@
element.insertBefore(a, element.firstChild);
// remove the link icon when the mouse is moved away,
- // but keep it shown if the mouse is over the element, the link or the icon
+ // but keep it shown if the mouse is over the element, the link or
+ // the icon
hide = function(evt) {
if (document.elementFromPoint(evt.clientX, evt.clientY) != element
&& document.elementFromPoint(evt.clientX, evt.clientY) != a
@@ -229,54 +230,54 @@
options, _ = opts.parse_args()
try:
- try:
- out_file = open(options.out, 'w', errors='ignore')
- src_file = open(options.src, 'r', errors='ignore')
- except TypeError:
- out_file = open(options.out, 'w')
- src_file = open(options.src, 'r')
- last_line = ''
- ignore_next_line = False
- last_title = ''
- for line in src_file:
- if PAT_GERRIT.match(last_line):
- # Case of "GERRIT\n------" at the footer
- out_file.write(GERRIT_UPLINK)
- last_line = ''
- elif PAT_SEARCHBOX.match(last_line):
- # Case of 'SEARCHBOX\n---------'
- if options.searchbox:
- out_file.write(SEARCH_BOX)
- last_line = ''
- elif PAT_INCLUDE.match(line):
- # Case of 'include::<filename>'
- match = PAT_INCLUDE.match(line)
- out_file.write(last_line)
- last_line = match.group(1) + options.suffix + match.group(2) + '\n'
- elif PAT_STARS.match(line):
- if PAT_TITLE.match(last_line):
- # Case of the title in '.<title>\n****\nget::<url>\n****'
- match = PAT_TITLE.match(last_line)
- last_title = GET_TITLE % match.group(1)
- else:
- out_file.write(last_line)
- last_title = ''
- elif PAT_GET.match(line):
- # Case of '****\nget::<url>\n****' in rest api
- url = PAT_GET.match(line).group(1)
- out_file.write(GET_MACRO.format(url) % last_title)
- ignore_next_line = True
- elif ignore_next_line:
- # Handle the trailing '****' of the 'get::' case
- last_line = ''
- ignore_next_line = False
- else:
- out_file.write(last_line)
- last_line = line
- out_file.write(last_line)
- out_file.write(LINK_SCRIPT)
- out_file.close()
+ try:
+ out_file = open(options.out, 'w', errors='ignore')
+ src_file = open(options.src, 'r', errors='ignore')
+ except TypeError:
+ out_file = open(options.out, 'w')
+ src_file = open(options.src, 'r')
+ last_line = ''
+ ignore_next_line = False
+ last_title = ''
+ for line in src_file:
+ if PAT_GERRIT.match(last_line):
+ # Case of "GERRIT\n------" at the footer
+ out_file.write(GERRIT_UPLINK)
+ last_line = ''
+ elif PAT_SEARCHBOX.match(last_line):
+ # Case of 'SEARCHBOX\n---------'
+ if options.searchbox:
+ out_file.write(SEARCH_BOX)
+ last_line = ''
+ elif PAT_INCLUDE.match(line):
+ # Case of 'include::<filename>'
+ match = PAT_INCLUDE.match(line)
+ out_file.write(last_line)
+ last_line = match.group(1) + options.suffix + match.group(2) + '\n'
+ elif PAT_STARS.match(line):
+ if PAT_TITLE.match(last_line):
+ # Case of the title in '.<title>\n****\nget::<url>\n****'
+ match = PAT_TITLE.match(last_line)
+ last_title = GET_TITLE % match.group(1)
+ else:
+ out_file.write(last_line)
+ last_title = ''
+ elif PAT_GET.match(line):
+ # Case of '****\nget::<url>\n****' in rest api
+ url = PAT_GET.match(line).group(1)
+ out_file.write(GET_MACRO.format(url) % last_title)
+ ignore_next_line = True
+ elif ignore_next_line:
+ # Handle the trailing '****' of the 'get::' case
+ last_line = ''
+ ignore_next_line = False
+ else:
+ out_file.write(last_line)
+ last_line = line
+ out_file.write(last_line)
+ out_file.write(LINK_SCRIPT)
+ out_file.close()
except IOError as err:
- sys.stderr.write(
- "error while expanding %s to %s: %s" % (options.src, options.out, err))
- exit(1)
+ sys.stderr.write(
+ "error while expanding %s to %s: %s" % (options.src, options.out, err))
+ exit(1)
diff --git a/contrib/check-valid-commit.py b/contrib/check-valid-commit.py
index d26fa58..763ae3e 100755
--- a/contrib/check-valid-commit.py
+++ b/contrib/check-valid-commit.py
@@ -10,13 +10,16 @@
SSH_USER = 'bot'
SSH_HOST = 'localhost'
SSH_PORT = 29418
-SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER, SSH_HOST, SSH_PORT)
+SSH_COMMAND = 'ssh %s@%s -p %d gerrit approve ' % (SSH_USER,
+ SSH_HOST,
+ SSH_PORT)
FAILURE_SCORE = '--code-review=-2'
FAILURE_MESSAGE = 'This commit message does not match the standard.' \
+ ' Please correct the commit message and upload a replacement patch.'
PASS_SCORE = '--code-review=0'
PASS_MESSAGE = ''
+
def main():
change = None
project = None
@@ -25,8 +28,9 @@
patchset = None
try:
- opts, _args = getopt.getopt(sys.argv[1:], '', \
- ['change=', 'project=', 'branch=', 'commit=', 'patchset='])
+ opts, _args = getopt.getopt(sys.argv[1:], '',
+ ['change=', 'project=', 'branch=',
+ 'commit=', 'patchset='])
except getopt.GetoptError as err:
print('Error: %s' % (err))
usage()
@@ -48,8 +52,7 @@
usage()
sys.exit(-1)
- if change == None or project == None or branch == None \
- or commit == None or patchset == None:
+ if any(p is None for p in [change, project, branch, commit, patchset]):
usage()
sys.exit(-1)
@@ -57,16 +60,16 @@
status, output = subprocess.getstatusoutput(command)
if status != 0:
- print('Error running \'%s\'. status: %s, output:\n\n%s' % \
- (command, status, output))
+ print('Error running \'%s\'. status: %s, output:\n\n%s' %
+ (command, status, output))
sys.exit(-1)
commitMessage = output[(output.find('\n\n')+2):]
commitLines = commitMessage.split('\n')
if len(commitLines) > 1 and len(commitLines[1]) != 0:
- fail(commit, 'Invalid commit summary. The summary must be ' \
- + 'one line followed by a blank line.')
+ fail(commit, 'Invalid commit summary. The summary must be '
+ + 'one line followed by a blank line.')
i = 0
for line in commitLines:
@@ -76,23 +79,27 @@
passes(commit)
+
def usage():
print('Usage:\n')
- print(sys.argv[0] + ' --change <change id> --project <project name> ' \
- + '--branch <branch> --commit <sha1> --patchset <patchset id>')
+ print(sys.argv[0] + ' --change <change id> --project <project name> '
+ + '--branch <branch> --commit <sha1> --patchset <patchset id>')
-def fail( commit, message ):
+
+def fail(commit, message):
command = SSH_COMMAND + FAILURE_SCORE + ' -m \\\"' \
- + _shell_escape( FAILURE_MESSAGE + '\n\n' + message) \
+ + _shell_escape(FAILURE_MESSAGE + '\n\n' + message) \
+ '\\\" ' + commit
subprocess.getstatusoutput(command)
sys.exit(1)
-def passes( commit ):
+
+def passes(commit):
command = SSH_COMMAND + PASS_SCORE + ' -m \\\"' \
+ _shell_escape(PASS_MESSAGE) + ' \\\" ' + commit
subprocess.getstatusoutput(command)
+
def _shell_escape(x):
s = ''
for c in x:
@@ -102,6 +109,6 @@
s = s + c
return s
+
if __name__ == '__main__':
main()
-
diff --git a/contrib/populate-fixture-data.py b/contrib/populate-fixture-data.py
index e7e8d0b..07a0f01 100755
--- a/contrib/populate-fixture-data.py
+++ b/contrib/populate-fixture-data.py
@@ -47,228 +47,235 @@
# Random names from US Census Data
FIRST_NAMES = [
- "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime", "Elaine",
- "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla", "Katrina",
- "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally", "Emma", "Susan",
- "Amanda", "Alyssa", "Patty", "Angie", "Dominique", "Cynthia", "Jennifer",
- "Theresa", "Desiree", "Kaylee", "Maureen", "Jeanne", "Kellie", "Valerie",
- "Nina", "Judy", "Diamond", "Anita", "Rebekah", "Stefanie", "Kendra", "Erin",
- "Tammie", "Tracey", "Bridget", "Krystal", "Jasmin", "Sonia", "Meghan",
- "Rebecca", "Jeanette", "Meredith", "Beverly", "Natasha", "Chloe", "Selena",
- "Teresa", "Sheena", "Cassandra", "Rhonda", "Tami", "Jodi", "Shelly", "Angela",
- "Kimberly", "Terry", "Joanna", "Isabella", "Lindsey", "Loretta", "Dana",
- "Veronica", "Carolyn", "Laura", "Karen", "Dawn", "Alejandra", "Cassie",
- "Lorraine", "Yolanda", "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri",
- "Doris", "Sandra", "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael",
- "Donna", "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
- "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney", "Jacqueline",
- "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn", "Nancy",
- "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara", "Chelsey", "Cassidy",
- "Jean", "Chelsea", "Jenny", "Diana", "Haley", "Kristine", "Kristina", "Erika",
- "Jenna", "Alison", "Deanna", "Abigail", "Melissa", "Sierra", "Linda",
- "Monica", "Tasha", "Traci", "Yvonne", "Tracy", "Marie", "Maria", "Michaela",
- "Stacie", "April", "Morgan", "Cathy", "Darlene", "Cristina", "Emily"
- "Ian", "Russell", "Phillip", "Jay", "Barry", "Brad", "Frederick", "Fernando",
- "Timothy", "Ricardo", "Bernard", "Daniel", "Ruben", "Alexis", "Kyle", "Malik",
- "Norman", "Kent", "Melvin", "Stephen", "Daryl", "Kurt", "Greg", "Alex",
- "Mario", "Riley", "Marvin", "Dan", "Steven", "Roberto", "Lucas", "Leroy",
- "Preston", "Drew", "Fred", "Casey", "Wesley", "Elijah", "Reginald", "Joel",
- "Christopher", "Jacob", "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott",
- "Terrence", "Jim", "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus",
- "Peter", "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
- "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
- "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
- "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
- "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
- "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
- "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin", "Cristian",
- "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac", "Maurice",
- "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius", "Jerome", "Jason",
- "Harold", "Kerry", "Clarence", "Gregg", "Shane", "Eduardo", "Micheal",
- "Howard", "Vernon", "Rodney", "Anthony", "Levi", "Larry", "Franklin", "Jimmy",
- "Jonathon", "Carl",
+ "Casey", "Yesenia", "Shirley", "Tara", "Wanda", "Sheryl", "Jaime",
+ "Elaine", "Charlotte", "Carly", "Bonnie", "Kirsten", "Kathryn", "Carla",
+ "Katrina", "Melody", "Suzanne", "Sandy", "Joann", "Kristie", "Sally",
+ "Emma", "Susan", "Amanda", "Alyssa", "Patty", "Angie", "Dominique",
+ "Cynthia", "Jennifer", "Theresa", "Desiree", "Kaylee", "Maureen",
+ "Jeanne", "Kellie", "Valerie", "Nina", "Judy", "Diamond", "Anita",
+ "Rebekah", "Stefanie", "Kendra", "Erin", "Tammie", "Tracey", "Bridget",
+ "Krystal", "Jasmin", "Sonia", "Meghan", "Rebecca", "Jeanette", "Meredith",
+ "Beverly", "Natasha", "Chloe", "Selena", "Teresa", "Sheena", "Cassandra",
+ "Rhonda", "Tami", "Jodi", "Shelly", "Angela", "Kimberly", "Terry",
+ "Joanna", "Isabella", "Lindsey", "Loretta", "Dana", "Veronica", "Carolyn",
+ "Laura", "Karen", "Dawn", "Alejandra", "Cassie", "Lorraine", "Yolanda",
+ "Kerry", "Stephanie", "Caitlin", "Melanie", "Kerri", "Doris", "Sandra",
+ "Beth", "Carol", "Vicki", "Shelia", "Bethany", "Rachael", "Donna",
+ "Alexandra", "Barbara", "Ana", "Jillian", "Ann", "Rachel", "Lauren",
+ "Hayley", "Misty", "Brianna", "Tanya", "Danielle", "Courtney",
+ "Jacqueline", "Becky", "Christy", "Alisha", "Phyllis", "Faith", "Jocelyn",
+ "Nancy", "Gloria", "Kristen", "Evelyn", "Julie", "Julia", "Kara",
+ "Chelsey", "Cassidy", "Jean", "Chelsea", "Jenny", "Diana", "Haley",
+ "Kristine", "Kristina", "Erika", "Jenna", "Alison", "Deanna", "Abigail",
+ "Melissa", "Sierra", "Linda", "Monica", "Tasha", "Traci", "Yvonne",
+ "Tracy", "Marie", "Maria", "Michaela", "Stacie", "April", "Morgan",
+    "Cathy", "Darlene", "Cristina", "Emily", "Ian", "Russell", "Phillip", "Jay",
+ "Barry", "Brad", "Frederick", "Fernando", "Timothy", "Ricardo", "Bernard",
+ "Daniel", "Ruben", "Alexis", "Kyle", "Malik", "Norman", "Kent", "Melvin",
+ "Stephen", "Daryl", "Kurt", "Greg", "Alex", "Mario", "Riley", "Marvin",
+ "Dan", "Steven", "Roberto", "Lucas", "Leroy", "Preston", "Drew", "Fred",
+ "Casey", "Wesley", "Elijah", "Reginald", "Joel", "Christopher", "Jacob",
+ "Luis", "Philip", "Mark", "Rickey", "Todd", "Scott", "Terrence", "Jim",
+ "Stanley", "Bobby", "Thomas", "Gabriel", "Tracy", "Marcus", "Peter",
+ "Michael", "Calvin", "Herbert", "Darryl", "Billy", "Ross", "Dustin",
+ "Jaime", "Adam", "Henry", "Xavier", "Dominic", "Lonnie", "Danny", "Victor",
+ "Glen", "Perry", "Jackson", "Grant", "Gerald", "Garrett", "Alejandro",
+ "Eddie", "Alan", "Ronnie", "Mathew", "Dave", "Wayne", "Joe", "Craig",
+ "Terry", "Chris", "Randall", "Parker", "Francis", "Keith", "Neil", "Caleb",
+ "Jon", "Earl", "Taylor", "Bryce", "Brady", "Max", "Sergio", "Leon", "Gene",
+ "Darin", "Bill", "Edgar", "Antonio", "Dalton", "Arthur", "Austin",
+ "Cristian", "Kevin", "Omar", "Kelly", "Aaron", "Ethan", "Tom", "Isaac",
+ "Maurice", "Gilbert", "Hunter", "Willie", "Harry", "Dale", "Darius",
+ "Jerome", "Jason", "Harold", "Kerry", "Clarence", "Gregg", "Shane",
+ "Eduardo", "Micheal", "Howard", "Vernon", "Rodney", "Anthony", "Levi",
+ "Larry", "Franklin", "Jimmy", "Jonathon", "Carl",
]
LAST_NAMES = [
- "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey", "Farley",
- "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings", "Braun", "Rangel",
- "Casey", "Dougherty", "Hancock", "Wolf", "Henry", "Thomas", "Bentley",
- "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul", "Hess", "Chase",
- "Mckay", "Bender", "Colins", "Montoya", "Townsend", "Potts", "Ayala", "Avery",
- "Sherman", "Tapia", "Hamilton", "Ferguson", "Huang", "Hooper", "Zamora",
- "Logan", "Lloyd", "Quinn", "Monroe", "Brock", "Ibarra", "Fowler", "Weiss",
- "Montgomery", "Diaz", "Dixon", "Olson", "Robertson", "Arias", "Benjamin",
- "Abbott", "Stein", "Schroeder", "Beck", "Velasquez", "Barber", "Nichols",
- "Ortiz", "Burns", "Moody", "Stokes", "Wilcox", "Rush", "Michael", "Kidd",
- "Rowland", "Mclean", "Saunders", "Chung", "Newton", "Potter", "Hickman",
- "Ray", "Larson", "Figueroa", "Duncan", "Sparks", "Rose", "Hodge", "Huynh",
- "Joseph", "Morales", "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn",
- "Wise", "Knight", "Frederick", "Heath", "Pollard", "Vega", "Mcclain",
- "Buckley", "Conrad", "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns",
- "Mcknight", "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson",
- "Oneill", "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence",
- "Brandt", "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
- "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
- "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
- "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
- "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
- "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy", "Cameron",
- "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish", "Herman", "Hines",
- "Sutton", "Gallegos", "Stephenson", "Lozano", "Franklin", "Howe", "Bauer",
- "Love", "Ali", "Ellison", "Lester", "Guzman", "Jarvis", "Espinoza",
- "Fletcher", "Burton", "Woodard", "Peterson", "Barajas", "Richard", "Bryan",
- "Goodman", "Cline", "Rowe", "Faulkner", "Crawford", "Mueller", "Patterson",
- "Hull", "Walton", "Wu", "Flores", "York", "Dickson", "Barnes", "Fisher",
- "Strong", "Juarez", "Fitzgerald", "Schmitt", "Blevins", "Villa", "Sullivan",
- "Velazquez", "Horton", "Meadows", "Riley", "Barrera", "Neal", "Mendez",
- "Mcdonald", "Floyd", "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston",
- "Davies", "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
- "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
- "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
- "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
- "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
- "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
- "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
- "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
- "Knox", "Hanna", "Fields",
+ "Savage", "Hendrix", "Moon", "Larsen", "Rocha", "Burgess", "Bailey",
+ "Farley", "Moses", "Schmidt", "Brown", "Hoover", "Klein", "Jennings",
+ "Braun", "Rangel", "Casey", "Dougherty", "Hancock", "Wolf", "Henry",
+ "Thomas", "Bentley", "Barnett", "Kline", "Pitts", "Rojas", "Sosa", "Paul",
+ "Hess", "Chase", "Mckay", "Bender", "Colins", "Montoya", "Townsend",
+ "Potts", "Ayala", "Avery", "Sherman", "Tapia", "Hamilton", "Ferguson",
+ "Huang", "Hooper", "Zamora", "Logan", "Lloyd", "Quinn", "Monroe", "Brock",
+ "Ibarra", "Fowler", "Weiss", "Montgomery", "Diaz", "Dixon", "Olson",
+ "Robertson", "Arias", "Benjamin", "Abbott", "Stein", "Schroeder", "Beck",
+ "Velasquez", "Barber", "Nichols", "Ortiz", "Burns", "Moody", "Stokes",
+ "Wilcox", "Rush", "Michael", "Kidd", "Rowland", "Mclean", "Saunders",
+ "Chung", "Newton", "Potter", "Hickman", "Ray", "Larson", "Figueroa",
+ "Duncan", "Sparks", "Rose", "Hodge", "Huynh", "Joseph", "Morales",
+ "Beasley", "Mora", "Fry", "Ross", "Novak", "Hahn", "Wise", "Knight",
+ "Frederick", "Heath", "Pollard", "Vega", "Mcclain", "Buckley", "Conrad",
+ "Cantrell", "Bond", "Mejia", "Wang", "Lewis", "Johns", "Mcknight",
+ "Callahan", "Reynolds", "Norris", "Burnett", "Carey", "Jacobson", "Oneill",
+ "Oconnor", "Leonard", "Mckenzie", "Hale", "Delgado", "Spence", "Brandt",
+ "Obrien", "Bowman", "James", "Avila", "Roberts", "Barker", "Cohen",
+ "Bradley", "Prince", "Warren", "Summers", "Little", "Caldwell", "Garrett",
+ "Hughes", "Norton", "Burke", "Holden", "Merritt", "Lee", "Frank", "Wiley",
+ "Ho", "Weber", "Keith", "Winters", "Gray", "Watts", "Brady", "Aguilar",
+ "Nicholson", "David", "Pace", "Cervantes", "Davis", "Baxter", "Sanchez",
+ "Singleton", "Taylor", "Strickland", "Glenn", "Valentine", "Roy",
+ "Cameron", "Beard", "Norman", "Fritz", "Anthony", "Koch", "Parrish",
+ "Herman", "Hines", "Sutton", "Gallegos", "Stephenson", "Lozano",
+ "Franklin", "Howe", "Bauer", "Love", "Ali", "Ellison", "Lester", "Guzman",
+ "Jarvis", "Espinoza", "Fletcher", "Burton", "Woodard", "Peterson",
+ "Barajas", "Richard", "Bryan", "Goodman", "Cline", "Rowe", "Faulkner",
+ "Crawford", "Mueller", "Patterson", "Hull", "Walton", "Wu", "Flores",
+ "York", "Dickson", "Barnes", "Fisher", "Strong", "Juarez", "Fitzgerald",
+ "Schmitt", "Blevins", "Villa", "Sullivan", "Velazquez", "Horton",
+ "Meadows", "Riley", "Barrera", "Neal", "Mendez", "Mcdonald", "Floyd",
+ "Lynch", "Mcdowell", "Benson", "Hebert", "Livingston", "Davies",
+ "Richardson", "Vincent", "Davenport", "Osborn", "Mckee", "Marshall",
+ "Ferrell", "Martinez", "Melton", "Mercer", "Yoder", "Jacobs", "Mcdaniel",
+ "Mcmillan", "Peters", "Atkinson", "Wood", "Briggs", "Valencia", "Chandler",
+ "Rios", "Hunter", "Bean", "Hicks", "Hays", "Lucero", "Malone", "Waller",
+ "Banks", "Myers", "Mitchell", "Grimes", "Houston", "Hampton", "Trujillo",
+ "Perkins", "Moran", "Welch", "Contreras", "Montes", "Ayers", "Hayden",
+ "Daniel", "Weeks", "Porter", "Gill", "Mullen", "Nolan", "Dorsey", "Crane",
+ "Estes", "Lam", "Wells", "Cisneros", "Giles", "Watson", "Vang", "Scott",
+ "Knox", "Hanna", "Fields",
]
def clean(json_string):
- # Strip JSON XSS Tag
- json_string = json_string.strip()
- if json_string.startswith(")]}'"):
- return json_string[5:]
- return json_string
+ # Strip JSON XSS Tag
+ json_string = json_string.strip()
+ if json_string.startswith(")]}'"):
+ return json_string[5:]
+ return json_string
def basic_auth(user):
- return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
+ return requests.auth.HTTPBasicAuth(user["username"], user["http_password"])
def fetch_admin_group():
- global GROUP_ADMIN
- # Get admin group
- r = json.loads(clean(requests.get(BASE_URL + "groups/" + "?suggest=ad&p=All-Projects",
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH).text))
- admin_group_name = r.keys()[0]
- GROUP_ADMIN = r[admin_group_name]
- GROUP_ADMIN["name"] = admin_group_name
+ global GROUP_ADMIN
+ # Get admin group
+ r = json.loads(clean(requests.get(
+ BASE_URL + "groups/?suggest=ad&p=All-Projects",
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH).text))
+    admin_group_name = list(r.keys())[0]
+ GROUP_ADMIN = r[admin_group_name]
+ GROUP_ADMIN["name"] = admin_group_name
def generate_random_text():
- return " ".join([random.choice("lorem ipsum "
- "doleret delendam "
- "\n esse".split(" ")) for _ in range(1, 100)])
+ return " ".join([random.choice("lorem ipsum "
+ "doleret delendam "
+ "\n esse".split(" ")) for _ in range(1,
+ 100)])
def set_up():
- global TMP_PATH
- TMP_PATH = tempfile.mkdtemp()
- atexit.register(clean_up)
- os.makedirs(TMP_PATH + "/ssh")
- os.makedirs(TMP_PATH + "/repos")
- fetch_admin_group()
+ global TMP_PATH
+ TMP_PATH = tempfile.mkdtemp()
+ atexit.register(clean_up)
+ os.makedirs(TMP_PATH + "/ssh")
+ os.makedirs(TMP_PATH + "/repos")
+ fetch_admin_group()
def get_random_users(num_users):
- users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
- num_users)
- names = []
- for u in users:
- names.append({"firstname": u[0],
- "lastname": u[1],
- "name": u[0] + " " + u[1],
- "username": u[0] + u[1],
- "email": u[0] + "." + u[1] + "@gerritcodereview.com",
- "http_password": "secret",
- "groups": []})
- return names
+ users = random.sample([(f, l) for f in FIRST_NAMES for l in LAST_NAMES],
+ num_users)
+ names = []
+ for u in users:
+ names.append({"firstname": u[0],
+ "lastname": u[1],
+ "name": u[0] + " " + u[1],
+ "username": u[0] + u[1],
+ "email": u[0] + "." + u[1] + "@gerritcodereview.com",
+ "http_password": "secret",
+ "groups": []})
+ return names
def generate_ssh_keys(gerrit_users):
- for user in gerrit_users:
- key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
- subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
- with open(key_file + ".pub", "r") as f:
- user["ssh_key"] = f.read()
+ for user in gerrit_users:
+ key_file = TMP_PATH + "/ssh/" + user["username"] + ".key"
+ subprocess.check_output(["ssh-keygen", "-f", key_file, "-N", ""])
+ with open(key_file + ".pub", "r") as f:
+ user["ssh_key"] = f.read()
def create_gerrit_groups():
- groups = [
- {"name": "iOS-Maintainers", "description": "iOS Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Android-Maintainers", "description": "Android Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Backend-Maintainers", "description": "Backend Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Script-Maintainers", "description": "Script Maintainers",
- "visible_to_all": True, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]},
- {"name": "Security-Team", "description": "Sec Team",
- "visible_to_all": False, "owner": GROUP_ADMIN["name"],
- "owner_id": GROUP_ADMIN["id"]}]
- for g in groups:
- requests.put(BASE_URL + "groups/" + g["name"],
- json.dumps(g),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [g["name"] for g in groups]
+ groups = [
+ {"name": "iOS-Maintainers", "description": "iOS Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Android-Maintainers", "description": "Android Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Backend-Maintainers", "description": "Backend Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Script-Maintainers", "description": "Script Maintainers",
+ "visible_to_all": True, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]},
+ {"name": "Security-Team", "description": "Sec Team",
+ "visible_to_all": False, "owner": GROUP_ADMIN["name"],
+ "owner_id": GROUP_ADMIN["id"]}]
+ for g in groups:
+ requests.put(BASE_URL + "groups/" + g["name"],
+ json.dumps(g),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [g["name"] for g in groups]
def create_gerrit_projects(owner_groups):
- projects = [
- {"id": "android", "name": "Android", "parent": "All-Projects",
- "branches": ["master"], "description": "Our android app.",
- "owners": [owner_groups[0]], "create_empty_commit": True},
- {"id": "ios", "name": "iOS", "parent": "All-Projects",
- "branches": ["master"], "description": "Our ios app.",
- "owners": [owner_groups[1]], "create_empty_commit": True},
- {"id": "backend", "name": "Backend", "parent": "All-Projects",
- "branches": ["master"], "description": "Our awesome backend.",
- "owners": [owner_groups[2]], "create_empty_commit": True},
- {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
- "branches": ["master"], "description": "some small scripts.",
- "owners": [owner_groups[3]], "create_empty_commit": True}]
- for p in projects:
- requests.put(BASE_URL + "projects/" + p["name"],
- json.dumps(p),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
- return [p["name"] for p in projects]
+ projects = [
+ {"id": "android", "name": "Android", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our android app.",
+ "owners": [owner_groups[0]], "create_empty_commit": True},
+ {"id": "ios", "name": "iOS", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our ios app.",
+ "owners": [owner_groups[1]], "create_empty_commit": True},
+ {"id": "backend", "name": "Backend", "parent": "All-Projects",
+ "branches": ["master"], "description": "Our awesome backend.",
+ "owners": [owner_groups[2]], "create_empty_commit": True},
+ {"id": "scripts", "name": "Scripts", "parent": "All-Projects",
+ "branches": ["master"], "description": "some small scripts.",
+ "owners": [owner_groups[3]], "create_empty_commit": True}]
+ for p in projects:
+ requests.put(BASE_URL + "projects/" + p["name"],
+ json.dumps(p),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
+ return [p["name"] for p in projects]
def create_gerrit_users(gerrit_users):
- for user in gerrit_users:
- requests.put(BASE_URL + "accounts/" + user["username"],
- json.dumps(user),
- headers=HEADERS,
- auth=ADMIN_BASIC_AUTH)
+ for user in gerrit_users:
+ requests.put(BASE_URL + "accounts/" + user["username"],
+ json.dumps(user),
+ headers=HEADERS,
+ auth=ADMIN_BASIC_AUTH)
def create_change(user, project_name):
- random_commit_message = generate_random_text()
- change = {
- "project": project_name,
- "subject": random_commit_message.split("\n")[0],
- "branch": "master",
- "status": "NEW",
- }
- requests.post(BASE_URL + "changes/",
- json.dumps(change),
- headers=HEADERS,
- auth=basic_auth(user))
+ random_commit_message = generate_random_text()
+ change = {
+ "project": project_name,
+ "subject": random_commit_message.split("\n")[0],
+ "branch": "master",
+ "status": "NEW",
+ }
+ requests.post(BASE_URL + "changes/",
+ json.dumps(change),
+ headers=HEADERS,
+ auth=basic_auth(user))
def clean_up():
- shutil.rmtree(TMP_PATH)
+ shutil.rmtree(TMP_PATH)
def main():
  p = optparse.OptionParser()
  p.add_option("-u", "--user_count", action="store",
               default=100,
@@ -301,5 +308,5 @@
     for idx, u in enumerate(gerrit_users):
         for _ in range(random.randint(1, 5)):
             create_change(u, project_names[4 * idx / len(gerrit_users)])
main()
diff --git a/java/com/google/gerrit/server/git/GarbageCollection.java b/java/com/google/gerrit/server/git/GarbageCollection.java
index 3bf89c7..997907e 100644
--- a/java/com/google/gerrit/server/git/GarbageCollection.java
+++ b/java/com/google/gerrit/server/git/GarbageCollection.java
@@ -41,9 +41,6 @@
public class GarbageCollection {
private static final Logger log = LoggerFactory.getLogger(GarbageCollection.class);
- public static final String LOG_NAME = "gc_log";
- private static final Logger gcLog = LoggerFactory.getLogger(LOG_NAME);
-
private final GitRepositoryManager repoManager;
private final GarbageCollectionQueue gcQueue;
private final GcConfig gcConfig;
@@ -142,7 +139,7 @@
}
b.append(s);
}
- gcLog.info(b.toString());
+ log.info(b.toString());
}
private static void logGcConfiguration(
@@ -182,7 +179,6 @@
print(writer, "failed.\n\n");
StringBuilder b = new StringBuilder();
b.append("[").append(projectName.get()).append("]");
- gcLog.error(b.toString(), e);
log.error(b.toString(), e);
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
index e03ef67..8796fdf 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionLogFile.java
@@ -26,6 +26,8 @@
import org.eclipse.jgit.lib.Config;
public class GarbageCollectionLogFile implements LifecycleListener {
+ private static final String LOG_NAME = "gc_log";
+
@Inject
public GarbageCollectionLogFile(SitePaths sitePaths, @GerritServerConfig Config config) {
if (SystemLog.shouldConfigure()) {
@@ -38,15 +40,20 @@
@Override
public void stop() {
- LogManager.getLogger(GarbageCollection.LOG_NAME).removeAllAppenders();
+ LogManager.getLogger(GarbageCollection.class).removeAllAppenders();
+ LogManager.getLogger(GarbageCollectionRunner.class).removeAllAppenders();
}
private static void initLogSystem(Path logdir, boolean rotate) {
- Logger gcLogger = LogManager.getLogger(GarbageCollection.LOG_NAME);
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollection.class));
+ initGcLogger(logdir, rotate, LogManager.getLogger(GarbageCollectionRunner.class));
+ }
+
+ private static void initGcLogger(Path logdir, boolean rotate, Logger gcLogger) {
gcLogger.removeAllAppenders();
gcLogger.addAppender(
SystemLog.createAppender(
- logdir, GarbageCollection.LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
+ logdir, LOG_NAME, new PatternLayout("[%d] %-5p %x: %m%n"), rotate));
gcLogger.setAdditivity(false);
}
}
diff --git a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
index e4316c5..054e56a 100644
--- a/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
+++ b/java/com/google/gerrit/server/git/GarbageCollectionRunner.java
@@ -24,7 +24,7 @@
/** Runnable to enable scheduling gc to run periodically */
public class GarbageCollectionRunner implements Runnable {
- private static final Logger gcLog = LoggerFactory.getLogger(GarbageCollection.LOG_NAME);
+ private static final Logger log = LoggerFactory.getLogger(GarbageCollectionRunner.class);
static class Lifecycle implements LifecycleListener {
private final WorkQueue queue;
@@ -61,7 +61,7 @@
@Override
public void run() {
- gcLog.info("Triggering gc on all repositories");
+ log.info("Triggering gc on all repositories");
garbageCollectionFactory.create().run(Lists.newArrayList(projectCache.all()));
}
diff --git a/plugins/codemirror-editor b/plugins/codemirror-editor
index c97e280..ee50e45 160000
--- a/plugins/codemirror-editor
+++ b/plugins/codemirror-editor
@@ -1 +1 @@
-Subproject commit c97e2806532cff00fea6424cde0d440f9ea5016d
+Subproject commit ee50e45b449e282ed78917175daf8b359da8d943
diff --git a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
index 4d53631..04d8b6e 100644
--- a/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
+++ b/polygerrit-ui/app/behaviors/gr-tooltip-behavior/gr-tooltip-behavior.js
@@ -51,7 +51,6 @@
detached() {
this._handleHideTooltip();
- this.unlisten(window, 'scroll', '_handleWindowScroll');
},
_setupTooltipListeners() {
@@ -59,9 +58,6 @@
this._hasSetupTooltipListeners = true;
this.addEventListener('mouseenter', this._handleShowTooltip.bind(this));
- this.addEventListener('mouseleave', this._handleHideTooltip.bind(this));
- this.addEventListener('tap', this._handleHideTooltip.bind(this));
- this.listen(window, 'scroll', '_handleWindowScroll');
},
_handleShowTooltip(e) {
@@ -91,6 +87,9 @@
tooltip.style.visibility = null;
this._tooltip = tooltip;
+ this.listen(window, 'scroll', '_handleWindowScroll');
+ this.listen(this, 'mouseleave', '_handleHideTooltip');
+ this.listen(this, 'tap', '_handleHideTooltip');
},
_handleHideTooltip(e) {
@@ -100,6 +99,9 @@
return;
}
+ this.unlisten(window, 'scroll', '_handleWindowScroll');
+ this.unlisten(this, 'mouseleave', '_handleHideTooltip');
+ this.unlisten(this, 'tap', '_handleHideTooltip');
this.setAttribute('title', this._titleText);
if (this._tooltip && this._tooltip.parentNode) {
this._tooltip.parentNode.removeChild(this._tooltip);
diff --git a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
index 29ffec8..cb9f4c5 100644
--- a/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
+++ b/polygerrit-ui/app/elements/change/gr-change-view/gr-change-view.js
@@ -57,6 +57,8 @@
UNIFIED: 'UNIFIED_DIFF',
};
+ const CHANGE_DATA_TIMING_LABEL = 'ChangeDataLoaded';
+
Polymer({
is: 'gr-change-view',
@@ -624,6 +626,8 @@
this.$.fileList.collapseAllDiffs();
this._patchRange = patchRange;
+ // If the change has already been loaded and the parameter change is only
+ // in the patch range, then don't do a full reload.
if (this._initialLoadComplete && patchChanged) {
if (patchRange.patchNum == null) {
patchRange.patchNum = this.computeLatestPatchNum(this._allPatchSets);
@@ -637,7 +641,7 @@
this._changeNum = value.changeNum;
this.$.relatedChanges.clear();
- this._reload().then(() => {
+ this._reload(true).then(() => {
this._performPostLoadTasks();
});
},
@@ -651,7 +655,6 @@
},
_performPostLoadTasks() {
- this.$.relatedChanges.reload();
this._maybeShowReplyDialog();
this._maybeShowRevertDialog();
@@ -1199,43 +1202,102 @@
});
},
- _reload() {
+ /**
+ * Reload the change.
+   * @param {boolean=} opt_reloadRelatedChanges Reloads the related changes
+ * when true.
+ * @return {Promise} A promise that resolves when the core data has loaded.
+ * Some non-core data loading may still be in-flight when the core data
+ * promise resolves.
+ */
+ _reload(opt_reloadRelatedChanges) {
this._loading = true;
this._relatedChangesCollapsed = true;
- const detailCompletes = this._getChangeDetail().then(() => {
- this._loading = false;
- this._getProjectConfig();
- });
+ // Array to house all promises related to data requests.
+ const allDataPromises = [];
- this._reloadComments();
+ // Resolves when the change detail and the edit patch set (if available)
+ // are loaded.
+ const detailCompletes = this._getChangeDetail();
+ allDataPromises.push(detailCompletes);
- let reloadPromise;
+ // Resolves when the loading flag is set to false, meaning that some
+ // change content may start appearing.
+ const loadingFlagSet = detailCompletes
+ .then(() => { this._loading = false; });
+ // Resolves when the project config has loaded.
+ const projectConfigLoaded = detailCompletes
+ .then(() => this._getProjectConfig());
+ allDataPromises.push(projectConfigLoaded);
+
+ // Resolves when change comments have loaded (comments, drafts and robot
+ // comments).
+ const commentsLoaded = this._reloadComments();
+ allDataPromises.push(commentsLoaded);
+
+ let coreDataPromise;
+
+ // If the patch number is specified
if (this._patchRange.patchNum) {
- reloadPromise = Promise.all([
- this._reloadPatchNumDependentResources(),
- detailCompletes,
- ]).then(() => {
- return Promise.all([
- this._getMergeability(),
- this.$.actions.reload(),
- ]);
- });
+ // Because a specific patchset is specified, reload the resources that
+ // are keyed by patch number or patch range.
+ const patchResourcesLoaded = this._reloadPatchNumDependentResources();
+ allDataPromises.push(patchResourcesLoaded);
+
+ // Promise resolves when the change detail and patch dependent resources
+ // have loaded.
+ const detailAndPatchResourcesLoaded =
+ Promise.all([patchResourcesLoaded, loadingFlagSet]);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = detailAndPatchResourcesLoaded
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+      // Promise resolves when the change actions have loaded.
+ const actionsLoaded = detailAndPatchResourcesLoaded
+ .then(() => this.$.actions.reload());
+ allDataPromises.push(actionsLoaded);
+
+ // The core data is loaded when both mergeability and actions are known.
+ coreDataPromise = Promise.all([mergeabilityLoaded, actionsLoaded]);
} else {
- // The patch number is reliant on the change detail request.
- reloadPromise = detailCompletes.then(() => {
- this.$.fileList.reload();
- if (!this._latestCommitMessage) {
- this._getLatestCommitMessage();
- }
- return this._getMergeability();
+ // Resolves when the file list has loaded.
+ const fileListReload = loadingFlagSet
+ .then(() => this.$.fileList.reload());
+ allDataPromises.push(fileListReload);
+
+ const latestCommitMessageLoaded = loadingFlagSet.then(() => {
+ // If the latest commit message is known, there is nothing to do.
+ if (this._latestCommitMessage) { return Promise.resolve(); }
+ return this._getLatestCommitMessage();
});
+ allDataPromises.push(latestCommitMessageLoaded);
+
+ // Promise resolves when mergeability information has loaded.
+ const mergeabilityLoaded = loadingFlagSet
+ .then(() => this._getMergeability());
+ allDataPromises.push(mergeabilityLoaded);
+
+ // Core data is loaded when mergeability has been loaded.
+ coreDataPromise = mergeabilityLoaded;
}
- return reloadPromise.then(() => {
- this.$.reporting.changeDisplayed();
+ if (opt_reloadRelatedChanges) {
+ const relatedChangesLoaded = coreDataPromise
+ .then(() => this.$.relatedChanges.reload());
+ allDataPromises.push(relatedChangesLoaded);
+ }
+
+ this.$.reporting.time(CHANGE_DATA_TIMING_LABEL);
+ Promise.all(allDataPromises).then(() => {
+ this.$.reporting.timeEnd(CHANGE_DATA_TIMING_LABEL);
});
+
+ return coreDataPromise
+ .then(() => { this.$.reporting.changeDisplayed(); });
},
/**
diff --git a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
index 81c6d99..540df98 100644
--- a/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
+++ b/polygerrit-ui/app/elements/diff/gr-diff/gr-diff.html
@@ -108,7 +108,6 @@
cursor: pointer;
}
.content {
- overflow: hidden;
/* Set min width since setting width on table cells still
allows them to shrink. Do not set max width because
CJK (Chinese-Japanese-Korean) glyphs have variable width */
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
index cd9f9dc..017cd5d 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.html
@@ -15,11 +15,11 @@
limitations under the License.
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<link rel="import" href="../gr-syntax-lib-loader/gr-syntax-lib-loader.html">
+<link rel="import" href="../../shared/gr-lib-loader/gr-lib-loader.html">
<dom-module id="gr-syntax-layer">
<template>
- <gr-syntax-lib-loader id="libLoader"></gr-syntax-lib-loader>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
</template>
<script src="../gr-diff/gr-diff-line.js"></script>
<script src="../gr-diff-highlight/gr-annotation.js"></script>
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
index f8db343..15a8a0a 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer.js
@@ -442,7 +442,7 @@
},
_loadHLJS() {
- return this.$.libLoader.get().then(hljs => {
+ return this.$.libLoader.getHLJS().then(hljs => {
this._hljs = hljs;
});
},
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
index 74fc3bf..f2458fc 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
+++ b/polygerrit-ui/app/elements/diff/gr-syntax-layer/gr-syntax-layer_test.html
@@ -185,7 +185,7 @@
const mockHLJS = getMockHLJS();
const highlightSpy = sinon.spy(mockHLJS, 'highlight');
- sandbox.stub(element.$.libLoader, 'get',
+ sandbox.stub(element.$.libLoader, 'getHLJS',
() => { return Promise.resolve(mockHLJS); });
const processNextSpy = sandbox.spy(element, '_processNextLine');
const processPromise = element.process();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js b/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
deleted file mode 100644
index 6ec7ab2..0000000
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.js
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * @license
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-(function() {
- 'use strict';
-
- const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
- const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
-
- Polymer({
- is: 'gr-syntax-lib-loader',
-
- properties: {
- _state: {
- type: Object,
-
- // NOTE: intended singleton.
- value: {
- configured: false,
- loading: false,
- callbacks: [],
- },
- },
- },
-
- get() {
- return new Promise((resolve, reject) => {
- // If the lib is totally loaded, resolve immediately.
- if (this._getHighlightLib()) {
- resolve(this._getHighlightLib());
- return;
- }
-
- // If the library is not currently being loaded, then start loading it.
- if (!this._state.loading) {
- this._state.loading = true;
- this._loadHLJS().then(this._onLibLoaded.bind(this)).catch(reject);
- }
-
- this._state.callbacks.push(resolve);
- });
- },
-
- _onLibLoaded() {
- const lib = this._getHighlightLib();
- this._state.loading = false;
- for (const cb of this._state.callbacks) {
- cb(lib);
- }
- this._state.callbacks = [];
- },
-
- _getHighlightLib() {
- const lib = window.hljs;
- if (lib && !this._state.configured) {
- this._state.configured = true;
-
- lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
- }
- return lib;
- },
-
- _getLibRoot() {
- if (this._cachedLibRoot) { return this._cachedLibRoot; }
-
- const appLink = document.head
- .querySelector('link[rel=import][href$="gr-app.html"]');
-
- if (!appLink) { return null; }
-
- return this._cachedLibRoot = appLink
- .href
- .match(LIB_ROOT_PATTERN)[1];
- },
- _cachedLibRoot: null,
-
- _loadHLJS() {
- return new Promise((resolve, reject) => {
- const script = document.createElement('script');
- const src = this._getHLJSUrl();
-
- if (!src) {
- reject(new Error('Unable to load blank HLJS url.'));
- return;
- }
-
- script.src = src;
- script.onload = resolve;
- script.onerror = reject;
- Polymer.dom(document.head).appendChild(script);
- });
- },
-
- _getHLJSUrl() {
- const root = this._getLibRoot();
- if (!root) { return null; }
- return root + HLJS_PATH;
- },
- });
-})();
diff --git a/polygerrit-ui/app/elements/gr-app.html b/polygerrit-ui/app/elements/gr-app.html
index de62646..6a2bfe0 100644
--- a/polygerrit-ui/app/elements/gr-app.html
+++ b/polygerrit-ui/app/elements/gr-app.html
@@ -19,6 +19,11 @@
if (localStorage.getItem('USE_SHADOW_DOM') === 'true') {
window.Polymer = {
dom: 'shadow',
+ passiveTouchGestures: true,
+ };
+ } else if (!window.Polymer) {
+ window.Polymer = {
+ passiveTouchGestures: true,
};
}
</script>
@@ -56,6 +61,7 @@
<link rel="import" href="./settings/gr-registration-dialog/gr-registration-dialog.html">
<link rel="import" href="./settings/gr-settings-view/gr-settings-view.html">
<link rel="import" href="./shared/gr-fixed-panel/gr-fixed-panel.html">
+<link rel="import" href="./shared/gr-lib-loader/gr-lib-loader.html">
<link rel="import" href="./shared/gr-rest-api-interface/gr-rest-api-interface.html">
<script src="../scripts/util.js"></script>
@@ -229,6 +235,7 @@
<gr-plugin-host id="plugins"
config="[[_serverConfig]]">
</gr-plugin-host>
+ <gr-lib-loader id="libLoader"></gr-lib-loader>
<gr-external-style id="externalStyle" name="app-theme"></gr-external-style>
</template>
<script src="gr-app.js" crossorigin="anonymous"></script>
diff --git a/polygerrit-ui/app/elements/gr-app.js b/polygerrit-ui/app/elements/gr-app.js
index 921415f..53ffc60 100644
--- a/polygerrit-ui/app/elements/gr-app.js
+++ b/polygerrit-ui/app/elements/gr-app.js
@@ -128,7 +128,9 @@
});
if (window.localStorage.getItem('dark-theme')) {
- this.importHref('../styles/themes/dark-theme.html');
+ this.$.libLoader.getDarkTheme().then(module => {
+ Polymer.dom(this.root).appendChild(module);
+ });
}
// Note: this is evaluated here to ensure that it only happens after the
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
similarity index 88%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
index f5b71be..f70aff4 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.html
@@ -16,6 +16,6 @@
-->
<link rel="import" href="../../../bower_components/polymer/polymer.html">
-<dom-module id="gr-syntax-lib-loader">
- <script src="gr-syntax-lib-loader.js"></script>
+<dom-module id="gr-lib-loader">
+ <script src="gr-lib-loader.js"></script>
</dom-module>
diff --git a/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
new file mode 100644
index 0000000..28ff45d
--- /dev/null
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader.js
@@ -0,0 +1,164 @@
+/**
+ * @license
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function() {
+ 'use strict';
+
+ const HLJS_PATH = 'bower_components/highlightjs/highlight.min.js';
+ const DARK_THEME_PATH = 'styles/themes/dark-theme.html';
+ const LIB_ROOT_PATTERN = /(.+\/)elements\/gr-app\.html/;
+
+ Polymer({
+ is: 'gr-lib-loader',
+
+ properties: {
+ _hljsState: {
+ type: Object,
+
+ // NOTE: intended singleton.
+ value: {
+ configured: false,
+ loading: false,
+ callbacks: [],
+ },
+ },
+ },
+
+ /**
+ * Get the HLJS library. Returns a promise that resolves with a reference to
+ * the library after it's been loaded. The promise resolves immediately if
+ * it's already been loaded.
+ * @return {!Promise<Object>}
+ */
+ getHLJS() {
+ return new Promise((resolve, reject) => {
+ // If the lib is totally loaded, resolve immediately.
+ if (this._getHighlightLib()) {
+ resolve(this._getHighlightLib());
+ return;
+ }
+
+ // If the library is not currently being loaded, then start loading it.
+ if (!this._hljsState.loading) {
+ this._hljsState.loading = true;
+ this._loadScript(this._getHLJSUrl())
+ .then(this._onHLJSLibLoaded.bind(this)).catch(reject);
+ }
+
+ this._hljsState.callbacks.push(resolve);
+ });
+ },
+
+ /**
+ * Loads the dark theme document. Returns a promise that resolves with a
+ * custom-style DOM element.
+ * @return {!Promise<Element>}
+ */
+ getDarkTheme() {
+ return new Promise((resolve, reject) => {
+ this.importHref(this._getLibRoot() + DARK_THEME_PATH, () => {
+ const module = document.createElement('style', 'custom-style');
+ module.setAttribute('include', 'dark-theme');
+ resolve(module);
+ });
+ });
+ },
+
+ /**
+ * Execute callbacks awaiting the HLJS lib load.
+ */
+ _onHLJSLibLoaded() {
+ const lib = this._getHighlightLib();
+ this._hljsState.loading = false;
+ for (const cb of this._hljsState.callbacks) {
+ cb(lib);
+ }
+ this._hljsState.callbacks = [];
+ },
+
+ /**
+ * Get the HLJS library, assuming it has been loaded. Configure the library
+ * if it hasn't already been configured.
+ * @return {!Object}
+ */
+ _getHighlightLib() {
+ const lib = window.hljs;
+ if (lib && !this._hljsState.configured) {
+ this._hljsState.configured = true;
+
+ lib.configure({classPrefix: 'gr-diff gr-syntax gr-syntax-'});
+ }
+ return lib;
+ },
+
+ /**
+ * Get the resource path used to load the application. If the application
+ * was loaded through a CDN, then this will be the path to CDN resources.
+ * @return {string}
+ */
+ _getLibRoot() {
+ // TODO(wyatta): Remove the remainder of this method logic once the
+ // STATIC_RESOURCE_PATH variable is being provided generally.
+ if (window.STATIC_RESOURCE_PATH) { return window.STATIC_RESOURCE_PATH; }
+
+ if (this._cachedLibRoot) { return this._cachedLibRoot; }
+
+ const appLink = document.head
+ .querySelector('link[rel=import][href$="gr-app.html"]');
+
+ if (!appLink) { throw new Error('Could not find application link'); }
+
+ this._cachedLibRoot = appLink
+ .href
+ .match(LIB_ROOT_PATTERN)[1];
+
+ if (!this._cachedLibRoot) {
+ throw new Error('Could not extract lib root');
+ }
+
+ return this._cachedLibRoot;
+ },
+ _cachedLibRoot: null,
+
+ /**
+ * Load and execute a JS file from the lib root.
+ * @param {string} src The path to the JS file without the lib root.
+ * @return {Promise} a promise that resolves when the script's onload
+ * executes.
+ */
+ _loadScript(src) {
+ return new Promise((resolve, reject) => {
+ const script = document.createElement('script');
+
+ if (!src) {
+ reject(new Error('Unable to load blank script url.'));
+ return;
+ }
+
+ script.src = src;
+ script.onload = resolve;
+ script.onerror = reject;
+ Polymer.dom(document.head).appendChild(script);
+ });
+ },
+
+ _getHLJSUrl() {
+ const root = this._getLibRoot();
+ if (!root) { return null; }
+ return root + HLJS_PATH;
+ },
+ });
+})();
diff --git a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
similarity index 77%
rename from polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
rename to polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
index a260a97..cf9a41c 100644
--- a/polygerrit-ui/app/elements/diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html
+++ b/polygerrit-ui/app/elements/shared/gr-lib-loader/gr-lib-loader_test.html
@@ -17,64 +17,67 @@
-->
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
-<title>gr-syntax-lib-loader</title>
+<title>gr-lib-loader</title>
<script src="../../../bower_components/webcomponentsjs/webcomponents-lite.min.js"></script>
<script src="../../../bower_components/web-component-tester/browser.js"></script>
<link rel="import" href="../../../test/common-test-setup.html"/>
-<link rel="import" href="gr-syntax-lib-loader.html">
+<link rel="import" href="gr-lib-loader.html">
<script>void(0);</script>
<test-fixture id="basic">
<template>
- <gr-syntax-lib-loader></gr-syntax-lib-loader>
+ <gr-lib-loader></gr-lib-loader>
</template>
</test-fixture>
<script>
- suite('gr-syntax-lib-loader tests', () => {
+ suite('gr-lib-loader tests', () => {
+ let sandbox;
let element;
let resolveLoad;
let loadStub;
setup(() => {
+ sandbox = sinon.sandbox.create();
element = fixture('basic');
- loadStub = sinon.stub(element, '_loadHLJS', () =>
+ loadStub = sandbox.stub(element, '_loadScript', () =>
new Promise(resolve => resolveLoad = resolve)
);
// Assert preconditions:
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
});
teardown(() => {
if (window.hljs) {
delete window.hljs;
}
- loadStub.restore();
+ sandbox.restore();
// Because the element state is a singleton, clean it up.
- element._state.configured = false;
- element._state.loading = false;
- element._state.callbacks = [];
+ element._hljsState.configured = false;
+ element._hljsState.loading = false;
+ element._hljsState.callbacks = [];
});
test('only load once', done => {
+ sandbox.stub(element, '_getHLJSUrl').returns('');
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
// It should now be in the loading state.
assert.isTrue(loadStub.called);
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
const secondCallHandler = sinon.stub();
- element.get().then(secondCallHandler);
+ element.getHLJS().then(secondCallHandler);
// No change in state.
- assert.isTrue(element._state.loading);
+ assert.isTrue(element._hljsState.loading);
assert.isFalse(firstCallHandler.called);
assert.isFalse(secondCallHandler.called);
@@ -82,7 +85,7 @@
resolveLoad();
flush(() => {
// The state should be loaded and both handlers called.
- assert.isFalse(element._state.loading);
+ assert.isFalse(element._hljsState.loading);
assert.isTrue(firstCallHandler.called);
assert.isTrue(secondCallHandler.called);
done();
@@ -105,7 +108,7 @@
test('returns hljs', done => {
const firstCallHandler = sinon.stub();
- element.get().then(firstCallHandler);
+ element.getHLJS().then(firstCallHandler);
flush(() => {
assert.isTrue(firstCallHandler.called);
assert.isTrue(firstCallHandler.calledWith(hljsStub));
@@ -114,7 +117,7 @@
});
test('configures hljs', done => {
- element.get().then(() => {
+ element.getHLJS().then(() => {
assert.isTrue(window.hljs.configure.calledOnce);
done();
});
@@ -123,15 +126,10 @@
suite('_getHLJSUrl', () => {
suite('checking _getLibRoot', () => {
- let libRootStub;
let root;
setup(() => {
- libRootStub = sinon.stub(element, '_getLibRoot', () => root);
- });
-
- teardown(() => {
- libRootStub.restore();
+ sandbox.stub(element, '_getLibRoot', () => root);
});
test('with no root', () => {
diff --git a/polygerrit-ui/app/styles/themes/app-theme.html b/polygerrit-ui/app/styles/themes/app-theme.html
index 69262c9..4500e10 100644
--- a/polygerrit-ui/app/styles/themes/app-theme.html
+++ b/polygerrit-ui/app/styles/themes/app-theme.html
@@ -42,7 +42,7 @@
--table-header-background-color: #fafafa;
--table-subheader-background-color: #eaeaea;
- --chip-background-color: var(--header-background-color);
+ --chip-background-color: #eee;
--dropdown-background-color: #fff;
diff --git a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
index 3a5cd83b..579e783 100644
--- a/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
+++ b/polygerrit-ui/app/template_test_srcs/convert_for_template_tests.py
@@ -1,5 +1,6 @@
-import os, re, json
-from shutil import copyfile, rmtree
+import json
+import os
+import re
polymerRegex = r"Polymer\({"
polymerCompiledRegex = re.compile(polymerRegex)
@@ -10,103 +11,119 @@
regexBehavior = r"<script>(.+)<\/script>"
behaviorCompiledRegex = re.compile(regexBehavior, re.DOTALL)
+
def _open(filename, mode="r"):
- try:
- return open(filename, mode, encoding="utf-8")
- except TypeError:
- return open(filename, mode)
+ try:
+ return open(filename, mode, encoding="utf-8")
+ except TypeError:
+ return open(filename, mode)
-def replaceBehaviorLikeHTML (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- match = behaviorCompiledRegex.search(file_str)
- if (match):
- with _open("polygerrit-ui/temp/behaviors/" + fileOut.replace("html", "js") , "w+") as f:
- f.write(match.group(1))
-def replaceBehaviorLikeJS (fileIn, fileOut):
- with _open(fileIn) as f:
- file_str = f.read()
- with _open("polygerrit-ui/temp/behaviors/" + fileOut , "w+") as f:
- f.write(file_str)
+def replaceBehaviorLikeHTML(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ match = behaviorCompiledRegex.search(file_str)
+ if match:
+ with _open("polygerrit-ui/temp/behaviors/" +
+ fileOut.replace("html", "js"), "w+") as f:
+ f.write(match.group(1))
+
+
+def replaceBehaviorLikeJS(fileIn, fileOut):
+ with _open(fileIn) as f:
+ file_str = f.read()
+ with _open("polygerrit-ui/temp/behaviors/" + fileOut, "w+") as f:
+ f.write(file_str)
+
def generateStubBehavior(behaviorName):
- with _open("polygerrit-ui/temp/behaviors/" + behaviorName + ".js", "w+") as f:
- f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
+ with _open("polygerrit-ui/temp/behaviors/" +
+ behaviorName + ".js", "w+") as f:
+ f.write("/** @polymerBehavior **/\n" + behaviorName + "= {};")
-def replacePolymerElement (fileIn, fileOut, root):
- with _open(fileIn) as f:
- key = fileOut.split('.')[0]
- # Removed self invoked function
- file_str = f.read()
- file_str_no_fn = fnCompiledRegex.search(file_str)
- if file_str_no_fn:
- package = root.replace("/", ".") + "." + fileOut
+def replacePolymerElement(fileIn, fileOut, root):
+ with _open(fileIn) as f:
+ key = fileOut.split('.')[0]
+ # Removed self invoked function
+ file_str = f.read()
+ file_str_no_fn = fnCompiledRegex.search(file_str)
- with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
- mainFileContents = re.sub(polymerCompiledRegex, "exports = Polymer({", file_str_no_fn.group(1)).replace("'use strict';", "")
- f.write("/** \n" \
- "* @fileoverview \n" \
- "* @suppress {missingProperties} \n" \
- "*/ \n\n" \
- "goog.module('polygerrit." + package + "')\n\n" + mainFileContents)
+ if file_str_no_fn:
+ package = root.replace("/", ".") + "." + fileOut
- # Add package and javascript to files object.
- elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
- elements[key]["package"] = package
+ with _open("polygerrit-ui/temp/" + fileOut, "w+") as f:
+ mainFileContents = re.sub(
+ polymerCompiledRegex,
+ "exports = Polymer({",
+ file_str_no_fn.group(1)).replace("'use strict';", "")
+ f.write("/** \n"
+ "* @fileoverview \n"
+ "* @suppress {missingProperties} \n"
+ "*/ \n\n"
+ "goog.module('polygerrit." + package + "')\n\n" +
+ mainFileContents)
+
+ # Add package and javascript to files object.
+ elements[key]["js"] = "polygerrit-ui/temp/" + fileOut
+ elements[key]["package"] = package
+
def writeTempFile(file, root):
- # This is included in an extern because it is directly on the window object.
- # (for now at least).
- if "gr-reporting" in file:
- return
- key = file.split('.')[0]
- if not key in elements:
- # gr-app doesn't have an additional level
- elements[key] = {"directory": 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]}
- if file.endswith(".html") and not file.endswith("_test.html"):
- # gr-navigation is treated like a behavior rather than a standard element
- # because of the way it added to the Gerrit object.
- if file.endswith("gr-navigation.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- else:
- elements[key]["html"] = os.path.join(root, file)
- if file.endswith(".js"):
- replacePolymerElement(os.path.join(root, file), file, root)
+ # This is included in an extern because it is directly on the window object
+ # (for now at least).
+ if "gr-reporting" in file:
+ return
+ key = file.split('.')[0]
+ if key not in elements:
+ # gr-app doesn't have an additional level
+ elements[key] = {
+ "directory":
+ 'gr-app' if len(root.split("/")) < 4 else root.split("/")[3]
+ }
+ if file.endswith(".html") and not file.endswith("_test.html"):
+ # gr-navigation is treated like a behavior rather than a standard
+        # element because of the way it is added to the Gerrit object.
+ if file.endswith("gr-navigation.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ else:
+ elements[key]["html"] = os.path.join(root, file)
+ if file.endswith(".js"):
+ replacePolymerElement(os.path.join(root, file), file, root)
if __name__ == "__main__":
- # Create temp directory.
- if not os.path.exists("polygerrit-ui/temp"):
- os.makedirs("polygerrit-ui/temp")
+ # Create temp directory.
+ if not os.path.exists("polygerrit-ui/temp"):
+ os.makedirs("polygerrit-ui/temp")
- # Within temp directory create behavior directory.
- if not os.path.exists("polygerrit-ui/temp/behaviors"):
- os.makedirs("polygerrit-ui/temp/behaviors")
+ # Within temp directory create behavior directory.
+ if not os.path.exists("polygerrit-ui/temp/behaviors"):
+ os.makedirs("polygerrit-ui/temp/behaviors")
- elements = {}
+ elements = {}
- # Go through every file in app/elements, and re-write accordingly to temp
- # directory, and also added to elements object, which is used to generate a
- # map of html files, package names, and javascript files.
- for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
- for file in files:
- writeTempFile(file, root)
+ # Go through every file in app/elements, and re-write accordingly to temp
+ # directory, and also added to elements object, which is used to generate a
+ # map of html files, package names, and javascript files.
+ for root, dirs, files in os.walk("polygerrit-ui/app/elements"):
+ for file in files:
+ writeTempFile(file, root)
- # Special case for polymer behaviors we are using.
- replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
- generateStubBehavior("Polymer.IronOverlayBehavior")
- generateStubBehavior("Polymer.IronFitBehavior")
+ # Special case for polymer behaviors we are using.
+ replaceBehaviorLikeHTML("polygerrit-ui/app/bower_components/iron-a11y-keys-behavior/iron-a11y-keys-behavior.html", "iron-a11y-keys-behavior.html")
+ generateStubBehavior("Polymer.IronOverlayBehavior")
+ generateStubBehavior("Polymer.IronFitBehavior")
- #TODO figure out something to do with iron-overlay-behavior. it is hard-coded reformatted.
+ # TODO figure out something to do with iron-overlay-behavior.
+ # it is hard-coded reformatted.
- with _open("polygerrit-ui/temp/map.json", "w+") as f:
- f.write(json.dumps(elements))
+ with _open("polygerrit-ui/temp/map.json", "w+") as f:
+ f.write(json.dumps(elements))
- for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
- for file in files:
- if file.endswith("behavior.html"):
- replaceBehaviorLikeHTML(os.path.join(root, file), file)
- elif file.endswith("behavior.js"):
- replaceBehaviorLikeJS(os.path.join(root, file), file)
+ for root, dirs, files in os.walk("polygerrit-ui/app/behaviors"):
+ for file in files:
+ if file.endswith("behavior.html"):
+ replaceBehaviorLikeHTML(os.path.join(root, file), file)
+ elif file.endswith("behavior.js"):
+ replaceBehaviorLikeJS(os.path.join(root, file), file)
diff --git a/polygerrit-ui/app/test/index.html b/polygerrit-ui/app/test/index.html
index 6a562fc..5a5dbcd 100644
--- a/polygerrit-ui/app/test/index.html
+++ b/polygerrit-ui/app/test/index.html
@@ -112,7 +112,6 @@
'diff/gr-ranged-comment-layer/gr-ranged-comment-layer_test.html',
'diff/gr-selection-action-box/gr-selection-action-box_test.html',
'diff/gr-syntax-layer/gr-syntax-layer_test.html',
- 'diff/gr-syntax-lib-loader/gr-syntax-lib-loader_test.html',
'edit/gr-default-editor/gr-default-editor_test.html',
'edit/gr-edit-controls/gr-edit-controls_test.html',
'edit/gr-edit-file-controls/gr-edit-file-controls_test.html',
@@ -165,6 +164,7 @@
'shared/gr-js-api-interface/gr-plugin-endpoints_test.html',
'shared/gr-js-api-interface/gr-plugin-rest-api_test.html',
'shared/gr-fixed-panel/gr-fixed-panel_test.html',
+ 'shared/gr-lib-loader/gr-lib-loader_test.html',
'shared/gr-limited-text/gr-limited-text_test.html',
'shared/gr-linked-chip/gr-linked-chip_test.html',
'shared/gr-linked-text/gr-linked-text_test.html',
diff --git a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
index 699dd0e..c51e9dc 100644
--- a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
+++ b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
@@ -33,6 +33,7 @@
window.CLOSURE_NO_DEPS = true;
{if $canonicalPath != ''}window.CANONICAL_PATH = '{$canonicalPath}';{/if}
{if $versionInfo}window.VERSION_INFO = '{$versionInfo}';{/if}
+ {if $staticResourcePath != ''}window.STATIC_RESOURCE_PATH = '{$staticResourcePath}';{/if}
</script>{\n}
{if $faviconPath}
diff --git a/resources/com/google/gerrit/pgm/Startup.py b/resources/com/google/gerrit/pgm/Startup.py
index 469d5df..ec18f42 100644
--- a/resources/com/google/gerrit/pgm/Startup.py
+++ b/resources/com/google/gerrit/pgm/Startup.py
@@ -19,14 +19,16 @@
from __future__ import print_function
import sys
+
def print_help():
- for (n, v) in vars(sys.modules['__main__']).items():
- if not n.startswith("__") and not n in ['help', 'reload'] \
- and str(type(v)) != "<type 'javapackage'>" \
- and not str(v).startswith("<module"):
- print("\"%s\" is \"%s\"" % (n, v))
- print()
- print("Welcome to the Gerrit Inspector")
- print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+ for (n, v) in vars(sys.modules['__main__']).items():
+ if not n.startswith("__") and n not in ['help', 'reload'] \
+ and str(type(v)) != "<type 'javapackage'>" \
+ and not str(v).startswith("<module"):
+ print("\"%s\" is \"%s\"" % (n, v))
+ print()
+ print("Welcome to the Gerrit Inspector")
+ print("Enter help() to see the above again, EOF to quit and stop Gerrit")
+
print_help()
diff --git a/tools/bzl/license-map.py b/tools/bzl/license-map.py
index 74a84cc..476ccb9 100644
--- a/tools/bzl/license-map.py
+++ b/tools/bzl/license-map.py
@@ -25,35 +25,34 @@
handled_rules = []
for xml in args.xmls:
- tree = ET.parse(xml)
- root = tree.getroot()
+ tree = ET.parse(xml)
+ root = tree.getroot()
- for child in root:
- rule_name = child.attrib["name"]
- if rule_name in handled_rules:
- # already handled in other xml files
- continue
+ for child in root:
+ rule_name = child.attrib["name"]
+ if rule_name in handled_rules:
+ # already handled in other xml files
+ continue
- handled_rules.append(rule_name)
- for c in child.getchildren():
- if c.tag != "rule-input":
- continue
+ handled_rules.append(rule_name)
+ for c in child.getchildren():
+ if c.tag != "rule-input":
+ continue
- license_name = c.attrib["name"]
- if LICENSE_PREFIX in license_name:
- entries[rule_name].append(license_name)
- graph[license_name].append(rule_name)
+ license_name = c.attrib["name"]
+ if LICENSE_PREFIX in license_name:
+ entries[rule_name].append(license_name)
+ graph[license_name].append(rule_name)
if len(graph[DO_NOT_DISTRIBUTE]):
- print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
- for target in graph[DO_NOT_DISTRIBUTE]:
- print(target, file=stderr)
- exit(1)
+ print("DO_NOT_DISTRIBUTE license found in:", file=stderr)
+ for target in graph[DO_NOT_DISTRIBUTE]:
+ print(target, file=stderr)
+ exit(1)
if args.asciidoctor:
- print(
-# We don't want any blank line before "= Gerrit Code Review - Licenses"
-"""= Gerrit Code Review - Licenses
+ # We don't want any blank line before "= Gerrit Code Review - Licenses"
+ print("""= Gerrit Code Review - Licenses
Gerrit open source software is licensed under the <<Apache2_0,Apache
License 2.0>>. Executable distributions also include other software
@@ -93,40 +92,39 @@
""")
for n in sorted(graph.keys()):
- if len(graph[n]) == 0:
- continue
+ if len(graph[n]) == 0:
+ continue
- name = n[len(LICENSE_PREFIX):]
- safename = name.replace(".", "_")
- print()
- print("[[%s]]" % safename)
- print(name)
- print()
- for d in sorted(graph[n]):
- if d.startswith("//lib:") or d.startswith("//lib/"):
- p = d[len("//lib:"):]
- else:
- p = d[d.index(":")+1:].lower()
- if "__" in p:
- p = p[:p.index("__")]
- print("* " + p)
- print()
- print("[[%s_license]]" % safename)
- print("----")
- filename = n[2:].replace(":", "/")
- try:
- with open(filename, errors='ignore') as fd:
- copyfileobj(fd, stdout)
- except TypeError:
- with open(filename) as fd:
- copyfileobj(fd, stdout)
- print()
- print("----")
- print()
+ name = n[len(LICENSE_PREFIX):]
+ safename = name.replace(".", "_")
+ print()
+ print("[[%s]]" % safename)
+ print(name)
+ print()
+ for d in sorted(graph[n]):
+ if d.startswith("//lib:") or d.startswith("//lib/"):
+ p = d[len("//lib:"):]
+ else:
+ p = d[d.index(":")+1:].lower()
+ if "__" in p:
+ p = p[:p.index("__")]
+ print("* " + p)
+ print()
+ print("[[%s_license]]" % safename)
+ print("----")
+ filename = n[2:].replace(":", "/")
+ try:
+ with open(filename, errors='ignore') as fd:
+ copyfileobj(fd, stdout)
+ except TypeError:
+ with open(filename) as fd:
+ copyfileobj(fd, stdout)
+ print()
+ print("----")
+ print()
if args.asciidoctor:
- print(
-"""
+ print("""
GERRIT
------
Part of link:index.html[Gerrit Code Review]
diff --git a/tools/download_file.py b/tools/download_file.py
index 26671f0..29398e6 100755
--- a/tools/download_file.py
+++ b/tools/download_file.py
@@ -30,49 +30,50 @@
def safe_mkdirs(d):
- if path.isdir(d):
- return
- try:
- makedirs(d)
- except OSError as err:
- if not path.isdir(d):
- raise err
+ if path.isdir(d):
+ return
+ try:
+ makedirs(d)
+ except OSError as err:
+ if not path.isdir(d):
+ raise err
def download_properties(root_dir):
- """ Get the download properties.
+ """ Get the download properties.
- First tries to find the properties file in the given root directory,
- and if not found there, tries in the Gerrit settings folder in the
- user's home directory.
+ First tries to find the properties file in the given root directory,
+ and if not found there, tries in the Gerrit settings folder in the
+ user's home directory.
- Returns a set of download properties, which may be empty.
+    Returns a dict of download properties, which may be empty.
- """
- p = {}
- local_prop = path.join(root_dir, LOCAL_PROPERTIES)
- if not path.isfile(local_prop):
- local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
- if path.isfile(local_prop):
- try:
- with open(local_prop) as fd:
- for line in fd:
- if line.startswith('download.'):
- d = [e.strip() for e in line.split('=', 1)]
- name, url = d[0], d[1]
- p[name[len('download.'):]] = url
- except OSError:
- pass
- return p
+ """
+ p = {}
+ local_prop = path.join(root_dir, LOCAL_PROPERTIES)
+ if not path.isfile(local_prop):
+ local_prop = path.join(GERRIT_HOME, LOCAL_PROPERTIES)
+ if path.isfile(local_prop):
+ try:
+ with open(local_prop) as fd:
+ for line in fd:
+ if line.startswith('download.'):
+ d = [e.strip() for e in line.split('=', 1)]
+ name, url = d[0], d[1]
+ p[name[len('download.'):]] = url
+ except OSError:
+ pass
+ return p
def cache_entry(args):
- if args.v:
- h = args.v
- else:
- h = sha1(args.u.encode('utf-8')).hexdigest()
- name = '%s-%s' % (path.basename(args.o), h)
- return path.join(CACHE_DIR, name)
+ if args.v:
+ h = args.v
+ else:
+ h = sha1(args.u.encode('utf-8')).hexdigest()
+ name = '%s-%s' % (path.basename(args.o), h)
+ return path.join(CACHE_DIR, name)
+
opts = OptionParser()
opts.add_option('-o', help='local output file')
@@ -85,89 +86,90 @@
root_dir = args.o
while root_dir and path.dirname(root_dir) != root_dir:
- root_dir, n = path.split(root_dir)
- if n == 'WORKSPACE':
- break
+ root_dir, n = path.split(root_dir)
+ if n == 'WORKSPACE':
+ break
redirects = download_properties(root_dir)
cache_ent = cache_entry(args)
src_url = resolve_url(args.u, redirects)
if not path.exists(cache_ent):
- try:
- safe_mkdirs(path.dirname(cache_ent))
- except OSError as err:
- print('error creating directory %s: %s' %
- (path.dirname(cache_ent), err), file=stderr)
- exit(1)
+ try:
+ safe_mkdirs(path.dirname(cache_ent))
+ except OSError as err:
+ print('error creating directory %s: %s' %
+ (path.dirname(cache_ent), err), file=stderr)
+ exit(1)
- print('Download %s' % src_url, file=stderr)
- try:
- check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
- except OSError as err:
- print('could not invoke curl: %s\nis curl installed?' % err, file=stderr)
- exit(1)
- except CalledProcessError as err:
- print('error using curl: %s' % err, file=stderr)
- exit(1)
+ print('Download %s' % src_url, file=stderr)
+ try:
+ check_call(['curl', '--proxy-anyauth', '-ksSfLo', cache_ent, src_url])
+ except OSError as err:
+ print('could not invoke curl: %s\nis curl installed?' % err,
+ file=stderr)
+ exit(1)
+ except CalledProcessError as err:
+ print('error using curl: %s' % err, file=stderr)
+ exit(1)
if args.v:
- have = hash_file(sha1(), cache_ent).hexdigest()
- if args.v != have:
- print((
- '%s:\n' +
- 'expected %s\n' +
- 'received %s\n') % (src_url, args.v, have), file=stderr)
- try:
- remove(cache_ent)
- except OSError as err:
- if path.exists(cache_ent):
- print('error removing %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ have = hash_file(sha1(), cache_ent).hexdigest()
+ if args.v != have:
+ print((
+ '%s:\n' +
+ 'expected %s\n' +
+ 'received %s\n') % (src_url, args.v, have), file=stderr)
+ try:
+ remove(cache_ent)
+ except OSError as err:
+ if path.exists(cache_ent):
+ print('error removing %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
exclude = []
if args.x:
- exclude += args.x
+ exclude += args.x
if args.exclude_java_sources:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if n.endswith('.java'):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if n.endswith('.java'):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
if args.unsign:
- try:
- with ZipFile(cache_ent, 'r') as zf:
- for n in zf.namelist():
- if (n.endswith('.RSA')
- or n.endswith('.SF')
- or n.endswith('.LIST')):
- exclude.append(n)
- except (BadZipfile, LargeZipFile) as err:
- print('error opening %s: %s' % (cache_ent, err), file=stderr)
- exit(1)
+ try:
+ with ZipFile(cache_ent, 'r') as zf:
+ for n in zf.namelist():
+ if (n.endswith('.RSA')
+ or n.endswith('.SF')
+ or n.endswith('.LIST')):
+ exclude.append(n)
+ except (BadZipfile, LargeZipFile) as err:
+ print('error opening %s: %s' % (cache_ent, err), file=stderr)
+ exit(1)
safe_mkdirs(path.dirname(args.o))
if exclude:
- try:
- shutil.copyfile(cache_ent, args.o)
- except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
- try:
- check_call(['zip', '-d', args.o] + exclude)
- except CalledProcessError as err:
- print('error removing files from zip: %s' % err, file=stderr)
- exit(1)
-else:
- try:
- link(cache_ent, args.o)
- except OSError as err:
try:
- shutil.copyfile(cache_ent, args.o)
+ shutil.copyfile(cache_ent, args.o)
except (shutil.Error, IOError) as err:
- print('error copying to %s: %s' % (args.o, err), file=stderr)
- exit(1)
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
+ try:
+ check_call(['zip', '-d', args.o] + exclude)
+ except CalledProcessError as err:
+ print('error removing files from zip: %s' % err, file=stderr)
+ exit(1)
+else:
+ try:
+ link(cache_ent, args.o)
+ except OSError as err:
+ try:
+ shutil.copyfile(cache_ent, args.o)
+ except (shutil.Error, IOError) as err:
+ print('error copying to %s: %s' % (args.o, err), file=stderr)
+ exit(1)
diff --git a/tools/eclipse/project.py b/tools/eclipse/project.py
index a6b0964..b99c04e 100755
--- a/tools/eclipse/project.py
+++ b/tools/eclipse/project.py
@@ -30,20 +30,20 @@
GWT = '//gerrit-gwtui:ui_module'
AUTO = '//lib/auto:auto-value'
JRE = '/'.join([
- 'org.eclipse.jdt.launching.JRE_CONTAINER',
- 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
- 'JavaSE-1.8',
+ 'org.eclipse.jdt.launching.JRE_CONTAINER',
+ 'org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType',
+ 'JavaSE-1.8',
])
# Map of targets to corresponding classpath collector rules
cp_targets = {
- AUTO: '//tools/eclipse:autovalue_classpath_collect',
- GWT: '//tools/eclipse:gwt_classpath_collect',
- MAIN: '//tools/eclipse:main_classpath_collect',
+ AUTO: '//tools/eclipse:autovalue_classpath_collect',
+ GWT: '//tools/eclipse:gwt_classpath_collect',
+ MAIN: '//tools/eclipse:main_classpath_collect',
}
ROOT = path.abspath(__file__)
while not path.exists(path.join(ROOT, 'WORKSPACE')):
- ROOT = path.dirname(ROOT)
+ ROOT = path.dirname(ROOT)
opts = OptionParser()
opts.add_option('--plugins', help='create eclipse projects for plugins',
@@ -56,38 +56,43 @@
batch_option = '--batch' if args.batch else None
+
def _build_bazel_cmd(*args):
- cmd = ['bazel']
- if batch_option:
- cmd.append('--batch')
- for arg in args:
- cmd.append(arg)
- return cmd
+ cmd = ['bazel']
+ if batch_option:
+ cmd.append('--batch')
+ for arg in args:
+ cmd.append(arg)
+ return cmd
+
def retrieve_ext_location():
- return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+ return check_output(_build_bazel_cmd('info', 'output_base')).strip()
+
def gen_bazel_path():
- bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
- with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
- fd.write("bazel=%s\n" % bazel)
- fd.write("PATH=%s\n" % environ["PATH"])
+ bazel = check_output(['which', 'bazel']).strip().decode('UTF-8')
+ with open(path.join(ROOT, ".bazel_path"), 'w') as fd:
+ fd.write("bazel=%s\n" % bazel)
+ fd.write("PATH=%s\n" % environ["PATH"])
+
def _query_classpath(target):
- deps = []
- t = cp_targets[target]
- try:
- check_call(_build_bazel_cmd('build', t))
- except CalledProcessError:
- exit(1)
- name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
- deps = [line.rstrip('\n') for line in open(name)]
- return deps
+ deps = []
+ t = cp_targets[target]
+ try:
+ check_call(_build_bazel_cmd('build', t))
+ except CalledProcessError:
+ exit(1)
+ name = 'bazel-bin/tools/eclipse/' + t.split(':')[1] + '.runtime_classpath'
+ deps = [line.rstrip('\n') for line in open(name)]
+ return deps
+
def gen_project(name='gerrit', root=ROOT):
- p = path.join(root, '.project')
- with open(p, 'w') as fd:
- print("""\
+ p = path.join(root, '.project')
+ with open(p, 'w') as fd:
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>%(name)s</name>
@@ -102,16 +107,17 @@
</projectDescription>\
""" % {"name": name}, file=fd)
+
def gen_plugin_classpath(root):
- p = path.join(root, '.classpath')
- with open(p, 'w') as fd:
- if path.exists(path.join(root, 'src', 'test', 'java')):
- testpath = """
+ p = path.join(root, '.classpath')
+ with open(p, 'w') as fd:
+ if path.exists(path.join(root, 'src', 'test', 'java')):
+ testpath = """
<classpathentry excluding="**/BUILD" kind="src" path="src/test/java"\
out="eclipse-out/test"/>"""
- else:
- testpath = ""
- print("""\
+ else:
+ testpath = ""
+ print("""\
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry excluding="**/BUILD" kind="src" path="src/main/java"/>%(testpath)s
@@ -120,186 +126,193 @@
<classpathentry kind="output" path="eclipse-out/classes"/>
</classpath>""" % {"testpath": testpath}, file=fd)
+
def gen_classpath(ext):
- def make_classpath():
- impl = minidom.getDOMImplementation()
- return impl.createDocument(None, 'classpath', None)
+ def make_classpath():
+ impl = minidom.getDOMImplementation()
+ return impl.createDocument(None, 'classpath', None)
- def classpathentry(kind, path, src=None, out=None, exported=None):
- e = doc.createElement('classpathentry')
- e.setAttribute('kind', kind)
- # TODO(davido): Remove this and other exclude BUILD files hack
- # when this Bazel bug is fixed:
- # https://github.com/bazelbuild/bazel/issues/1083
- if kind == 'src':
- e.setAttribute('excluding', '**/BUILD')
- e.setAttribute('path', path)
- if src:
- e.setAttribute('sourcepath', src)
- if out:
- e.setAttribute('output', out)
- if exported:
- e.setAttribute('exported', 'true')
- doc.documentElement.appendChild(e)
+ def classpathentry(kind, path, src=None, out=None, exported=None):
+ e = doc.createElement('classpathentry')
+ e.setAttribute('kind', kind)
+ # TODO(davido): Remove this and other exclude BUILD files hack
+ # when this Bazel bug is fixed:
+ # https://github.com/bazelbuild/bazel/issues/1083
+ if kind == 'src':
+ e.setAttribute('excluding', '**/BUILD')
+ e.setAttribute('path', path)
+ if src:
+ e.setAttribute('sourcepath', src)
+ if out:
+ e.setAttribute('output', out)
+ if exported:
+ e.setAttribute('exported', 'true')
+ doc.documentElement.appendChild(e)
- doc = make_classpath()
- src = set()
- lib = set()
- proto = set()
- gwt_src = set()
- gwt_lib = set()
- plugins = set()
+ doc = make_classpath()
+ src = set()
+ lib = set()
+ proto = set()
+ gwt_src = set()
+ gwt_lib = set()
+ plugins = set()
- # Classpath entries are absolute for cross-cell support
- java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
- srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
- for p in _query_classpath(MAIN):
- if p.endswith('-src.jar'):
- # gwt_module() depends on -src.jar for Java to JavaScript compiles.
- if p.startswith("external"):
- p = path.join(ext, p)
- gwt_lib.add(p)
- continue
-
- m = java_library.match(p)
- if m:
- src.add(m.group(1))
- # Exceptions: both source and lib
- if p.endswith('libquery_parser.jar') or \
- p.endswith('libgerrit-prolog-common.jar'):
- lib.add(p)
- # JGit dependency from external repository
- if 'gerrit-' not in p and 'jgit' in p:
- lib.add(p)
- # Assume any jars in /proto/ are from java_proto_library rules
- if '/bin/proto/' in p:
- proto.add(p)
- else:
- # Don't mess up with Bazel internal test runner dependencies.
- # When we use Eclipse we rely on it for running the tests
- if p.endswith("external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
- continue
- if p.startswith("external"):
- p = path.join(ext, p)
- lib.add(p)
-
- for p in _query_classpath(GWT):
- m = java_library.match(p)
- if m:
- gwt_src.add(m.group(1))
-
- classpathentry('src', 'java')
- classpathentry('src', 'javatests', out='eclipse-out/test')
- classpathentry('src', 'resources')
- for s in sorted(src):
- out = None
-
- if s.startswith('lib/'):
- out = 'eclipse-out/lib'
- elif s.startswith('plugins/'):
- if args.plugins:
- plugins.add(s)
- continue
- out = 'eclipse-out/' + s
-
- p = path.join(s, 'java')
- if path.exists(p):
- classpathentry('src', p, out=out)
- continue
-
- for env in ['main', 'test']:
- o = None
- if out:
- o = out + '/' + env
- elif env == 'test':
- o = 'eclipse-out/test'
-
- for srctype in ['java', 'resources']:
- p = path.join(s, 'src', env, srctype)
- if path.exists(p):
- classpathentry('src', p, out=o)
-
- for libs in [lib, gwt_lib]:
- for j in sorted(libs):
- s = None
- m = srcs.match(j)
- if m:
- prefix = m.group(1)
- suffix = m.group(2)
- p = path.join(prefix, "jar", "%s-src.jar" % suffix)
- if path.exists(p):
- s = p
- if args.plugins:
- classpathentry('lib', j, s, exported=True)
- else:
- # Filter out the source JARs that we pull through transitive closure of
- # GWT plugin API (we add source directories themself). Exception is
- # libEdit-src.jar, that is needed for GWT SDM to work.
- m = java_library.match(j)
- if m:
- if m.group(1).startswith("gerrit-") and \
- j.endswith("-src.jar") and \
- not j.endswith("libEdit-src.jar"):
+ # Classpath entries are absolute for cross-cell support
+ java_library = re.compile('bazel-out/.*?-fastbuild/bin/(.*)/[^/]+[.]jar$')
+ srcs = re.compile('(.*/external/[^/]+)/jar/(.*)[.]jar')
+ for p in _query_classpath(MAIN):
+ if p.endswith('-src.jar'):
+ # gwt_module() depends on -src.jar for Java to JavaScript compiles.
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ gwt_lib.add(p)
continue
- classpathentry('lib', j, s)
- for p in sorted(proto):
- s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
- s = s.replace('.jar', '-src.jar')
- classpathentry('lib', p, s)
+ m = java_library.match(p)
+ if m:
+ src.add(m.group(1))
+ # Exceptions: both source and lib
+ if p.endswith('libquery_parser.jar') or \
+ p.endswith('libgerrit-prolog-common.jar'):
+ lib.add(p)
+ # JGit dependency from external repository
+ if 'gerrit-' not in p and 'jgit' in p:
+ lib.add(p)
+ # Assume any jars in /proto/ are from java_proto_library rules
+ if '/bin/proto/' in p:
+ proto.add(p)
+ else:
+ # Don't mess up with Bazel internal test runner dependencies.
+ # When we use Eclipse we rely on it for running the tests
+ if p.endswith(
+ "external/bazel_tools/tools/jdk/TestRunner_deploy.jar"):
+ continue
+ if p.startswith("external"):
+ p = path.join(ext, p)
+ lib.add(p)
- for s in sorted(gwt_src):
- p = path.join(ROOT, s, 'src', 'main', 'java')
- if path.exists(p):
- classpathentry('lib', p, out='eclipse-out/gwtsrc')
+ for p in _query_classpath(GWT):
+ m = java_library.match(p)
+ if m:
+ gwt_src.add(m.group(1))
- classpathentry('con', JRE)
- classpathentry('output', 'eclipse-out/classes')
+ classpathentry('src', 'java')
+ classpathentry('src', 'javatests', out='eclipse-out/test')
+ classpathentry('src', 'resources')
+ for s in sorted(src):
+ out = None
- p = path.join(ROOT, '.classpath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ if s.startswith('lib/'):
+ out = 'eclipse-out/lib'
+ elif s.startswith('plugins/'):
+ if args.plugins:
+ plugins.add(s)
+ continue
+ out = 'eclipse-out/' + s
- if args.plugins:
- for plugin in plugins:
- plugindir = path.join(ROOT, plugin)
- try:
- gen_project(plugin.replace('plugins/', ""), plugindir)
- gen_plugin_classpath(plugindir)
- except (IOError, OSError) as err:
- print('error generating project for %s: %s' % (plugin, err),
- file=sys.stderr)
+ p = path.join(s, 'java')
+ if path.exists(p):
+ classpathentry('src', p, out=out)
+ continue
+
+ for env in ['main', 'test']:
+ o = None
+ if out:
+ o = out + '/' + env
+ elif env == 'test':
+ o = 'eclipse-out/test'
+
+ for srctype in ['java', 'resources']:
+ p = path.join(s, 'src', env, srctype)
+ if path.exists(p):
+ classpathentry('src', p, out=o)
+
+ for libs in [lib, gwt_lib]:
+ for j in sorted(libs):
+ s = None
+ m = srcs.match(j)
+ if m:
+ prefix = m.group(1)
+ suffix = m.group(2)
+ p = path.join(prefix, "jar", "%s-src.jar" % suffix)
+ if path.exists(p):
+ s = p
+ if args.plugins:
+ classpathentry('lib', j, s, exported=True)
+ else:
+ # Filter out the source JARs that we pull through transitive
+ # closure of GWT plugin API (we add source directories
+                # themselves). The exception is libEdit-src.jar, which is needed
+ # for GWT SDM to work.
+ m = java_library.match(j)
+ if m:
+ if m.group(1).startswith("gerrit-") and \
+ j.endswith("-src.jar") and \
+ not j.endswith("libEdit-src.jar"):
+ continue
+ classpathentry('lib', j, s)
+
+ for p in sorted(proto):
+ s = p.replace('-fastbuild/bin/proto/lib', '-fastbuild/genfiles/proto/')
+ s = s.replace('.jar', '-src.jar')
+ classpathentry('lib', p, s)
+
+ for s in sorted(gwt_src):
+ p = path.join(ROOT, s, 'src', 'main', 'java')
+ if path.exists(p):
+ classpathentry('lib', p, out='eclipse-out/gwtsrc')
+
+ classpathentry('con', JRE)
+ classpathentry('output', 'eclipse-out/classes')
+
+ p = path.join(ROOT, '.classpath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
+ if args.plugins:
+ for plugin in plugins:
+ plugindir = path.join(ROOT, plugin)
+ try:
+ gen_project(plugin.replace('plugins/', ""), plugindir)
+ gen_plugin_classpath(plugindir)
+ except (IOError, OSError) as err:
+ print('error generating project for %s: %s' % (plugin, err),
+ file=sys.stderr)
+
def gen_factorypath(ext):
- doc = minidom.getDOMImplementation().createDocument(None, 'factorypath', None)
- for jar in _query_classpath(AUTO):
- e = doc.createElement('factorypathentry')
- e.setAttribute('kind', 'EXTJAR')
- e.setAttribute('id', path.join(ext, jar))
- e.setAttribute('enabled', 'true')
- e.setAttribute('runInBatchMode', 'false')
- doc.documentElement.appendChild(e)
+ doc = minidom.getDOMImplementation().createDocument(None, 'factorypath',
+ None)
+ for jar in _query_classpath(AUTO):
+ e = doc.createElement('factorypathentry')
+ e.setAttribute('kind', 'EXTJAR')
+ e.setAttribute('id', path.join(ext, jar))
+ e.setAttribute('enabled', 'true')
+ e.setAttribute('runInBatchMode', 'false')
+ doc.documentElement.appendChild(e)
- p = path.join(ROOT, '.factorypath')
- with open(p, 'w') as fd:
- doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+ p = path.join(ROOT, '.factorypath')
+ with open(p, 'w') as fd:
+ doc.writexml(fd, addindent='\t', newl='\n', encoding='UTF-8')
+
try:
- ext_location = retrieve_ext_location().decode("utf-8")
- gen_project(args.project_name)
- gen_classpath(ext_location)
- gen_factorypath(ext_location)
- gen_bazel_path()
+ ext_location = retrieve_ext_location().decode("utf-8")
+ gen_project(args.project_name)
+ gen_classpath(ext_location)
+ gen_factorypath(ext_location)
+ gen_bazel_path()
- # TODO(davido): Remove this when GWT gone
- gwt_working_dir = ".gwt_work_dir"
- if not path.isdir(gwt_working_dir):
- makedirs(path.join(ROOT, gwt_working_dir))
+ # TODO(davido): Remove this when GWT gone
+ gwt_working_dir = ".gwt_work_dir"
+ if not path.isdir(gwt_working_dir):
+ makedirs(path.join(ROOT, gwt_working_dir))
- try:
- check_call(_build_bazel_cmd('build', MAIN, GWT, '//java/org/eclipse/jgit:libEdit-src.jar'))
- except CalledProcessError:
- exit(1)
+ try:
+ check_call(_build_bazel_cmd('build', MAIN, GWT,
+ '//java/org/eclipse/jgit:libEdit-src.jar'))
+ except CalledProcessError:
+ exit(1)
except KeyboardInterrupt:
- print('Interrupted by user', file=sys.stderr)
- exit(1)
+ print('Interrupted by user', file=sys.stderr)
+ exit(1)
diff --git a/tools/js/bower2bazel.py b/tools/js/bower2bazel.py
index 171ab55..7b24524 100755
--- a/tools/js/bower2bazel.py
+++ b/tools/js/bower2bazel.py
@@ -13,9 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Suggested call sequence:
+"""
+Suggested call sequence:
-python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl -b lib/js/bower_components.bzl
+python tools/js/bower2bazel.py -w lib/js/bower_archives.bzl \
+ -b lib/js/bower_components.bzl
"""
from __future__ import print_function
@@ -31,139 +33,147 @@
import glob
import bowerutil
-# list of licenses for packages that don't specify one in their bower.json file.
+# list of licenses for packages that don't specify one in their bower.json file
package_licenses = {
- "codemirror-minified": "codemirror-minified",
- "es6-promise": "es6-promise",
- "fetch": "fetch",
- "font-roboto": "polymer",
- "iron-a11y-announcer": "polymer",
- "iron-a11y-keys-behavior": "polymer",
- "iron-autogrow-textarea": "polymer",
- "iron-behaviors": "polymer",
- "iron-dropdown": "polymer",
- "iron-fit-behavior": "polymer",
- "iron-flex-layout": "polymer",
- "iron-form-element-behavior": "polymer",
- "iron-icon": "polymer",
- "iron-iconset-svg": "polymer",
- "iron-input": "polymer",
- "iron-menu-behavior": "polymer",
- "iron-meta": "polymer",
- "iron-overlay-behavior": "polymer",
- "iron-resizable-behavior": "polymer",
- "iron-selector": "polymer",
- "iron-validatable-behavior": "polymer",
- "moment": "moment",
- "neon-animation": "polymer",
- "page": "page.js",
- "paper-button": "polymer",
- "paper-icon-button": "polymer",
- "paper-input": "polymer",
- "paper-item": "polymer",
- "paper-listbox": "polymer",
- "paper-toggle-button": "polymer",
- "paper-styles": "polymer",
- "paper-tabs": "polymer",
- "polymer": "polymer",
- "polymer-resin": "polymer",
- "promise-polyfill": "promise-polyfill",
- "web-animations-js": "Apache2.0",
- "webcomponentsjs": "polymer",
- "paper-material": "polymer",
- "paper-styles": "polymer",
- "paper-behaviors": "polymer",
- "paper-ripple": "polymer",
- "iron-checked-element-behavior": "polymer",
- "font-roboto": "polymer",
+ "codemirror-minified": "codemirror-minified",
+ "es6-promise": "es6-promise",
+ "fetch": "fetch",
+ "font-roboto": "polymer",
+ "iron-a11y-announcer": "polymer",
+ "iron-a11y-keys-behavior": "polymer",
+ "iron-autogrow-textarea": "polymer",
+ "iron-behaviors": "polymer",
+ "iron-dropdown": "polymer",
+ "iron-fit-behavior": "polymer",
+ "iron-flex-layout": "polymer",
+ "iron-form-element-behavior": "polymer",
+ "iron-icon": "polymer",
+ "iron-iconset-svg": "polymer",
+ "iron-input": "polymer",
+ "iron-menu-behavior": "polymer",
+ "iron-meta": "polymer",
+ "iron-overlay-behavior": "polymer",
+ "iron-resizable-behavior": "polymer",
+ "iron-selector": "polymer",
+ "iron-validatable-behavior": "polymer",
+ "moment": "moment",
+ "neon-animation": "polymer",
+ "page": "page.js",
+ "paper-button": "polymer",
+ "paper-icon-button": "polymer",
+ "paper-input": "polymer",
+ "paper-item": "polymer",
+ "paper-listbox": "polymer",
+ "paper-toggle-button": "polymer",
+ "paper-styles": "polymer",
+ "paper-tabs": "polymer",
+ "polymer": "polymer",
+ "polymer-resin": "polymer",
+ "promise-polyfill": "promise-polyfill",
+ "web-animations-js": "Apache2.0",
+ "webcomponentsjs": "polymer",
+ "paper-material": "polymer",
+ "paper-styles": "polymer",
+ "paper-behaviors": "polymer",
+ "paper-ripple": "polymer",
+ "iron-checked-element-behavior": "polymer",
+ "font-roboto": "polymer",
}
def build_bower_json(version_targets, seeds):
- """Generate bower JSON file, return its path.
+ """Generate bower JSON file, return its path.
- Args:
- version_targets: bazel target names of the versions.json file.
- seeds: an iterable of bower package names of the seed packages, ie.
- the packages whose versions we control manually.
- """
- bower_json = collections.OrderedDict()
- bower_json['name'] = 'bower2bazel-output'
- bower_json['version'] = '0.0.0'
- bower_json['description'] = 'Auto-generated bower.json for dependency management'
- bower_json['private'] = True
- bower_json['dependencies'] = {}
+ Args:
+ version_targets: bazel target names of the versions.json file.
+      seeds: an iterable of bower package names of the seed packages, i.e.
+ the packages whose versions we control manually.
+ """
+ bower_json = collections.OrderedDict()
+ bower_json['name'] = 'bower2bazel-output'
+ bower_json['version'] = '0.0.0'
+ bower_json['description'] = 'Auto-generated bower.json for dependency ' + \
+ 'management'
+ bower_json['private'] = True
+ bower_json['dependencies'] = {}
- seeds = set(seeds)
- for v in version_targets:
- path = os.path.join("bazel-out/*-fastbuild/bin", v.lstrip("/").replace(":", "/"))
- fs = glob.glob(path)
- assert len(fs) == 1, '%s: file not found or multiple files found: %s' % (path, fs)
- with open(fs[0]) as f:
- j = json.load(f)
- if "" in j:
- # drop dummy entries.
- del j[""]
+ seeds = set(seeds)
+ for v in version_targets:
+ path = os.path.join("bazel-out/*-fastbuild/bin",
+ v.lstrip("/").replace(":", "/"))
+ fs = glob.glob(path)
+ err_msg = '%s: file not found or multiple files found: %s' % (path, fs)
+ assert len(fs) == 1, err_msg
+ with open(fs[0]) as f:
+ j = json.load(f)
+ if "" in j:
+ # drop dummy entries.
+ del j[""]
- trimmed = {}
- for k, v in j.items():
- if k in seeds:
- trimmed[k] = v
+ trimmed = {}
+ for k, v in j.items():
+ if k in seeds:
+ trimmed[k] = v
- bower_json['dependencies'].update(trimmed)
+ bower_json['dependencies'].update(trimmed)
- tmpdir = tempfile.mkdtemp()
- ret = os.path.join(tmpdir, 'bower.json')
- with open(ret, 'w') as f:
- json.dump(bower_json, f, indent=2)
- return ret
+ tmpdir = tempfile.mkdtemp()
+ ret = os.path.join(tmpdir, 'bower.json')
+ with open(ret, 'w') as f:
+ json.dump(bower_json, f, indent=2)
+ return ret
+
def decode(input):
- try:
- return input.decode("utf-8")
- except TypeError:
- return input
+ try:
+ return input.decode("utf-8")
+ except TypeError:
+ return input
+
def bower_command(args):
- base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
- exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
- fs = sorted(glob.glob(exp))
- assert len(fs) == 1, "bower tarball not found or have multiple versions %s" % fs
- return ["python", os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
+ base = subprocess.check_output(["bazel", "info", "output_base"]).strip()
+ exp = os.path.join(decode(base), "external", "bower", "*npm_binary.tgz")
+ fs = sorted(glob.glob(exp))
+    err_msg = "bower tarball not found or has multiple versions %s" % fs
+ assert len(fs) == 1, err_msg
+ return ["python",
+ os.getcwd() + "/tools/js/run_npm_binary.py", sorted(fs)[0]] + args
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-w', help='.bzl output for WORKSPACE')
- opts.add_option('-b', help='.bzl output for //lib:BUILD')
- opts, args = opts.parse_args()
+ opts = optparse.OptionParser()
+ opts.add_option('-w', help='.bzl output for WORKSPACE')
+ opts.add_option('-b', help='.bzl output for //lib:BUILD')
+ opts, args = opts.parse_args()
- target_str = subprocess.check_output([
- "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
- seed_str = subprocess.check_output([
- "bazel", "query", "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
- targets = [s for s in decode(target_str).split('\n') if s]
- seeds = [s for s in decode(seed_str).split('\n') if s]
- prefix = "//lib/js:"
- non_seeds = [s for s in seeds if not s.startswith(prefix)]
- assert not non_seeds, non_seeds
- seeds = set([s[len(prefix):] for s in seeds])
+ target_str = subprocess.check_output([
+ "bazel", "query", "kind(bower_component_bundle, //polygerrit-ui/...)"])
+ seed_str = subprocess.check_output(
+ ["bazel", "query",
+ "attr(seed, 1, kind(bower_component, deps(//polygerrit-ui/...)))"])
+ targets = [s for s in decode(target_str).split('\n') if s]
+ seeds = [s for s in decode(seed_str).split('\n') if s]
+ prefix = "//lib/js:"
+ non_seeds = [s for s in seeds if not s.startswith(prefix)]
+ assert not non_seeds, non_seeds
+ seeds = set([s[len(prefix):] for s in seeds])
- version_targets = [t + "-versions.json" for t in targets]
- subprocess.check_call(['bazel', 'build'] + version_targets)
- bower_json_path = build_bower_json(version_targets, seeds)
- dir = os.path.dirname(bower_json_path)
- cmd = bower_command(["install"])
+ version_targets = [t + "-versions.json" for t in targets]
+ subprocess.check_call(['bazel', 'build'] + version_targets)
+ bower_json_path = build_bower_json(version_targets, seeds)
+ dir = os.path.dirname(bower_json_path)
+ cmd = bower_command(["install"])
- build_out = sys.stdout
- if opts.b:
- build_out = open(opts.b + ".tmp", 'w')
+ build_out = sys.stdout
+ if opts.b:
+ build_out = open(opts.b + ".tmp", 'w')
- ws_out = sys.stdout
- if opts.b:
- ws_out = open(opts.w + ".tmp", 'w')
+ ws_out = sys.stdout
+ if opts.b:
+ ws_out = open(opts.w + ".tmp", 'w')
- header = """# DO NOT EDIT
+ header = """# DO NOT EDIT
# generated with the following command:
#
# %s
@@ -171,30 +181,30 @@
""" % ' '.join(sys.argv)
- ws_out.write(header)
- build_out.write(header)
+ ws_out.write(header)
+ build_out.write(header)
- oldwd = os.getcwd()
- os.chdir(dir)
- subprocess.check_call(cmd)
+ oldwd = os.getcwd()
+ os.chdir(dir)
+ subprocess.check_call(cmd)
- interpret_bower_json(seeds, ws_out, build_out)
- ws_out.close()
- build_out.close()
+ interpret_bower_json(seeds, ws_out, build_out)
+ ws_out.close()
+ build_out.close()
- os.chdir(oldwd)
- os.rename(opts.w + ".tmp", opts.w)
- os.rename(opts.b + ".tmp", opts.b)
+ os.chdir(oldwd)
+ os.rename(opts.w + ".tmp", opts.w)
+ os.rename(opts.b + ".tmp", opts.b)
def dump_workspace(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
- out.write('def load_bower_archives():\n')
+ out.write('load("//tools/bzl:js.bzl", "bower_archive")\n\n')
+ out.write('def load_bower_archives():\n')
- for d in data:
- if d["name"] in seeds:
- continue
- out.write(""" bower_archive(
+ for d in data:
+ if d["name"] in seeds:
+ continue
+ out.write(""" bower_archive(
name = "%(name)s",
package = "%(normalized-name)s",
version = "%(version)s",
@@ -203,48 +213,49 @@
def dump_build(data, seeds, out):
- out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
- out.write('def define_bower_components():\n')
- for d in data:
- out.write(" bower_component(\n")
- out.write(" name = \"%s\",\n" % d["name"])
- out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
- deps = sorted(d.get("dependencies", {}).keys())
- if deps:
- if len(deps) == 1:
- out.write(" deps = [ \":%s\" ],\n" % deps[0])
- else:
- out.write(" deps = [\n")
- for dep in deps:
- out.write(" \":%s\",\n" % dep)
- out.write(" ],\n")
- if d["name"] in seeds:
- out.write(" seed = True,\n")
- out.write(" )\n")
- # done
+ out.write('load("//tools/bzl:js.bzl", "bower_component")\n\n')
+ out.write('def define_bower_components():\n')
+ for d in data:
+ out.write(" bower_component(\n")
+ out.write(" name = \"%s\",\n" % d["name"])
+ out.write(" license = \"//lib:LICENSE-%s\",\n" % d["bazel-license"])
+ deps = sorted(d.get("dependencies", {}).keys())
+ if deps:
+ if len(deps) == 1:
+ out.write(" deps = [ \":%s\" ],\n" % deps[0])
+ else:
+ out.write(" deps = [\n")
+ for dep in deps:
+ out.write(" \":%s\",\n" % dep)
+ out.write(" ],\n")
+ if d["name"] in seeds:
+ out.write(" seed = True,\n")
+ out.write(" )\n")
+ # done
def interpret_bower_json(seeds, ws_out, build_out):
- out = subprocess.check_output(["find", "bower_components/", "-name", ".bower.json"])
+ out = subprocess.check_output(["find", "bower_components/", "-name",
+ ".bower.json"])
- data = []
- for f in sorted(decode(out).split('\n')):
- if not f:
- continue
- pkg = json.load(open(f))
- pkg_name = pkg["name"]
+ data = []
+ for f in sorted(decode(out).split('\n')):
+ if not f:
+ continue
+ pkg = json.load(open(f))
+ pkg_name = pkg["name"]
- pkg["bazel-sha1"] = bowerutil.hash_bower_component(
- hashlib.sha1(), os.path.dirname(f)).hexdigest()
- license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
+ pkg["bazel-sha1"] = bowerutil.hash_bower_component(
+ hashlib.sha1(), os.path.dirname(f)).hexdigest()
+ license = package_licenses.get(pkg_name, "DO_NOT_DISTRIBUTE")
- pkg["bazel-license"] = license
- pkg["normalized-name"] = pkg["_originalSource"]
- data.append(pkg)
+ pkg["bazel-license"] = license
+ pkg["normalized-name"] = pkg["_originalSource"]
+ data.append(pkg)
- dump_workspace(data, seeds, ws_out)
- dump_build(data, seeds, build_out)
+ dump_workspace(data, seeds, ws_out)
+ dump_build(data, seeds, build_out)
if __name__ == '__main__':
- main(sys.argv[1:])
+ main(sys.argv[1:])
diff --git a/tools/js/bowerutil.py b/tools/js/bowerutil.py
index c2e11cd..9fb82af 100644
--- a/tools/js/bowerutil.py
+++ b/tools/js/bowerutil.py
@@ -16,31 +16,31 @@
def hash_bower_component(hash_obj, path):
- """Hash the contents of a bower component directory.
+ """Hash the contents of a bower component directory.
- This is a stable hash of a directory downloaded with `bower install`, minus
- the .bower.json file, which is autogenerated each time by bower. Used in lieu
- of hashing a zipfile of the contents, since zipfiles are difficult to hash in
- a stable manner.
+ This is a stable hash of a directory downloaded with `bower install`, minus
+ the .bower.json file, which is autogenerated each time by bower. Used in
+ lieu of hashing a zipfile of the contents, since zipfiles are difficult to
+ hash in a stable manner.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the directory to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the directory to hash.
- Returns:
- The passed-in hash_obj.
- """
- if not os.path.isdir(path):
- raise ValueError('Not a directory: %s' % path)
+ Returns:
+ The passed-in hash_obj.
+ """
+ if not os.path.isdir(path):
+ raise ValueError('Not a directory: %s' % path)
- path = os.path.abspath(path)
- for root, dirs, files in os.walk(path):
- dirs.sort()
- for f in sorted(files):
- if f == '.bower.json':
- continue
- p = os.path.join(root, f)
- hash_obj.update(p[len(path)+1:].encode("utf-8"))
- hash_obj.update(open(p, "rb").read())
+ path = os.path.abspath(path)
+ for root, dirs, files in os.walk(path):
+ dirs.sort()
+ for f in sorted(files):
+ if f == '.bower.json':
+ continue
+ p = os.path.join(root, f)
+ hash_obj.update(p[len(path)+1:].encode("utf-8"))
+ hash_obj.update(open(p, "rb").read())
- return hash_obj
+ return hash_obj
diff --git a/tools/js/download_bower.py b/tools/js/download_bower.py
index 3db39d5..c9a5df6 100755
--- a/tools/js/download_bower.py
+++ b/tools/js/download_bower.py
@@ -30,99 +30,105 @@
def bower_cmd(bower, *args):
- cmd = bower.split(' ')
- cmd.extend(args)
- return cmd
+ cmd = bower.split(' ')
+ cmd.extend(args)
+ return cmd
def bower_info(bower, name, package, version):
- cmd = bower_cmd(bower, '-l=error', '-j',
- 'info', '%s#%s' % (package, version))
- try:
- p = subprocess.Popen(cmd , stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except:
- sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
- raise
- out, err = p.communicate()
- if p.returncode:
- sys.stderr.write(err)
- raise OSError('Command failed: %s' % ' '.join(cmd))
+ cmd = bower_cmd(bower, '-l=error', '-j',
+ 'info', '%s#%s' % (package, version))
+ try:
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except:
+ sys.stderr.write("error executing: %s\n" % ' '.join(cmd))
+ raise
+ out, err = p.communicate()
+ if p.returncode:
+ sys.stderr.write(err)
+ raise OSError('Command failed: %s' % ' '.join(cmd))
- try:
- info = json.loads(out)
- except ValueError:
- raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
- info_name = info.get('name')
- if info_name != name:
- raise ValueError('expected package name %s, got: %s' % (name, info_name))
- return info
+ try:
+ info = json.loads(out)
+ except ValueError:
+ raise ValueError('invalid JSON from %s:\n%s' % (" ".join(cmd), out))
+ info_name = info.get('name')
+ if info_name != name:
+ raise ValueError(
+ 'expected package name %s, got: %s' % (name, info_name))
+ return info
def ignore_deps(info):
- # Tell bower to ignore dependencies so we just download this component. This
- # is just an optimization, since we only pick out the component we need, but
- # it's important when downloading sizable dependency trees.
- #
- # As of 1.6.5 I don't think ignoredDependencies can be specified on the
- # command line with --config, so we have to create .bowerrc.
- deps = info.get('dependencies')
- if deps:
- with open(os.path.join('.bowerrc'), 'w') as f:
- json.dump({'ignoredDependencies': list(deps.keys())}, f)
+ # Tell bower to ignore dependencies so we just download this component.
+ # This is just an optimization, since we only pick out the component we
+ # need, but it's important when downloading sizable dependency trees.
+ #
+ # As of 1.6.5 I don't think ignoredDependencies can be specified on the
+ # command line with --config, so we have to create .bowerrc.
+ deps = info.get('dependencies')
+ if deps:
+ with open(os.path.join('.bowerrc'), 'w') as f:
+ json.dump({'ignoredDependencies': list(deps.keys())}, f)
def cache_entry(name, package, version, sha1):
- if not sha1:
- sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
- return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
+ if not sha1:
+ sha1 = hashlib.sha1('%s#%s' % (package, version)).hexdigest()
+ return os.path.join(CACHE_DIR, '%s-%s.zip-%s' % (name, version, sha1))
def main(args):
- opts = optparse.OptionParser()
- opts.add_option('-n', help='short name of component')
- opts.add_option('-b', help='bower command')
- opts.add_option('-p', help='full package name of component')
- opts.add_option('-v', help='version number')
- opts.add_option('-s', help='expected content sha1')
- opts.add_option('-o', help='output file location')
- opts, args_ = opts.parse_args(args)
+ opts = optparse.OptionParser()
+ opts.add_option('-n', help='short name of component')
+ opts.add_option('-b', help='bower command')
+ opts.add_option('-p', help='full package name of component')
+ opts.add_option('-v', help='version number')
+ opts.add_option('-s', help='expected content sha1')
+ opts.add_option('-o', help='output file location')
+ opts, args_ = opts.parse_args(args)
- assert opts.p
- assert opts.v
- assert opts.n
+ assert opts.p
+ assert opts.v
+ assert opts.n
- cwd = os.getcwd()
- outzip = os.path.join(cwd, opts.o)
- cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
+ cwd = os.getcwd()
+ outzip = os.path.join(cwd, opts.o)
+ cached = cache_entry(opts.n, opts.p, opts.v, opts.s)
- if not os.path.exists(cached):
- info = bower_info(opts.b, opts.n, opts.p, opts.v)
- ignore_deps(info)
- subprocess.check_call(
- bower_cmd(opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
- bc = os.path.join(cwd, 'bower_components')
- subprocess.check_call(
- ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
- cwd=bc)
+ if not os.path.exists(cached):
+ info = bower_info(opts.b, opts.n, opts.p, opts.v)
+ ignore_deps(info)
+ subprocess.check_call(
+ bower_cmd(
+ opts.b, '--quiet', 'install', '%s#%s' % (opts.p, opts.v)))
+ bc = os.path.join(cwd, 'bower_components')
+ subprocess.check_call(
+ ['zip', '-q', '--exclude', '.bower.json', '-r', cached, opts.n],
+ cwd=bc)
- if opts.s:
- path = os.path.join(bc, opts.n)
- sha1 = bowerutil.hash_bower_component(hashlib.sha1(), path).hexdigest()
- if opts.s != sha1:
- print((
- '%s#%s:\n'
- 'expected %s\n'
- 'received %s\n') % (opts.p, opts.v, opts.s, sha1), file=sys.stderr)
- try:
- os.remove(cached)
- except OSError as err:
- if path.exists(cached):
- print('error removing %s: %s' % (cached, err), file=sys.stderr)
- return 1
+ if opts.s:
+ path = os.path.join(bc, opts.n)
+ sha1 = bowerutil.hash_bower_component(
+ hashlib.sha1(), path).hexdigest()
+ if opts.s != sha1:
+ print((
+ '%s#%s:\n'
+ 'expected %s\n'
+ 'received %s\n') % (opts.p, opts.v, opts.s, sha1),
+ file=sys.stderr)
+ try:
+ os.remove(cached)
+ except OSError as err:
+ if path.exists(cached):
+ print('error removing %s: %s' % (cached, err),
+ file=sys.stderr)
+ return 1
- shutil.copyfile(cached, outzip)
- return 0
+ shutil.copyfile(cached, outzip)
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/npm_pack.py b/tools/js/npm_pack.py
index de45083..d817701 100755
--- a/tools/js/npm_pack.py
+++ b/tools/js/npm_pack.py
@@ -32,49 +32,49 @@
def is_bundled(tar):
- # No entries for directories, so scan for a matching prefix.
- for entry in tar.getmembers():
- if entry.name.startswith('package/node_modules/'):
- return True
- return False
+ # No entries for directories, so scan for a matching prefix.
+ for entry in tar.getmembers():
+ if entry.name.startswith('package/node_modules/'):
+ return True
+ return False
def bundle_dependencies():
- with open('package.json') as f:
- package = json.load(f)
- package['bundledDependencies'] = list(package['dependencies'].keys())
- with open('package.json', 'w') as f:
- json.dump(package, f)
+ with open('package.json') as f:
+ package = json.load(f)
+ package['bundledDependencies'] = list(package['dependencies'].keys())
+ with open('package.json', 'w') as f:
+ json.dump(package, f)
def main(args):
- if len(args) != 2:
- print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
- return 1
+ if len(args) != 2:
+ print('Usage: %s <package> <version>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, version = args
- filename = '%s-%s.tgz' % (name, version)
- url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
+ name, version = args
+ filename = '%s-%s.tgz' % (name, version)
+ url = 'http://registry.npmjs.org/%s/-/%s' % (name, filename)
- tmpdir = tempfile.mkdtemp();
- tgz = os.path.join(tmpdir, filename)
- atexit.register(lambda: shutil.rmtree(tmpdir))
+ tmpdir = tempfile.mkdtemp()
+ tgz = os.path.join(tmpdir, filename)
+ atexit.register(lambda: shutil.rmtree(tmpdir))
- subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
- with tarfile.open(tgz, 'r:gz') as tar:
- if is_bundled(tar):
- print('%s already has bundled node_modules' % filename)
- return 1
- tar.extractall(path=tmpdir)
+ subprocess.check_call(['curl', '--proxy-anyauth', '-ksfo', tgz, url])
+ with tarfile.open(tgz, 'r:gz') as tar:
+ if is_bundled(tar):
+ print('%s already has bundled node_modules' % filename)
+ return 1
+ tar.extractall(path=tmpdir)
- oldpwd = os.getcwd()
- os.chdir(os.path.join(tmpdir, 'package'))
- bundle_dependencies()
- subprocess.check_call(['npm', 'install'])
- subprocess.check_call(['npm', 'pack'])
- shutil.copy(filename, os.path.join(oldpwd, filename))
- return 0
+ oldpwd = os.getcwd()
+ os.chdir(os.path.join(tmpdir, 'package'))
+ bundle_dependencies()
+ subprocess.check_call(['npm', 'install'])
+ subprocess.check_call(['npm', 'pack'])
+ shutil.copy(filename, os.path.join(oldpwd, filename))
+ return 0
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/js/run_npm_binary.py b/tools/js/run_npm_binary.py
index d769b98..dfcdaca 100644
--- a/tools/js/run_npm_binary.py
+++ b/tools/js/run_npm_binary.py
@@ -27,65 +27,68 @@
def extract(path, outdir, bin):
- if os.path.exists(os.path.join(outdir, bin)):
- return # Another process finished extracting, ignore.
+ if os.path.exists(os.path.join(outdir, bin)):
+ return # Another process finished extracting, ignore.
- # Use a temp directory adjacent to outdir so shutil.move can use the same
- # device atomically.
- tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def cleanup():
- try:
- shutil.rmtree(tmpdir)
- except OSError:
- pass # Too late now
- atexit.register(cleanup)
+ # Use a temp directory adjacent to outdir so shutil.move can use the same
+ # device atomically.
+ tmpdir = tempfile.mkdtemp(dir=os.path.dirname(outdir))
- def extract_one(mem):
- dest = os.path.join(outdir, mem.name)
- tar.extract(mem, path=tmpdir)
- try:
- os.makedirs(os.path.dirname(dest))
- except OSError:
- pass # Either exists, or will fail on the next line.
- shutil.move(os.path.join(tmpdir, mem.name), dest)
+ def cleanup():
+ try:
+ shutil.rmtree(tmpdir)
+ except OSError:
+ pass # Too late now
+ atexit.register(cleanup)
- with tarfile.open(path, 'r:gz') as tar:
- for mem in tar.getmembers():
- if mem.name != bin:
- extract_one(mem)
- # Extract bin last so other processes only short circuit when extraction is
- # finished.
- extract_one(tar.getmember(bin))
+ def extract_one(mem):
+ dest = os.path.join(outdir, mem.name)
+ tar.extract(mem, path=tmpdir)
+ try:
+ os.makedirs(os.path.dirname(dest))
+ except OSError:
+ pass # Either exists, or will fail on the next line.
+ shutil.move(os.path.join(tmpdir, mem.name), dest)
+
+ with tarfile.open(path, 'r:gz') as tar:
+ for mem in tar.getmembers():
+ if mem.name != bin:
+ extract_one(mem)
+ # Extract bin last so other processes only short circuit when
+ # extraction is finished.
+ extract_one(tar.getmember(bin))
+
def main(args):
- path = args[0]
- suffix = '.npm_binary.tgz'
- tgz = os.path.basename(path)
+ path = args[0]
+ suffix = '.npm_binary.tgz'
+ tgz = os.path.basename(path)
- parts = tgz[:-len(suffix)].split('@')
+ parts = tgz[:-len(suffix)].split('@')
- if not tgz.endswith(suffix) or len(parts) != 2:
- print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
- return 1
+ if not tgz.endswith(suffix) or len(parts) != 2:
+ print('usage: %s <path/to/npm_binary>' % sys.argv[0], file=sys.stderr)
+ return 1
- name, _ = parts
+ name, _ = parts
- # Avoid importing from gerrit because we don't want to depend on the right CWD.
- sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
- outdir = '%s-%s' % (path[:-len(suffix)], sha1)
- rel_bin = os.path.join('package', 'bin', name)
- bin = os.path.join(outdir, rel_bin)
- if not os.path.isfile(bin):
- extract(path, outdir, rel_bin)
+ # Avoid importing from gerrit because we don't want to depend on the right
+    # working directory.
+ sha1 = hashlib.sha1(open(path, 'rb').read()).hexdigest()
+ outdir = '%s-%s' % (path[:-len(suffix)], sha1)
+ rel_bin = os.path.join('package', 'bin', name)
+ bin = os.path.join(outdir, rel_bin)
+ if not os.path.isfile(bin):
+ extract(path, outdir, rel_bin)
- nodejs = spawn.find_executable('nodejs')
- if nodejs:
- # Debian installs Node.js as 'nodejs', due to a conflict with another
- # package.
- subprocess.check_call([nodejs, bin] + args[1:])
- else:
- subprocess.check_call([bin] + args[1:])
+ nodejs = spawn.find_executable('nodejs')
+ if nodejs:
+ # Debian installs Node.js as 'nodejs', due to a conflict with another
+ # package.
+ subprocess.check_call([nodejs, bin] + args[1:])
+ else:
+ subprocess.check_call([bin] + args[1:])
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(main(sys.argv[1:]))
diff --git a/tools/maven/mvn.py b/tools/maven/mvn.py
index 50c4ac6..d47d027 100755
--- a/tools/maven/mvn.py
+++ b/tools/maven/mvn.py
@@ -29,56 +29,57 @@
args, ctx = opts.parse_args()
if not args.v:
- print('version is empty', file=stderr)
- exit(1)
+ print('version is empty', file=stderr)
+ exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
- root = path.dirname(root)
+ root = path.dirname(root)
if 'install' == args.a:
- cmd = [
- 'mvn',
- 'install:install-file',
- '-Dversion=%s' % args.v,
- ]
+ cmd = [
+ 'mvn',
+ 'install:install-file',
+ '-Dversion=%s' % args.v,
+ ]
elif 'deploy' == args.a:
- cmd = [
- 'mvn',
- 'gpg:sign-and-deploy-file',
- '-DrepositoryId=%s' % args.repository,
- '-Durl=%s' % args.url,
- ]
+ cmd = [
+ 'mvn',
+ 'gpg:sign-and-deploy-file',
+ '-DrepositoryId=%s' % args.repository,
+ '-Durl=%s' % args.url,
+ ]
else:
- print("unknown action -a %s" % args.a, file=stderr)
- exit(1)
+ print("unknown action -a %s" % args.a, file=stderr)
+ exit(1)
for spec in args.s:
- artifact, packaging_type, src = spec.split(':')
- exe = cmd + [
- '-DpomFile=%s' % path.join(root, 'tools', 'maven', '%s_pom.xml' % artifact),
- '-Dpackaging=%s' % packaging_type,
- '-Dfile=%s' % src,
- ]
- try:
- if environ.get('VERBOSE'):
- print(' '.join(exe), file=stderr)
- check_output(exe)
- except Exception as e:
- print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
- file=stderr)
- if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
- print('Command output\n%s' % e.output, file=stderr)
- exit(1)
+ artifact, packaging_type, src = spec.split(':')
+ exe = cmd + [
+ '-DpomFile=%s' % path.join(root, 'tools', 'maven',
+ '%s_pom.xml' % artifact),
+ '-Dpackaging=%s' % packaging_type,
+ '-Dfile=%s' % src,
+ ]
+ try:
+ if environ.get('VERBOSE'):
+ print(' '.join(exe), file=stderr)
+ check_output(exe)
+ except Exception as e:
+ print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
+ file=stderr)
+ if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
+ print('Command output\n%s' % e.output, file=stderr)
+ exit(1)
out = stderr
if args.o:
- out = open(args.o, 'w')
+ out = open(args.o, 'w')
with out as fd:
- if args.repository:
- print('Repository: %s' % args.repository, file=fd)
- if args.url:
- print('URL: %s' % args.url, file=fd)
- print('Version: %s' % args.v, file=fd)
+ if args.repository:
+ print('Repository: %s' % args.repository, file=fd)
+ if args.url:
+ print('URL: %s' % args.url, file=fd)
+ print('Version: %s' % args.v, file=fd)
diff --git a/tools/merge_jars.py b/tools/merge_jars.py
index 97a87c4..6b46069 100755
--- a/tools/merge_jars.py
+++ b/tools/merge_jars.py
@@ -17,11 +17,10 @@
import collections
import sys
import zipfile
-import io
if len(sys.argv) < 3:
- print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
- exit(1)
+ print('usage: %s <out.zip> <in.zip>...' % sys.argv[0], file=sys.stderr)
+ exit(1)
outfile = sys.argv[1]
infiles = sys.argv[2:]
@@ -29,22 +28,22 @@
SERVICES = 'META-INF/services/'
try:
- with zipfile.ZipFile(outfile, 'w') as outzip:
- services = collections.defaultdict(lambda: '')
- for infile in infiles:
- with zipfile.ZipFile(infile) as inzip:
- for info in inzip.infolist():
- n = info.filename
- if n in seen:
- continue
- elif n.startswith(SERVICES):
- # Concatenate all provider configuration files.
- services[n] += inzip.read(n).decode("UTF-8")
- continue
- outzip.writestr(info, inzip.read(n))
- seen.add(n)
+ with zipfile.ZipFile(outfile, 'w') as outzip:
+ services = collections.defaultdict(lambda: '')
+ for infile in infiles:
+ with zipfile.ZipFile(infile) as inzip:
+ for info in inzip.infolist():
+ n = info.filename
+ if n in seen:
+ continue
+ elif n.startswith(SERVICES):
+ # Concatenate all provider configuration files.
+ services[n] += inzip.read(n).decode("UTF-8")
+ continue
+ outzip.writestr(info, inzip.read(n))
+ seen.add(n)
- for n, v in list(services.items()):
- outzip.writestr(n, v)
+ for n, v in list(services.items()):
+ outzip.writestr(n, v)
except Exception as err:
- exit('Failed to merge jars: %s' % err)
+ exit('Failed to merge jars: %s' % err)
diff --git a/tools/release-announcement.py b/tools/release-announcement.py
index f700185..a25a340 100755
--- a/tools/release-announcement.py
+++ b/tools/release-announcement.py
@@ -101,9 +101,9 @@
summary = summary + "."
data = {
- "version": Version(options.version),
- "previous": options.previous,
- "summary": summary
+ "version": Version(options.version),
+ "previous": options.previous,
+ "summary": summary
}
war = os.path.join(
diff --git a/tools/util.py b/tools/util.py
index e8182ed..45d0541 100644
--- a/tools/util.py
+++ b/tools/util.py
@@ -15,57 +15,59 @@
from os import path
REPO_ROOTS = {
- 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
- 'GERRIT_API': 'https://gerrit-api.commondatastorage.googleapis.com/release',
- 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
- 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
- 'MAVEN_SNAPSHOT': 'https://oss.sonatype.org/content/repositories/snapshots',
+ 'GERRIT': 'http://gerrit-maven.storage.googleapis.com',
+ 'GERRIT_API':
+ 'https://gerrit-api.commondatastorage.googleapis.com/release',
+ 'MAVEN_CENTRAL': 'http://repo1.maven.org/maven2',
+ 'MAVEN_LOCAL': 'file://' + path.expanduser('~/.m2/repository'),
+ 'MAVEN_SNAPSHOT':
+ 'https://oss.sonatype.org/content/repositories/snapshots',
}
def resolve_url(url, redirects):
- """ Resolve URL of a Maven artifact.
+ """ Resolve URL of a Maven artifact.
- prefix:path is passed as URL. prefix identifies known or custom
- repositories that can be rewritten in redirects set, passed as
- second arguments.
+ prefix:path is passed as URL. prefix identifies known or custom
+ repositories that can be rewritten in redirects set, passed as
+ second arguments.
- A special case is supported, when prefix neither exists in
- REPO_ROOTS, no in redirects set: the url is returned as is.
- This enables plugins to pass custom maven_repository URL as is
- directly to maven_jar().
+ A special case is supported, when prefix neither exists in
+    REPO_ROOTS, nor in redirects set: the URL is returned as is.
+ This enables plugins to pass custom maven_repository URL as is
+ directly to maven_jar().
- Returns a resolved path for Maven artifact.
- """
- s = url.find(':')
- if s < 0:
- return url
- scheme, rest = url[:s], url[s+1:]
- if scheme in redirects:
- root = redirects[scheme]
- elif scheme in REPO_ROOTS:
- root = REPO_ROOTS[scheme]
- else:
- return url
- root = root.rstrip('/')
- rest = rest.lstrip('/')
- return '/'.join([root, rest])
+ Returns a resolved path for Maven artifact.
+ """
+ s = url.find(':')
+ if s < 0:
+ return url
+ scheme, rest = url[:s], url[s+1:]
+ if scheme in redirects:
+ root = redirects[scheme]
+ elif scheme in REPO_ROOTS:
+ root = REPO_ROOTS[scheme]
+ else:
+ return url
+ root = root.rstrip('/')
+ rest = rest.lstrip('/')
+ return '/'.join([root, rest])
def hash_file(hash_obj, path):
- """Hash the contents of a file.
+ """Hash the contents of a file.
- Args:
- hash_obj: an open hash object, e.g. hashlib.sha1().
- path: path to the file to hash.
+ Args:
+ hash_obj: an open hash object, e.g. hashlib.sha1().
+ path: path to the file to hash.
- Returns:
- The passed-in hash_obj.
- """
- with open(path, 'rb') as f:
- while True:
- b = f.read(8192)
- if not b:
- break
- hash_obj.update(b)
- return hash_obj
+ Returns:
+ The passed-in hash_obj.
+ """
+ with open(path, 'rb') as f:
+ while True:
+ b = f.read(8192)
+ if not b:
+ break
+ hash_obj.update(b)
+ return hash_obj
diff --git a/tools/util_test.py b/tools/util_test.py
index 30647ba..fa67696 100644
--- a/tools/util_test.py
+++ b/tools/util_test.py
@@ -16,28 +16,32 @@
import unittest
from util import resolve_url
+
class TestResolveUrl(unittest.TestCase):
- """ run to test:
- python -m unittest -v util_test
- """
+ """ run to test:
+ python -m unittest -v util_test
+ """
- def testKnown(self):
- url = resolve_url('GERRIT:foo.jar', {})
- self.assertEqual(url, 'http://gerrit-maven.storage.googleapis.com/foo.jar')
+ def testKnown(self):
+ url = resolve_url('GERRIT:foo.jar', {})
+ self.assertEqual(url,
+ 'http://gerrit-maven.storage.googleapis.com/foo.jar')
- def testKnownRedirect(self):
- url = resolve_url('MAVEN_CENTRAL:foo.jar',
- {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
- self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
+ def testKnownRedirect(self):
+ url = resolve_url('MAVEN_CENTRAL:foo.jar',
+ {'MAVEN_CENTRAL': 'http://my.company.mirror/maven2'})
+ self.assertEqual(url, 'http://my.company.mirror/maven2/foo.jar')
- def testCustom(self):
- url = resolve_url('http://maven.example.com/release/foo.jar', {})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustom(self):
+ url = resolve_url('http://maven.example.com/release/foo.jar', {})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
- def testCustomRedirect(self):
- url = resolve_url('MAVEN_EXAMPLE:foo.jar',
- {'MAVEN_EXAMPLE': 'http://maven.example.com/release'})
- self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+ def testCustomRedirect(self):
+ url = resolve_url('MAVEN_EXAMPLE:foo.jar',
+ {'MAVEN_EXAMPLE':
+ 'http://maven.example.com/release'})
+ self.assertEqual(url, 'http://maven.example.com/release/foo.jar')
+
if __name__ == '__main__':
- unittest.main()
+ unittest.main()
diff --git a/tools/version.py b/tools/version.py
index 72b0134..4aafcb0 100755
--- a/tools/version.py
+++ b/tools/version.py
@@ -23,24 +23,24 @@
opts, args = parser.parse_args()
if not len(args):
- parser.error('not enough arguments')
+ parser.error('not enough arguments')
elif len(args) > 1:
- parser.error('too many arguments')
+ parser.error('too many arguments')
DEST_PATTERN = r'\g<1>%s\g<3>' % args[0]
def replace_in_file(filename, src_pattern):
- try:
- f = open(filename, "r")
- s = f.read()
- f.close()
- s = re.sub(src_pattern, DEST_PATTERN, s)
- f = open(filename, "w")
- f.write(s)
- f.close()
- except IOError as err:
- print('error updating %s: %s' % (filename, err), file=sys.stderr)
+ try:
+ f = open(filename, "r")
+ s = f.read()
+ f.close()
+ s = re.sub(src_pattern, DEST_PATTERN, s)
+ f = open(filename, "w")
+ f.write(s)
+ f.close()
+ except IOError as err:
+ print('error updating %s: %s' % (filename, err), file=sys.stderr)
src_pattern = re.compile(r'^(\s*<version>)([-.\w]+)(</version>\s*)$',
@@ -48,8 +48,8 @@
for project in ['gerrit-acceptance-framework', 'gerrit-extension-api',
'gerrit-plugin-api', 'gerrit-plugin-gwtui',
'gerrit-war']:
- pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
- replace_in_file(pom, src_pattern)
+ pom = os.path.join('tools', 'maven', '%s_pom.xml' % project)
+ replace_in_file(pom, src_pattern)
src_pattern = re.compile(r'^(GERRIT_VERSION = ")([-.\w]+)(")$', re.MULTILINE)
replace_in_file('version.bzl', src_pattern)