Merge "Show loading errors inside inline diffs"
diff --git a/Documentation/config-project-config.txt b/Documentation/config-project-config.txt
index ac303e9..c3244cc 100644
--- a/Documentation/config-project-config.txt
+++ b/Documentation/config-project-config.txt
@@ -256,7 +256,8 @@
- 'rejectEmptyCommit': Defines whether empty commits should be rejected when a change is merged.
Changes might not seem empty at first but when attempting to merge, rebasing can lead to an empty
-commit. If this option is set to 'true' the merge would fail.
+commit. If this option is set to 'true' the merge would fail. An empty commit is still allowed as
+the initial commit on a branch.
Merge strategy
diff --git a/Documentation/user-upload.txt b/Documentation/user-upload.txt
index baf388e..31a32ad 100644
--- a/Documentation/user-upload.txt
+++ b/Documentation/user-upload.txt
@@ -421,7 +421,6 @@
$ git push exp
----
-
[[push_replace]]
=== Replace Changes
diff --git a/java/Main.java b/java/Main.java
index f26b6df..11d8234 100644
--- a/java/Main.java
+++ b/java/Main.java
@@ -14,6 +14,7 @@
public final class Main {
private static final String FLOGGER_BACKEND_PROPERTY = "flogger.backend_factory";
+ private static final String FLOGGER_LOGGING_CONTEXT = "flogger.logging_context";
// We don't do any real work here because we need to import
// the archive lookup code and we cannot import a class in
@@ -42,6 +43,9 @@
}
private static void configureFloggerBackend() {
+ System.setProperty(
+ FLOGGER_LOGGING_CONTEXT, "com.google.gerrit.server.logging.LoggingContext#getInstance");
+
if (System.getProperty(FLOGGER_BACKEND_PROPERTY) != null) {
// Flogger backend is already configured
return;
diff --git a/java/com/google/gerrit/extensions/api/access/GerritPermission.java b/java/com/google/gerrit/extensions/api/access/GerritPermission.java
index 133de31..02afbdc 100644
--- a/java/com/google/gerrit/extensions/api/access/GerritPermission.java
+++ b/java/com/google/gerrit/extensions/api/access/GerritPermission.java
@@ -18,7 +18,13 @@
/** Gerrit permission for hosts, projects, refs, changes, labels and plugins. */
public interface GerritPermission {
- /** @return readable identifier of this permission for exception message. */
+ /**
+ * A description in the context of an exception message.
+ *
+ * <p>Should be grammatical when used in the construction "not permitted: [description] on
+ * [resource]", although individual {@code PermissionBackend} implementations may vary the
+ * wording.
+ */
String describeForException();
static String describeEnumValue(Enum<?> value) {
diff --git a/java/com/google/gerrit/extensions/restapi/AuthException.java b/java/com/google/gerrit/extensions/restapi/AuthException.java
index 0b4f459..fe1744b 100644
--- a/java/com/google/gerrit/extensions/restapi/AuthException.java
+++ b/java/com/google/gerrit/extensions/restapi/AuthException.java
@@ -14,10 +14,17 @@
package com.google.gerrit.extensions.restapi;
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.common.base.Strings;
+import java.util.Optional;
+
/** Caller cannot perform the request operation (HTTP 403 Forbidden). */
public class AuthException extends RestApiException {
private static final long serialVersionUID = 1L;
+ private Optional<String> advice = Optional.empty();
+
/** @param msg message to return to the client. */
public AuthException(String msg) {
super(msg);
@@ -30,4 +37,19 @@
public AuthException(String msg, Throwable cause) {
super(msg, cause);
}
+
+ public void setAdvice(String advice) {
+ checkArgument(!Strings.isNullOrEmpty(advice));
+ this.advice = Optional.of(advice);
+ }
+
+ /**
+ * Advice that the user can follow to acquire authorization to perform the action.
+ *
+ * <p>This may be long-form text with newlines, and may be printed to a terminal, for example in
+ * the message stream in response to a push.
+ */
+ public Optional<String> getAdvice() {
+ return advice;
+ }
}
diff --git a/java/com/google/gerrit/git/testing/PushResultSubject.java b/java/com/google/gerrit/git/testing/PushResultSubject.java
index c5163d1..929e182 100644
--- a/java/com/google/gerrit/git/testing/PushResultSubject.java
+++ b/java/com/google/gerrit/git/testing/PushResultSubject.java
@@ -54,6 +54,13 @@
Truth.assertThat(trimMessages()).isEqualTo(Arrays.stream(expectedLines).collect(joining("\n")));
}
+ public void containsMessages(String... expectedLines) {
+ checkArgument(expectedLines.length > 0, "use hasNoMessages()");
+ isNotNull();
+ Iterable<String> got = Splitter.on("\n").split(trimMessages());
+ Truth.assertThat(got).containsAllIn(expectedLines).inOrder();
+ }
+
private String trimMessages() {
return trimMessages(actual().getMessages());
}
diff --git a/java/com/google/gerrit/httpd/QueryDocumentationFilter.java b/java/com/google/gerrit/httpd/QueryDocumentationFilter.java
index 8b82c00..c41a7b9 100644
--- a/java/com/google/gerrit/httpd/QueryDocumentationFilter.java
+++ b/java/com/google/gerrit/httpd/QueryDocumentationFilter.java
@@ -59,7 +59,7 @@
HttpServletResponse rsp = (HttpServletResponse) response;
try {
List<DocResult> result = searcher.doQuery(request.getParameter("q"));
- RestApiServlet.replyJson(req, rsp, ImmutableListMultimap.of(), result);
+ RestApiServlet.replyJson(req, rsp, false, ImmutableListMultimap.of(), result);
} catch (DocQueryException e) {
logger.atSevere().withCause(e).log("Doc search failed");
rsp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
diff --git a/java/com/google/gerrit/httpd/restapi/ParameterParser.java b/java/com/google/gerrit/httpd/restapi/ParameterParser.java
index 2870cd0..172321d 100644
--- a/java/com/google/gerrit/httpd/restapi/ParameterParser.java
+++ b/java/com/google/gerrit/httpd/restapi/ParameterParser.java
@@ -56,8 +56,10 @@
import org.kohsuke.args4j.CmdLineException;
public class ParameterParser {
+ public static final String TRACE_PARAMETER = "trace";
+
private static final ImmutableSet<String> RESERVED_KEYS =
- ImmutableSet.of("pp", "prettyPrint", "strict", "callback", "alt", "fields");
+ ImmutableSet.of("pp", "prettyPrint", "strict", "callback", "alt", "fields", TRACE_PARAMETER);
@AutoValue
public abstract static class QueryParams {
diff --git a/java/com/google/gerrit/httpd/restapi/RestApiServlet.java b/java/com/google/gerrit/httpd/restapi/RestApiServlet.java
index a5c5a53..31d2a82 100644
--- a/java/com/google/gerrit/httpd/restapi/RestApiServlet.java
+++ b/java/com/google/gerrit/httpd/restapi/RestApiServlet.java
@@ -18,6 +18,7 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.flogger.LazyArgs.lazy;
import static com.google.common.net.HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static com.google.common.net.HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS;
import static com.google.common.net.HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS;
@@ -48,6 +49,7 @@
import static javax.servlet.http.HttpServletResponse.SC_PRECONDITION_FAILED;
import static javax.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
@@ -107,10 +109,12 @@
import com.google.gerrit.server.cache.PerThreadCache;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.LockFailureException;
+import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.permissions.GlobalPermission;
import com.google.gerrit.server.permissions.PermissionBackend;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.update.UpdateException;
+import com.google.gerrit.server.util.RequestId;
import com.google.gerrit.util.http.CacheHeaders;
import com.google.gerrit.util.http.RequestUtil;
import com.google.gson.ExclusionStrategy;
@@ -131,6 +135,7 @@
import com.google.inject.util.Providers;
import java.io.BufferedReader;
import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.FilterOutputStream;
import java.io.IOException;
@@ -177,6 +182,8 @@
private static final String FORM_TYPE = "application/x-www-form-urlencoded";
+ @VisibleForTesting public static final String X_GERRIT_TRACE = "X-Gerrit-Trace";
+
// HTTP 422 Unprocessable Entity.
// TODO: Remove when HttpServletResponse.SC_UNPROCESSABLE_ENTITY is available
private static final int SC_UNPROCESSABLE_ENTITY = 422;
@@ -280,332 +287,345 @@
RestResource rsrc = TopLevelResource.INSTANCE;
ViewData viewData = null;
- try (PerThreadCache ignored = PerThreadCache.create()) {
- if (isCorsPreflight(req)) {
- doCorsPreflight(req, res);
- return;
- }
+ try (TraceContext traceContext = enableTracing(req, res)) {
+ try (PerThreadCache ignored = PerThreadCache.create()) {
+ logger.atFinest().log(
+ "Received REST request: %s %s (parameters: %s)",
+ req.getMethod(), req.getRequestURI(), getParameterNames(req));
+ logger.atFinest().log("Calling user: %s", globals.currentUser.get().getLoggableName());
- qp = ParameterParser.getQueryParams(req);
- checkCors(req, res, qp.hasXdOverride());
- if (qp.hasXdOverride()) {
- req = applyXdOverrides(req, qp);
- }
- checkUserSession(req);
-
- List<IdString> path = splitPath(req);
- RestCollection<RestResource, RestResource> rc = members.get();
- globals
- .permissionBackend
- .currentUser()
- .checkAny(GlobalPermission.fromAnnotation(rc.getClass()));
-
- viewData = new ViewData(null, null);
-
- if (path.isEmpty()) {
- if (rc instanceof NeedsParams) {
- ((NeedsParams) rc).setParams(qp.params());
+ if (isCorsPreflight(req)) {
+ doCorsPreflight(req, res);
+ return;
}
- if (isRead(req)) {
- viewData = new ViewData(null, rc.list());
- } else if (isPost(req)) {
- RestView<RestResource> restCollectionView =
- rc.views().get("gerrit", "POST_ON_COLLECTION./");
- if (restCollectionView != null) {
- viewData = new ViewData(null, restCollectionView);
- } else {
- throw methodNotAllowed(req);
- }
- } else {
- // DELETE on root collections is not supported
- throw methodNotAllowed(req);
+ qp = ParameterParser.getQueryParams(req);
+ checkCors(req, res, qp.hasXdOverride());
+ if (qp.hasXdOverride()) {
+ req = applyXdOverrides(req, qp);
}
- } else {
- IdString id = path.remove(0);
- try {
- rsrc = rc.parse(rsrc, id);
- if (path.isEmpty()) {
- checkPreconditions(req);
- }
- } catch (ResourceNotFoundException e) {
- if (!path.isEmpty()) {
- throw e;
- }
+ checkUserSession(req);
- if (isPost(req) || isPut(req)) {
- RestView<RestResource> createView = rc.views().get("gerrit", "CREATE./");
- if (createView != null) {
- viewData = new ViewData(null, createView);
- status = SC_CREATED;
- path.add(id);
- } else {
- throw e;
- }
- } else if (isDelete(req)) {
- RestView<RestResource> deleteView = rc.views().get("gerrit", "DELETE_MISSING./");
- if (deleteView != null) {
- viewData = new ViewData(null, deleteView);
- status = SC_NO_CONTENT;
- path.add(id);
- } else {
- throw e;
- }
- } else {
- throw e;
- }
- }
- if (viewData.view == null) {
- viewData = view(rc, req.getMethod(), path);
- }
- }
- checkRequiresCapability(viewData);
+ List<IdString> path = splitPath(req);
+ RestCollection<RestResource, RestResource> rc = members.get();
+ globals
+ .permissionBackend
+ .currentUser()
+ .checkAny(GlobalPermission.fromAnnotation(rc.getClass()));
- while (viewData.view instanceof RestCollection<?, ?>) {
- @SuppressWarnings("unchecked")
- RestCollection<RestResource, RestResource> c =
- (RestCollection<RestResource, RestResource>) viewData.view;
+ viewData = new ViewData(null, null);
if (path.isEmpty()) {
+ if (rc instanceof NeedsParams) {
+ ((NeedsParams) rc).setParams(qp.params());
+ }
+
if (isRead(req)) {
- viewData = new ViewData(null, c.list());
+ viewData = new ViewData(null, rc.list());
} else if (isPost(req)) {
RestView<RestResource> restCollectionView =
- c.views().get(viewData.pluginName, "POST_ON_COLLECTION./");
- if (restCollectionView != null) {
- viewData = new ViewData(null, restCollectionView);
- } else {
- throw methodNotAllowed(req);
- }
- } else if (isDelete(req)) {
- RestView<RestResource> restCollectionView =
- c.views().get(viewData.pluginName, "DELETE_ON_COLLECTION./");
+ rc.views().get("gerrit", "POST_ON_COLLECTION./");
if (restCollectionView != null) {
viewData = new ViewData(null, restCollectionView);
} else {
throw methodNotAllowed(req);
}
} else {
+ // DELETE on root collections is not supported
throw methodNotAllowed(req);
}
- break;
- }
- IdString id = path.remove(0);
- try {
- rsrc = c.parse(rsrc, id);
- checkPreconditions(req);
- viewData = new ViewData(null, null);
- } catch (ResourceNotFoundException e) {
- if (!path.isEmpty()) {
- throw e;
- }
+ } else {
+ IdString id = path.remove(0);
+ try {
+ rsrc = rc.parse(rsrc, id);
+ if (path.isEmpty()) {
+ checkPreconditions(req);
+ }
+ } catch (ResourceNotFoundException e) {
+ if (!path.isEmpty()) {
+ throw e;
+ }
- if (isPost(req) || isPut(req)) {
- RestView<RestResource> createView = c.views().get("gerrit", "CREATE./");
- if (createView != null) {
- viewData = new ViewData(null, createView);
- status = SC_CREATED;
- path.add(id);
+ if (isPost(req) || isPut(req)) {
+ RestView<RestResource> createView = rc.views().get("gerrit", "CREATE./");
+ if (createView != null) {
+ viewData = new ViewData(null, createView);
+ status = SC_CREATED;
+ path.add(id);
+ } else {
+ throw e;
+ }
+ } else if (isDelete(req)) {
+ RestView<RestResource> deleteView = rc.views().get("gerrit", "DELETE_MISSING./");
+ if (deleteView != null) {
+ viewData = new ViewData(null, deleteView);
+ status = SC_NO_CONTENT;
+ path.add(id);
+ } else {
+ throw e;
+ }
} else {
throw e;
}
- } else if (isDelete(req)) {
- RestView<RestResource> deleteView = c.views().get("gerrit", "DELETE_MISSING./");
- if (deleteView != null) {
- viewData = new ViewData(null, deleteView);
- status = SC_NO_CONTENT;
- path.add(id);
- } else {
- throw e;
- }
- } else {
- throw e;
}
- }
- if (viewData.view == null) {
- viewData = view(c, req.getMethod(), path);
+ if (viewData.view == null) {
+ viewData = view(rc, req.getMethod(), path);
+ }
}
checkRequiresCapability(viewData);
- }
- if (notModified(req, rsrc, viewData.view)) {
- res.sendError(SC_NOT_MODIFIED);
- return;
- }
+ while (viewData.view instanceof RestCollection<?, ?>) {
+ @SuppressWarnings("unchecked")
+ RestCollection<RestResource, RestResource> c =
+ (RestCollection<RestResource, RestResource>) viewData.view;
- if (!globals.paramParser.get().parse(viewData.view, qp.params(), req, res)) {
- return;
- }
-
- if (viewData.view instanceof RestReadView<?> && isRead(req)) {
- result = ((RestReadView<RestResource>) viewData.view).apply(rsrc);
- } else if (viewData.view instanceof RestModifyView<?, ?>) {
- @SuppressWarnings("unchecked")
- RestModifyView<RestResource, Object> m =
- (RestModifyView<RestResource, Object>) viewData.view;
-
- Type type = inputType(m);
- inputRequestBody = parseRequest(req, type);
- result = m.apply(rsrc, inputRequestBody);
- if (inputRequestBody instanceof RawInput) {
- try (InputStream is = req.getInputStream()) {
- ServletUtils.consumeRequestBody(is);
+ if (path.isEmpty()) {
+ if (isRead(req)) {
+ viewData = new ViewData(null, c.list());
+ } else if (isPost(req)) {
+ RestView<RestResource> restCollectionView =
+ c.views().get(viewData.pluginName, "POST_ON_COLLECTION./");
+ if (restCollectionView != null) {
+ viewData = new ViewData(null, restCollectionView);
+ } else {
+ throw methodNotAllowed(req);
+ }
+ } else if (isDelete(req)) {
+ RestView<RestResource> restCollectionView =
+ c.views().get(viewData.pluginName, "DELETE_ON_COLLECTION./");
+ if (restCollectionView != null) {
+ viewData = new ViewData(null, restCollectionView);
+ } else {
+ throw methodNotAllowed(req);
+ }
+ } else {
+ throw methodNotAllowed(req);
+ }
+ break;
}
- }
- } else if (viewData.view instanceof RestCollectionCreateView<?, ?, ?>) {
- @SuppressWarnings("unchecked")
- RestCollectionCreateView<RestResource, RestResource, Object> m =
- (RestCollectionCreateView<RestResource, RestResource, Object>) viewData.view;
+ IdString id = path.remove(0);
+ try {
+ rsrc = c.parse(rsrc, id);
+ checkPreconditions(req);
+ viewData = new ViewData(null, null);
+ } catch (ResourceNotFoundException e) {
+ if (!path.isEmpty()) {
+ throw e;
+ }
- Type type = inputType(m);
- inputRequestBody = parseRequest(req, type);
- result = m.apply(rsrc, path.get(0), inputRequestBody);
- if (inputRequestBody instanceof RawInput) {
- try (InputStream is = req.getInputStream()) {
- ServletUtils.consumeRequestBody(is);
+ if (isPost(req) || isPut(req)) {
+ RestView<RestResource> createView = c.views().get("gerrit", "CREATE./");
+ if (createView != null) {
+ viewData = new ViewData(null, createView);
+ status = SC_CREATED;
+ path.add(id);
+ } else {
+ throw e;
+ }
+ } else if (isDelete(req)) {
+ RestView<RestResource> deleteView = c.views().get("gerrit", "DELETE_MISSING./");
+ if (deleteView != null) {
+ viewData = new ViewData(null, deleteView);
+ status = SC_NO_CONTENT;
+ path.add(id);
+ } else {
+ throw e;
+ }
+ } else {
+ throw e;
+ }
}
- }
- } else if (viewData.view instanceof RestCollectionDeleteMissingView<?, ?, ?>) {
- @SuppressWarnings("unchecked")
- RestCollectionDeleteMissingView<RestResource, RestResource, Object> m =
- (RestCollectionDeleteMissingView<RestResource, RestResource, Object>) viewData.view;
-
- Type type = inputType(m);
- inputRequestBody = parseRequest(req, type);
- result = m.apply(rsrc, path.get(0), inputRequestBody);
- if (inputRequestBody instanceof RawInput) {
- try (InputStream is = req.getInputStream()) {
- ServletUtils.consumeRequestBody(is);
+ if (viewData.view == null) {
+ viewData = view(c, req.getMethod(), path);
}
+ checkRequiresCapability(viewData);
}
- } else if (viewData.view instanceof RestCollectionModifyView<?, ?, ?>) {
- @SuppressWarnings("unchecked")
- RestCollectionModifyView<RestResource, RestResource, Object> m =
- (RestCollectionModifyView<RestResource, RestResource, Object>) viewData.view;
- Type type = inputType(m);
- inputRequestBody = parseRequest(req, type);
- result = m.apply(rsrc, inputRequestBody);
- if (inputRequestBody instanceof RawInput) {
- try (InputStream is = req.getInputStream()) {
- ServletUtils.consumeRequestBody(is);
+ if (notModified(req, rsrc, viewData.view)) {
+ res.sendError(SC_NOT_MODIFIED);
+ return;
+ }
+
+ if (!globals.paramParser.get().parse(viewData.view, qp.params(), req, res)) {
+ return;
+ }
+
+ if (viewData.view instanceof RestReadView<?> && isRead(req)) {
+ result = ((RestReadView<RestResource>) viewData.view).apply(rsrc);
+ } else if (viewData.view instanceof RestModifyView<?, ?>) {
+ @SuppressWarnings("unchecked")
+ RestModifyView<RestResource, Object> m =
+ (RestModifyView<RestResource, Object>) viewData.view;
+
+ Type type = inputType(m);
+ inputRequestBody = parseRequest(req, type);
+ result = m.apply(rsrc, inputRequestBody);
+ if (inputRequestBody instanceof RawInput) {
+ try (InputStream is = req.getInputStream()) {
+ ServletUtils.consumeRequestBody(is);
+ }
}
- }
- } else {
- throw new ResourceNotFoundException();
- }
+ } else if (viewData.view instanceof RestCollectionCreateView<?, ?, ?>) {
+ @SuppressWarnings("unchecked")
+ RestCollectionCreateView<RestResource, RestResource, Object> m =
+ (RestCollectionCreateView<RestResource, RestResource, Object>) viewData.view;
- if (result instanceof Response) {
- @SuppressWarnings("rawtypes")
- Response<?> r = (Response) result;
- status = r.statusCode();
- configureCaching(req, res, rsrc, viewData.view, r.caching());
- } else if (result instanceof Response.Redirect) {
- CacheHeaders.setNotCacheable(res);
- res.sendRedirect(((Response.Redirect) result).location());
- return;
- } else if (result instanceof Response.Accepted) {
- CacheHeaders.setNotCacheable(res);
- res.setStatus(SC_ACCEPTED);
- res.setHeader(HttpHeaders.LOCATION, ((Response.Accepted) result).location());
- return;
- } else {
- CacheHeaders.setNotCacheable(res);
- }
- res.setStatus(status);
+ Type type = inputType(m);
+ inputRequestBody = parseRequest(req, type);
+ result = m.apply(rsrc, path.get(0), inputRequestBody);
+ if (inputRequestBody instanceof RawInput) {
+ try (InputStream is = req.getInputStream()) {
+ ServletUtils.consumeRequestBody(is);
+ }
+ }
+ } else if (viewData.view instanceof RestCollectionDeleteMissingView<?, ?, ?>) {
+ @SuppressWarnings("unchecked")
+ RestCollectionDeleteMissingView<RestResource, RestResource, Object> m =
+ (RestCollectionDeleteMissingView<RestResource, RestResource, Object>) viewData.view;
- if (result != Response.none()) {
- result = Response.unwrap(result);
- if (result instanceof BinaryResult) {
- responseBytes = replyBinaryResult(req, res, (BinaryResult) result);
+ Type type = inputType(m);
+ inputRequestBody = parseRequest(req, type);
+ result = m.apply(rsrc, path.get(0), inputRequestBody);
+ if (inputRequestBody instanceof RawInput) {
+ try (InputStream is = req.getInputStream()) {
+ ServletUtils.consumeRequestBody(is);
+ }
+ }
+ } else if (viewData.view instanceof RestCollectionModifyView<?, ?, ?>) {
+ @SuppressWarnings("unchecked")
+ RestCollectionModifyView<RestResource, RestResource, Object> m =
+ (RestCollectionModifyView<RestResource, RestResource, Object>) viewData.view;
+
+ Type type = inputType(m);
+ inputRequestBody = parseRequest(req, type);
+ result = m.apply(rsrc, inputRequestBody);
+ if (inputRequestBody instanceof RawInput) {
+ try (InputStream is = req.getInputStream()) {
+ ServletUtils.consumeRequestBody(is);
+ }
+ }
} else {
- responseBytes = replyJson(req, res, qp.config(), result);
+ throw new ResourceNotFoundException();
}
- }
- } catch (MalformedJsonException | JsonParseException e) {
- responseBytes =
- replyError(req, res, status = SC_BAD_REQUEST, "Invalid " + JSON_TYPE + " in request", e);
- } catch (BadRequestException e) {
- responseBytes =
- replyError(
- req, res, status = SC_BAD_REQUEST, messageOr(e, "Bad Request"), e.caching(), e);
- } catch (AuthException e) {
- responseBytes =
- replyError(req, res, status = SC_FORBIDDEN, messageOr(e, "Forbidden"), e.caching(), e);
- } catch (AmbiguousViewException e) {
- responseBytes = replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Ambiguous"), e);
- } catch (ResourceNotFoundException e) {
- responseBytes =
- replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Not Found"), e.caching(), e);
- } catch (MethodNotAllowedException e) {
- responseBytes =
- replyError(
- req,
- res,
- status = SC_METHOD_NOT_ALLOWED,
- messageOr(e, "Method Not Allowed"),
- e.caching(),
- e);
- } catch (ResourceConflictException e) {
- responseBytes =
- replyError(req, res, status = SC_CONFLICT, messageOr(e, "Conflict"), e.caching(), e);
- } catch (PreconditionFailedException e) {
- responseBytes =
- replyError(
- req,
- res,
- status = SC_PRECONDITION_FAILED,
- messageOr(e, "Precondition Failed"),
- e.caching(),
- e);
- } catch (UnprocessableEntityException e) {
- responseBytes =
- replyError(
- req,
- res,
- status = SC_UNPROCESSABLE_ENTITY,
- messageOr(e, "Unprocessable Entity"),
- e.caching(),
- e);
- } catch (NotImplementedException e) {
- responseBytes =
- replyError(req, res, status = SC_NOT_IMPLEMENTED, messageOr(e, "Not Implemented"), e);
- } catch (UpdateException e) {
- Throwable t = e.getCause();
- if (t instanceof LockFailureException) {
+
+ if (result instanceof Response) {
+ @SuppressWarnings("rawtypes")
+ Response<?> r = (Response) result;
+ status = r.statusCode();
+ configureCaching(req, res, rsrc, viewData.view, r.caching());
+ } else if (result instanceof Response.Redirect) {
+ CacheHeaders.setNotCacheable(res);
+ String location = ((Response.Redirect) result).location();
+ res.sendRedirect(location);
+ logger.atFinest().log("REST call redirected to: %s", location);
+ return;
+ } else if (result instanceof Response.Accepted) {
+ CacheHeaders.setNotCacheable(res);
+ res.setStatus(SC_ACCEPTED);
+ res.setHeader(HttpHeaders.LOCATION, ((Response.Accepted) result).location());
+ logger.atFinest().log("REST call succeeded: %d", SC_ACCEPTED);
+ return;
+ } else {
+ CacheHeaders.setNotCacheable(res);
+ }
+ res.setStatus(status);
+ logger.atFinest().log("REST call succeeded: %d", status);
+
+ if (result != Response.none()) {
+ result = Response.unwrap(result);
+ if (result instanceof BinaryResult) {
+ responseBytes = replyBinaryResult(req, res, (BinaryResult) result);
+ } else {
+ responseBytes = replyJson(req, res, false, qp.config(), result);
+ }
+ }
+ } catch (MalformedJsonException | JsonParseException e) {
responseBytes =
- replyError(req, res, status = SC_SERVICE_UNAVAILABLE, messageOr(t, "Lock failure"), e);
- } else {
+ replyError(
+ req, res, status = SC_BAD_REQUEST, "Invalid " + JSON_TYPE + " in request", e);
+ } catch (BadRequestException e) {
+ responseBytes =
+ replyError(
+ req, res, status = SC_BAD_REQUEST, messageOr(e, "Bad Request"), e.caching(), e);
+ } catch (AuthException e) {
+ responseBytes =
+ replyError(req, res, status = SC_FORBIDDEN, messageOr(e, "Forbidden"), e.caching(), e);
+ } catch (AmbiguousViewException e) {
+ responseBytes = replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Ambiguous"), e);
+ } catch (ResourceNotFoundException e) {
+ responseBytes =
+ replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Not Found"), e.caching(), e);
+ } catch (MethodNotAllowedException e) {
+ responseBytes =
+ replyError(
+ req,
+ res,
+ status = SC_METHOD_NOT_ALLOWED,
+ messageOr(e, "Method Not Allowed"),
+ e.caching(),
+ e);
+ } catch (ResourceConflictException e) {
+ responseBytes =
+ replyError(req, res, status = SC_CONFLICT, messageOr(e, "Conflict"), e.caching(), e);
+ } catch (PreconditionFailedException e) {
+ responseBytes =
+ replyError(
+ req,
+ res,
+ status = SC_PRECONDITION_FAILED,
+ messageOr(e, "Precondition Failed"),
+ e.caching(),
+ e);
+ } catch (UnprocessableEntityException e) {
+ responseBytes =
+ replyError(
+ req,
+ res,
+ status = SC_UNPROCESSABLE_ENTITY,
+ messageOr(e, "Unprocessable Entity"),
+ e.caching(),
+ e);
+ } catch (NotImplementedException e) {
+ responseBytes =
+ replyError(req, res, status = SC_NOT_IMPLEMENTED, messageOr(e, "Not Implemented"), e);
+ } catch (UpdateException e) {
+ Throwable t = e.getCause();
+ if (t instanceof LockFailureException) {
+ responseBytes =
+ replyError(
+ req, res, status = SC_SERVICE_UNAVAILABLE, messageOr(t, "Lock failure"), e);
+ } else {
+ status = SC_INTERNAL_SERVER_ERROR;
+ responseBytes = handleException(e, req, res);
+ }
+ } catch (Exception e) {
status = SC_INTERNAL_SERVER_ERROR;
responseBytes = handleException(e, req, res);
+ } finally {
+ String metric =
+ viewData != null && viewData.view != null ? globals.metrics.view(viewData) : "_unknown";
+ globals.metrics.count.increment(metric);
+ if (status >= SC_BAD_REQUEST) {
+ globals.metrics.errorCount.increment(metric, status);
+ }
+ if (responseBytes != -1) {
+ globals.metrics.responseBytes.record(metric, responseBytes);
+ }
+ globals.metrics.serverLatency.record(
+ metric, System.nanoTime() - startNanos, TimeUnit.NANOSECONDS);
+ globals.auditService.dispatch(
+ new ExtendedHttpAuditEvent(
+ globals.webSession.get().getSessionId(),
+ globals.currentUser.get(),
+ req,
+ auditStartTs,
+ qp != null ? qp.params() : ImmutableListMultimap.of(),
+ inputRequestBody,
+ status,
+ result,
+ rsrc,
+ viewData == null ? null : viewData.view));
}
- } catch (Exception e) {
- status = SC_INTERNAL_SERVER_ERROR;
- responseBytes = handleException(e, req, res);
- } finally {
- String metric =
- viewData != null && viewData.view != null ? globals.metrics.view(viewData) : "_unknown";
- globals.metrics.count.increment(metric);
- if (status >= SC_BAD_REQUEST) {
- globals.metrics.errorCount.increment(metric, status);
- }
- if (responseBytes != -1) {
- globals.metrics.responseBytes.record(metric, responseBytes);
- }
- globals.metrics.serverLatency.record(
- metric, System.nanoTime() - startNanos, TimeUnit.NANOSECONDS);
- globals.auditService.dispatch(
- new ExtendedHttpAuditEvent(
- globals.webSession.get().getSessionId(),
- globals.currentUser.get(),
- req,
- auditStartTs,
- qp != null ? qp.params() : ImmutableListMultimap.of(),
- inputRequestBody,
- status,
- result,
- rsrc,
- viewData == null ? null : viewData.view));
}
}
@@ -968,9 +988,22 @@
throw new InstantiationException("Cannot make " + type);
}
+ /**
+ * Sets a JSON reply on the given HTTP servlet response.
+ *
+ * @param req the HTTP servlet request
+ * @param res the HTTP servlet response on which the reply should be set
+ * @param allowTracing whether it is allowed to log the reply if tracing is enabled, must not be
+ * set to {@code true} if the reply may contain sensitive data
+ * @param config config parameters for the JSON formatting
+ * @param result the object that should be formatted as JSON
+ * @return the length of the response
+ * @throws IOException
+ */
public static long replyJson(
@Nullable HttpServletRequest req,
HttpServletResponse res,
+ boolean allowTracing,
ListMultimap<String, String> config,
Object result)
throws IOException {
@@ -985,6 +1018,21 @@
}
w.write('\n');
w.flush();
+
+ if (allowTracing) {
+ logger.atFinest().log(
+ "JSON response body:\n%s",
+ lazy(
+ () -> {
+ try {
+ ByteArrayOutputStream debugOut = new ByteArrayOutputStream();
+ buf.writeTo(debugOut, null);
+ return debugOut.toString(UTF_8.name());
+ } catch (IOException e) {
+ return "<JSON formatting failed>";
+ }
+ }));
+ }
return replyBinaryResult(
req, res, asBinaryResult(buf).setContentType(JSON_TYPE).setCharacterEncoding(UTF_8));
}
@@ -1265,6 +1313,22 @@
}
}
+ private List<String> getParameterNames(HttpServletRequest req) {
+ List<String> parameterNames = new ArrayList<>(req.getParameterMap().keySet());
+ Collections.sort(parameterNames);
+ return parameterNames;
+ }
+
+ private TraceContext enableTracing(HttpServletRequest req, HttpServletResponse res) {
+ String v = req.getParameter(ParameterParser.TRACE_PARAMETER);
+ if (v != null && (v.isEmpty() || Boolean.parseBoolean(v))) {
+ RequestId traceId = new RequestId();
+ res.setHeader(X_GERRIT_TRACE, traceId.toString());
+ return TraceContext.open().addTag(RequestId.Type.TRACE_ID, traceId);
+ }
+ return TraceContext.DISABLED;
+ }
+
private boolean isDelete(HttpServletRequest req) {
return "DELETE".equals(req.getMethod());
}
@@ -1341,17 +1405,34 @@
configureCaching(req, res, null, null, c);
checkArgument(statusCode >= 400, "non-error status: %s", statusCode);
res.setStatus(statusCode);
- return replyText(req, res, msg);
+ logger.atFinest().log("REST call failed: %d", statusCode);
+ return replyText(req, res, true, msg);
}
- static long replyText(@Nullable HttpServletRequest req, HttpServletResponse res, String text)
+ /**
+ * Sets a text reply on the given HTTP servlet response.
+ *
+ * @param req the HTTP servlet request
+ * @param res the HTTP servlet response on which the reply should be set
+ * @param allowTracing whether it is allowed to log the reply if tracing is enabled, must not be
+ * set to {@code true} if the reply may contain sensitive data
+ * @param text the text reply
+ * @return the length of the response
+ * @throws IOException
+ */
+ static long replyText(
+ @Nullable HttpServletRequest req, HttpServletResponse res, boolean allowTracing, String text)
throws IOException {
if ((req == null || isRead(req)) && isMaybeHTML(text)) {
- return replyJson(req, res, ImmutableListMultimap.of("pp", "0"), new JsonPrimitive(text));
+ return replyJson(
+ req, res, allowTracing, ImmutableListMultimap.of("pp", "0"), new JsonPrimitive(text));
}
if (!text.endsWith("\n")) {
text += "\n";
}
+ if (allowTracing) {
+ logger.atFinest().log("Text response body:\n%s", text);
+ }
return replyBinaryResult(req, res, BinaryResult.create(text).setContentType(PLAIN_TEXT));
}
diff --git a/java/com/google/gerrit/index/project/ProjectData.java b/java/com/google/gerrit/index/project/ProjectData.java
index 7365660..fb029ac 100644
--- a/java/com/google/gerrit/index/project/ProjectData.java
+++ b/java/com/google/gerrit/index/project/ProjectData.java
@@ -16,6 +16,7 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
+import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.reviewdb.client.Project;
import java.util.ArrayList;
@@ -53,4 +54,11 @@
public ImmutableList<String> getParentNames() {
return tree().stream().skip(1).map(p -> p.getProject().getName()).collect(toImmutableList());
}
+
+ @Override
+ public String toString() {
+ MoreObjects.ToStringHelper h = MoreObjects.toStringHelper(this);
+ h.addValue(project.getName());
+ return h.toString();
+ }
}
diff --git a/java/com/google/gerrit/index/query/QueryProcessor.java b/java/com/google/gerrit/index/query/QueryProcessor.java
index 27ed72f..1a42ebd 100644
--- a/java/com/google/gerrit/index/query/QueryProcessor.java
+++ b/java/com/google/gerrit/index/query/QueryProcessor.java
@@ -22,11 +22,13 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Ordering;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.index.Index;
import com.google.gerrit.index.IndexCollection;
import com.google.gerrit.index.IndexConfig;
import com.google.gerrit.index.IndexRewriter;
+import com.google.gerrit.index.IndexedQuery;
import com.google.gerrit.index.QueryOptions;
import com.google.gerrit.index.SchemaDefinitions;
import com.google.gerrit.metrics.Description;
@@ -52,6 +54,8 @@
* holding on to a single instance.
*/
public abstract class QueryProcessor<T> {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
protected static class Metrics {
final Timer1<String> executionTime;
@@ -206,6 +210,7 @@
List<Integer> limits = new ArrayList<>(cnt);
List<Predicate<T>> predicates = new ArrayList<>(cnt);
List<DataSource<T>> sources = new ArrayList<>(cnt);
+ int queryCount = 0;
for (Predicate<T> q : queries) {
int limit = getEffectiveLimit(q);
limits.add(limit);
@@ -224,11 +229,17 @@
// max for this user. The only way to see if there are more entities is to
// ask for one more result from the query.
QueryOptions opts = createOptions(indexConfig, start, limit + 1, getRequestedFields());
+ logger.atFine().log("Query options: " + opts);
Predicate<T> pred = rewriter.rewrite(q, opts);
if (enforceVisibility) {
pred = enforceVisibility(pred);
}
predicates.add(pred);
+ logger.atFine().log(
+ "%s index query[%d]:\n%s",
+ schemaDef.getName(),
+ queryCount++,
+ pred instanceof IndexedQuery ? pred.getChild(0) : pred);
@SuppressWarnings("unchecked")
DataSource<T> s = (DataSource<T>) pred;
@@ -243,12 +254,14 @@
out = new ArrayList<>(cnt);
for (int i = 0; i < cnt; i++) {
+ List<T> matchesList = matches.get(i).toList();
+ logger.atFine().log("Matches[%d]:\n%s", i, matchesList);
out.add(
QueryResult.create(
queryStrings != null ? queryStrings.get(i) : null,
predicates.get(i),
limits.get(i),
- matches.get(i).toList()));
+ matchesList));
}
// Only measure successful queries that actually touched the index.
diff --git a/java/com/google/gerrit/lucene/AbstractLuceneIndex.java b/java/com/google/gerrit/lucene/AbstractLuceneIndex.java
index 3871ced..12f88d5 100644
--- a/java/com/google/gerrit/lucene/AbstractLuceneIndex.java
+++ b/java/com/google/gerrit/lucene/AbstractLuceneIndex.java
@@ -39,6 +39,7 @@
import com.google.gerrit.index.query.FieldBundle;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.index.IndexUtils;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.ResultSet;
import java.io.IOException;
@@ -131,6 +132,7 @@
new ScheduledThreadPoolExecutor(
1,
new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
.setNameFormat(index + " Commit-%d")
.setDaemon(true)
.build());
@@ -171,6 +173,7 @@
Executors.newFixedThreadPool(
1,
new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
.setNameFormat(index + " Write-%d")
.setDaemon(true)
.build()));
diff --git a/java/com/google/gerrit/server/StarredChangesUtil.java b/java/com/google/gerrit/server/StarredChangesUtil.java
index ec8620d..fa6cd6c 100644
--- a/java/com/google/gerrit/server/StarredChangesUtil.java
+++ b/java/com/google/gerrit/server/StarredChangesUtil.java
@@ -376,6 +376,8 @@
}
public static StarRef readLabels(Repository repo, String refName) throws IOException {
+ logger.atFine().log("Read star labels from %s", refName);
+
Ref ref = repo.exactRef(refName);
if (ref == null) {
return StarRef.MISSING;
@@ -448,6 +450,7 @@
private void updateLabels(
Repository repo, String refName, ObjectId oldObjectId, Collection<String> labels)
throws IOException, OrmException, InvalidLabelsException {
+ logger.atFine().log("Update star labels in %s (labels=%s)", refName, labels);
try (RevWalk rw = new RevWalk(repo)) {
RefUpdate u = repo.updateRef(refName);
u.setExpectedOldObjectId(oldObjectId);
@@ -485,6 +488,7 @@
return;
}
+ logger.atFine().log("Delete star labels in %s", refName);
RefUpdate u = repo.updateRef(refName);
u.setForceUpdate(true);
u.setExpectedOldObjectId(oldObjectId);
diff --git a/java/com/google/gerrit/server/account/AccountCacheImpl.java b/java/com/google/gerrit/server/account/AccountCacheImpl.java
index 76bfcfd..c74f9d4 100644
--- a/java/com/google/gerrit/server/account/AccountCacheImpl.java
+++ b/java/com/google/gerrit/server/account/AccountCacheImpl.java
@@ -179,6 +179,7 @@
@Override
public Optional<AccountState> load(Account.Id who) throws Exception {
+ logger.atFine().log("Loading account %s", who);
return accounts.get(who);
}
}
diff --git a/java/com/google/gerrit/server/account/AccountState.java b/java/com/google/gerrit/server/account/AccountState.java
index e56ad72..1854dc1 100644
--- a/java/com/google/gerrit/server/account/AccountState.java
+++ b/java/com/google/gerrit/server/account/AccountState.java
@@ -18,6 +18,7 @@
import static com.google.gerrit.server.account.externalids.ExternalId.SCHEME_USERNAME;
import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
@@ -315,4 +316,11 @@
}
return properties;
}
+
+ @Override
+ public String toString() {
+ MoreObjects.ToStringHelper h = MoreObjects.toStringHelper(this);
+ h.addValue(getAccount().getId());
+ return h.toString();
+ }
}
diff --git a/java/com/google/gerrit/server/account/GroupCacheImpl.java b/java/com/google/gerrit/server/account/GroupCacheImpl.java
index e7aae15..06b51a7 100644
--- a/java/com/google/gerrit/server/account/GroupCacheImpl.java
+++ b/java/com/google/gerrit/server/account/GroupCacheImpl.java
@@ -144,6 +144,7 @@
@Override
public Optional<InternalGroup> load(AccountGroup.Id key) throws Exception {
+ logger.atFine().log("Loading group %s by ID", key);
return groupQueryProvider.get().byId(key);
}
}
@@ -158,6 +159,7 @@
@Override
public Optional<InternalGroup> load(String name) throws Exception {
+ logger.atFine().log("Loading group '%s' by name", name);
return groupQueryProvider.get().byName(new AccountGroup.NameKey(name));
}
}
@@ -172,6 +174,7 @@
@Override
public Optional<InternalGroup> load(String uuid) throws Exception {
+ logger.atFine().log("Loading group %s by UUID", uuid);
return groups.getGroup(new AccountGroup.UUID(uuid));
}
}
diff --git a/java/com/google/gerrit/server/account/GroupIncludeCacheImpl.java b/java/com/google/gerrit/server/account/GroupIncludeCacheImpl.java
index f262a79..5906a06 100644
--- a/java/com/google/gerrit/server/account/GroupIncludeCacheImpl.java
+++ b/java/com/google/gerrit/server/account/GroupIncludeCacheImpl.java
@@ -148,6 +148,7 @@
@Override
public ImmutableSet<AccountGroup.UUID> load(Account.Id memberId) throws OrmException {
+ logger.atFine().log("Loading groups with member %s", memberId);
return groupQueryProvider
.get()
.byMember(memberId)
@@ -168,6 +169,7 @@
@Override
public ImmutableList<AccountGroup.UUID> load(AccountGroup.UUID key) throws OrmException {
+ logger.atFine().log("Loading parent groups of %s", key);
return groupQueryProvider
.get()
.bySubgroup(key)
@@ -187,6 +189,7 @@
@Override
public ImmutableList<AccountGroup.UUID> load(String key) throws Exception {
+ logger.atFine().log("Loading all external groups");
return groups.getExternalGroups().collect(toImmutableList());
}
}
diff --git a/java/com/google/gerrit/server/account/externalids/ExternalIdCacheImpl.java b/java/com/google/gerrit/server/account/externalids/ExternalIdCacheImpl.java
index 533b1c0..dc1a873 100644
--- a/java/com/google/gerrit/server/account/externalids/ExternalIdCacheImpl.java
+++ b/java/com/google/gerrit/server/account/externalids/ExternalIdCacheImpl.java
@@ -152,6 +152,7 @@
@Override
public AllExternalIds load(ObjectId notesRev) throws Exception {
+ logger.atFine().log("Loading external IDs (revision=%s)", notesRev);
Multimap<Account.Id, ExternalId> extIdsByAccount =
MultimapBuilder.hashKeys().arrayListValues().build();
for (ExternalId extId : externalIdReader.all(notesRev)) {
diff --git a/java/com/google/gerrit/server/account/externalids/ExternalIdNotes.java b/java/com/google/gerrit/server/account/externalids/ExternalIdNotes.java
index 7cd1db0..b049c40 100644
--- a/java/com/google/gerrit/server/account/externalids/ExternalIdNotes.java
+++ b/java/com/google/gerrit/server/account/externalids/ExternalIdNotes.java
@@ -615,6 +615,8 @@
@Override
protected void onLoad() throws IOException, ConfigInvalidException {
+ logger.atFine().log("Reading external IDs");
+
noteMap = revision != null ? NoteMap.read(reader, revision) : NoteMap.newEmptyMap();
if (afterReadRevision != null) {
@@ -701,6 +703,8 @@
return false;
}
+ logger.atFine().log("Updating external IDs");
+
if (Strings.isNullOrEmpty(commit.getMessage())) {
commit.setMessage("Update external IDs\n");
}
diff --git a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
index 8d12d32..ede8050 100644
--- a/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
+++ b/java/com/google/gerrit/server/auth/ldap/LdapRealm.java
@@ -351,6 +351,7 @@
@Override
public Optional<Account.Id> load(String username) throws Exception {
+ logger.atFine().log("Loading account for username %s", username);
return externalIds
.get(ExternalId.Key.create(SCHEME_GERRIT, username))
.map(ExternalId::accountId);
@@ -367,6 +368,7 @@
@Override
public Set<AccountGroup.UUID> load(String username) throws Exception {
+ logger.atFine().log("Loading group for member with username %s", username);
final DirContext ctx = helper.open();
try {
return helper.queryForGroups(ctx, username, null);
@@ -386,6 +388,7 @@
@Override
public Boolean load(String groupDn) throws Exception {
+ logger.atFine().log("Loading groupDn %s", groupDn);
final DirContext ctx = helper.open();
try {
Name compositeGroupName = new CompositeName().add(groupDn);
diff --git a/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java b/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
index 9abccbc..a7824ea 100644
--- a/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
+++ b/java/com/google/gerrit/server/cache/h2/H2CacheFactory.java
@@ -28,6 +28,7 @@
import com.google.gerrit.server.cache.h2.H2CacheImpl.ValueHolder;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
@@ -75,11 +76,16 @@
if (cacheDir != null) {
executor =
Executors.newFixedThreadPool(
- 1, new ThreadFactoryBuilder().setNameFormat("DiskCache-Store-%d").build());
+ 1,
+ new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
+ .setNameFormat("DiskCache-Store-%d")
+ .build());
cleanup =
Executors.newScheduledThreadPool(
1,
new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
.setNameFormat("DiskCache-Prune-%d")
.setDaemon(true)
.build());
diff --git a/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java b/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
index 6878e46..5620355 100644
--- a/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
+++ b/java/com/google/gerrit/server/cache/h2/H2CacheImpl.java
@@ -235,6 +235,8 @@
@Override
public ValueHolder<V> load(K key) throws Exception {
+ logger.atFine().log("Loading value for %s from cache", key);
+
if (store.mightContain(key)) {
ValueHolder<V> h = store.getIfPresent(key);
if (h != null) {
diff --git a/java/com/google/gerrit/server/config/SysExecutorModule.java b/java/com/google/gerrit/server/config/SysExecutorModule.java
index 2e97c06..2e97a58 100644
--- a/java/com/google/gerrit/server/config/SysExecutorModule.java
+++ b/java/com/google/gerrit/server/config/SysExecutorModule.java
@@ -19,6 +19,7 @@
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.gerrit.server.FanOutExecutor;
import com.google.gerrit.server.git.WorkQueue;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
@@ -89,7 +90,11 @@
10,
TimeUnit.MINUTES,
new ArrayBlockingQueue<Runnable>(poolSize),
- new ThreadFactoryBuilder().setNameFormat("ChangeUpdate-%d").setDaemon(true).build(),
+ new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
+ .setNameFormat("ChangeUpdate-%d")
+ .setDaemon(true)
+ .build(),
new ThreadPoolExecutor.CallerRunsPolicy())));
}
}
diff --git a/java/com/google/gerrit/server/git/SearchingChangeCacheImpl.java b/java/com/google/gerrit/server/git/SearchingChangeCacheImpl.java
index 1b83097..5f1d8c6 100644
--- a/java/com/google/gerrit/server/git/SearchingChangeCacheImpl.java
+++ b/java/com/google/gerrit/server/git/SearchingChangeCacheImpl.java
@@ -144,6 +144,7 @@
@Override
public List<CachedChange> load(Project.NameKey key) throws Exception {
+ logger.atFine().log("Loading changes of project %s", key);
try (ManualRequestContext ctx = requestContext.open()) {
List<ChangeData> cds =
queryProvider
diff --git a/java/com/google/gerrit/server/git/WorkQueue.java b/java/com/google/gerrit/server/git/WorkQueue.java
index 98a1823..a2c12df 100644
--- a/java/com/google/gerrit/server/git/WorkQueue.java
+++ b/java/com/google/gerrit/server/git/WorkQueue.java
@@ -24,6 +24,7 @@
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.ScheduleConfig.Schedule;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.gerrit.server.util.IdGenerator;
import com.google.inject.Inject;
import com.google.inject.Singleton;
@@ -165,11 +166,12 @@
if (threadPriority != Thread.NORM_PRIORITY) {
ThreadFactory parent = executor.getThreadFactory();
executor.setThreadFactory(
- task -> {
- Thread t = parent.newThread(task);
- t.setPriority(threadPriority);
- return t;
- });
+ new LoggingContextAwareThreadFactory(
+ task -> {
+ Thread t = parent.newThread(task);
+ t.setPriority(threadPriority);
+ return t;
+ }));
}
return executor;
@@ -251,18 +253,19 @@
Executor(int corePoolSize, final String queueName) {
super(
corePoolSize,
- new ThreadFactory() {
- private final ThreadFactory parent = Executors.defaultThreadFactory();
- private final AtomicInteger tid = new AtomicInteger(1);
+ new LoggingContextAwareThreadFactory(
+ new ThreadFactory() {
+ private final ThreadFactory parent = Executors.defaultThreadFactory();
+ private final AtomicInteger tid = new AtomicInteger(1);
- @Override
- public Thread newThread(Runnable task) {
- final Thread t = parent.newThread(task);
- t.setName(queueName + "-" + tid.getAndIncrement());
- t.setUncaughtExceptionHandler(LOG_UNCAUGHT_EXCEPTION);
- return t;
- }
- });
+ @Override
+ public Thread newThread(Runnable task) {
+ final Thread t = parent.newThread(task);
+ t.setName(queueName + "-" + tid.getAndIncrement());
+ t.setUncaughtExceptionHandler(LOG_UNCAUGHT_EXCEPTION);
+ return t;
+ }
+ }));
all =
new ConcurrentHashMap<>( //
diff --git a/java/com/google/gerrit/server/git/receive/ReceiveCommits.java b/java/com/google/gerrit/server/git/receive/ReceiveCommits.java
index 26795cb..ea66374 100644
--- a/java/com/google/gerrit/server/git/receive/ReceiveCommits.java
+++ b/java/com/google/gerrit/server/git/receive/ReceiveCommits.java
@@ -61,7 +61,6 @@
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.common.data.LabelType;
import com.google.gerrit.common.data.LabelTypes;
-import com.google.gerrit.common.data.PermissionRule;
import com.google.gerrit.extensions.api.changes.HashtagsInput;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.api.changes.RecipientType;
@@ -100,7 +99,6 @@
import com.google.gerrit.server.change.ChangeInserter;
import com.google.gerrit.server.change.SetHashtagsOp;
import com.google.gerrit.server.config.AllProjectsName;
-import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.PluginConfig;
import com.google.gerrit.server.config.ProjectConfigEntry;
@@ -123,6 +121,7 @@
import com.google.gerrit.server.git.validators.RefOperationValidators;
import com.google.gerrit.server.git.validators.ValidationMessage;
import com.google.gerrit.server.index.change.ChangeIndexer;
+import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.mail.MailUtil.MailRecipients;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.NotesMigration;
@@ -132,6 +131,7 @@
import com.google.gerrit.server.permissions.GlobalPermission;
import com.google.gerrit.server.permissions.PermissionBackend;
import com.google.gerrit.server.permissions.PermissionBackendException;
+import com.google.gerrit.server.permissions.PermissionDeniedException;
import com.google.gerrit.server.permissions.ProjectPermission;
import com.google.gerrit.server.permissions.RefPermission;
import com.google.gerrit.server.project.CreateRefControl;
@@ -248,6 +248,10 @@
}
}
+ private static final String CANNOT_DELETE_CHANGES = "Cannot delete from '" + REFS_CHANGES + "'";
+ private static final String CANNOT_DELETE_CONFIG =
+ "Cannot delete project configuration from '" + RefNames.REFS_CONFIG + "'";
+
interface Factory {
ReceiveCommits create(
ProjectState projectState,
@@ -347,7 +351,6 @@
private final SshInfo sshInfo;
private final SubmoduleOp.Factory subOpFactory;
private final TagCache tagCache;
- private final String canonicalWebUrl;
// Assisted injected fields.
private final AllRefsWatcher allRefsWatcher;
@@ -363,12 +366,13 @@
private final PermissionBackend.ForProject permissions;
private final Project project;
private final Repository repo;
- private final RequestId receiveId;
// Collections populated during processing.
private final List<UpdateGroupsRequest> updateGroups;
private final List<ValidationMessage> messages;
- private final ListMultimap<ReceiveError, String> errors;
+ /** Multimap of error text to refnames that produced that error. */
+ private final ListMultimap<String, String> errors;
+
private final ListMultimap<String, String> pushOptions;
private final Map<Change.Id, ReplaceRequest> replaceByChange;
@@ -400,6 +404,7 @@
private String setFullNameTo;
private boolean setChangeAsPrivate;
private Optional<NoteDbPushOption> noteDbPushOption;
+ private Optional<Boolean> tracePushOption;
// Handles for outputting back over the wire to the end user.
private Task newProgress;
@@ -446,7 +451,6 @@
SshInfo sshInfo,
SubmoduleOp.Factory subOpFactory,
TagCache tagCache,
- @CanonicalWebUrl @Nullable String canonicalWebUrl,
@Assisted ProjectState projectState,
@Assisted IdentifiedUser user,
@Assisted ReceivePack rp,
@@ -504,9 +508,7 @@
project = projectState.getProject();
labelTypes = projectState.getLabelTypes();
permissions = permissionBackend.user(user).project(project.getNameKey());
- receiveId = RequestId.forProject(project.getNameKey());
rejectCommits = BanCommit.loadRejectCommitsMap(rp.getRepository(), rp.getRevWalk());
- this.canonicalWebUrl = canonicalWebUrl;
// Collections populated during processing.
actualCommands = new ArrayList<>();
@@ -563,84 +565,128 @@
closeProgress = progress.beginSubTask("closed", UNKNOWN);
commandProgress = progress.beginSubTask("refs", UNKNOWN);
- try {
- parsePushOptions();
- logDebug("Parsing %d commands", commands.size());
- for (ReceiveCommand cmd : commands) {
+ parsePushOptions();
+ try (TraceContext traceContext =
+ TraceContext.open()
+ .addTag(RequestId.Type.RECEIVE_ID, RequestId.forProject(project.getNameKey()))) {
+ if (tracePushOption.orElse(false)) {
+ RequestId traceId = new RequestId();
+ traceContext.addTag(RequestId.Type.TRACE_ID, traceId);
+ addMessage(RequestId.Type.TRACE_ID.name() + ": " + traceId);
+ }
+ try {
if (!projectState.getProject().getState().permitsWrite()) {
- reject(cmd, "prohibited by Gerrit: project state does not permit write");
- break;
+ for (ReceiveCommand cmd : commands) {
+ reject(cmd, "prohibited by Gerrit: project state does not permit write");
+ }
+ return;
}
- parseCommand(cmd);
+
+ logger.atFine().log("Parsing %d commands", commands.size());
+
+ List<ReceiveCommand> magicCommands = new ArrayList<>();
+ List<ReceiveCommand> directPatchSetPushCommands = new ArrayList<>();
+ List<ReceiveCommand> regularCommands = new ArrayList<>();
+
+ for (ReceiveCommand cmd : commands) {
+ if (MagicBranch.isMagicBranch(cmd.getRefName())) {
+ magicCommands.add(cmd);
+ } else if (isDirectChangesPush(cmd.getRefName())) {
+ directPatchSetPushCommands.add(cmd);
+ } else {
+ regularCommands.add(cmd);
+ }
+ }
+
+ for (ReceiveCommand cmd : regularCommands) {
+ parseRegularCommand(cmd);
+ }
+
+ for (ReceiveCommand cmd : directPatchSetPushCommands) {
+ parseDirectChangesPush(cmd);
+ }
+
+ // Process the magicCommand last, so magicBranch settings can't interact with regular
+ // commands.
+ boolean first = true;
+ for (ReceiveCommand cmd : magicCommands) {
+ if (first) {
+ parseMagicBranch(cmd);
+ first = false;
+ } else {
+ reject(cmd, "duplicate request");
+ }
+ }
+ } catch (PermissionBackendException | NoSuchProjectException | IOException err) {
+ for (ReceiveCommand cmd : actualCommands) {
+ if (cmd.getResult() == NOT_ATTEMPTED) {
+ cmd.setResult(REJECTED_OTHER_REASON, "internal server error");
+ }
+ logger.atSevere().withCause(err).log("Failed to process refs in %s", project.getName());
+ }
+ return;
}
- } catch (PermissionBackendException | NoSuchProjectException | IOException err) {
- for (ReceiveCommand cmd : actualCommands) {
- if (cmd.getResult() == NOT_ATTEMPTED) {
- cmd.setResult(REJECTED_OTHER_REASON, "internal server error");
+
+ List<CreateRequest> newChanges = Collections.emptyList();
+ if (magicBranch != null && magicBranch.cmd.getResult() == NOT_ATTEMPTED) {
+ newChanges = selectNewAndReplacedChangesFromMagicBranch();
+ }
+ preparePatchSetsForReplace(newChanges);
+ insertChangesAndPatchSets(newChanges);
+ newProgress.end();
+ replaceProgress.end();
+
+ if (!errors.isEmpty()) {
+ logger.atFine().log("Handling error conditions: %s", errors.keySet());
+ for (String error : errors.keySet()) {
+ receivePack.sendMessage(buildError(error, errors.get(error)));
+ }
+ receivePack.sendMessage(String.format("User: %s", user.getLoggableName()));
+ receivePack.sendMessage(COMMAND_REJECTION_MESSAGE_FOOTER);
+ }
+
+ Set<Branch.NameKey> branches = new HashSet<>();
+ for (ReceiveCommand c : actualCommands) {
+ // Most post-update steps should happen in UpdateOneRefOp#postUpdate. The only steps that
+ // should happen in this loop are things that can't happen within one BatchUpdate because
+ // they involve kicking off an additional BatchUpdate.
+ if (c.getResult() != OK) {
+ continue;
+ }
+ if (isHead(c) || isConfig(c)) {
+ switch (c.getType()) {
+ case CREATE:
+ case UPDATE:
+ case UPDATE_NONFASTFORWARD:
+ autoCloseChanges(c);
+ branches.add(new Branch.NameKey(project.getNameKey(), c.getRefName()));
+ break;
+
+ case DELETE:
+ break;
+ }
}
}
- logError(String.format("Failed to process refs in %s", project.getName()), err);
- }
- List<CreateRequest> newChanges = Collections.emptyList();
- if (magicBranch != null && magicBranch.cmd.getResult() == NOT_ATTEMPTED) {
- newChanges = selectNewAndReplacedChangesFromMagicBranch();
- }
- preparePatchSetsForReplace(newChanges);
- insertChangesAndPatchSets(newChanges);
- newProgress.end();
- replaceProgress.end();
-
- if (!errors.isEmpty()) {
- logDebug("Handling error conditions: %s", errors.keySet());
- for (ReceiveError error : errors.keySet()) {
- receivePack.sendMessage(buildError(error, errors.get(error)));
- }
- receivePack.sendMessage(String.format("User: %s", user.getLoggableName()));
- receivePack.sendMessage(COMMAND_REJECTION_MESSAGE_FOOTER);
- }
-
- Set<Branch.NameKey> branches = new HashSet<>();
- for (ReceiveCommand c : actualCommands) {
- // Most post-update steps should happen in UpdateOneRefOp#postUpdate. The only steps that
- // should happen in this loop are things that can't happen within one BatchUpdate because they
- // involve kicking off an additional BatchUpdate.
- if (c.getResult() != OK) {
- continue;
- }
- if (isHead(c) || isConfig(c)) {
- switch (c.getType()) {
- case CREATE:
- case UPDATE:
- case UPDATE_NONFASTFORWARD:
- autoCloseChanges(c);
- branches.add(new Branch.NameKey(project.getNameKey(), c.getRefName()));
- break;
-
- case DELETE:
- break;
+ // Update superproject gitlinks if required.
+ if (!branches.isEmpty()) {
+ try (MergeOpRepoManager orm = ormProvider.get()) {
+ orm.setContext(db, TimeUtil.nowTs(), user);
+ SubmoduleOp op = subOpFactory.create(branches, orm);
+ op.updateSuperProjects();
+ } catch (SubmoduleException e) {
+ logger.atSevere().withCause(e).log("Can't update the superprojects");
}
}
+
+ // Update account info with details discovered during commit walking.
+ updateAccountInfo();
+
+ closeProgress.end();
+ commandProgress.end();
+ progress.end();
+ reportMessages(newChanges);
}
-
- // Update superproject gitlinks if required.
- if (!branches.isEmpty()) {
- try (MergeOpRepoManager orm = ormProvider.get()) {
- orm.setContext(db, TimeUtil.nowTs(), user, receiveId);
- SubmoduleOp op = subOpFactory.create(branches, orm);
- op.updateSuperProjects();
- } catch (SubmoduleException e) {
- logError("Can't update the superprojects", e);
- }
- }
-
- // Update account info with details discovered during commit walking.
- updateAccountInfo();
-
- closeProgress.end();
- commandProgress.end();
- progress.end();
- reportMessages(newChanges);
}
private void reportMessages(List<CreateRequest> newChanges) {
@@ -689,7 +735,7 @@
subject = receivePack.getRevWalk().parseCommit(u.newCommitId).getShortMessage();
} catch (IOException e) {
// Log and fall back to original change subject
- logWarn("failed to get subject for edit patch set", e);
+ logger.atWarning().withCause(e).log("failed to get subject for edit patch set");
subject = u.notes.getChange().getSubject();
}
} else {
@@ -725,12 +771,11 @@
private void insertChangesAndPatchSets(List<CreateRequest> newChanges) {
ReceiveCommand magicBranchCmd = magicBranch != null ? magicBranch.cmd : null;
if (magicBranchCmd != null && magicBranchCmd.getResult() != NOT_ATTEMPTED) {
- logWarn(
- String.format(
- "Skipping change updates on %s because ref update failed: %s %s",
- project.getName(),
- magicBranchCmd.getResult(),
- Strings.nullToEmpty(magicBranchCmd.getMessage())));
+ logger.atWarning().log(
+ "Skipping change updates on %s because ref update failed: %s %s",
+ project.getName(),
+ magicBranchCmd.getResult(),
+ Strings.nullToEmpty(magicBranchCmd.getMessage()));
return;
}
@@ -741,26 +786,25 @@
ObjectReader reader = ins.newReader();
RevWalk rw = new RevWalk(reader)) {
bu.setRepository(repo, rw, ins).updateChangesInParallel();
- bu.setRequestId(receiveId);
bu.setRefLogMessage("push");
- logDebug("Adding %d replace requests", newChanges.size());
+ logger.atFine().log("Adding %d replace requests", newChanges.size());
for (ReplaceRequest replace : replaceByChange.values()) {
replace.addOps(bu, replaceProgress);
}
- logDebug("Adding %d create requests", newChanges.size());
+ logger.atFine().log("Adding %d create requests", newChanges.size());
for (CreateRequest create : newChanges) {
create.addOps(bu);
}
- logDebug("Adding %d group update requests", newChanges.size());
+ logger.atFine().log("Adding %d group update requests", newChanges.size());
updateGroups.forEach(r -> r.addOps(bu));
- logDebug("Adding %d additional ref updates", actualCommands.size());
+ logger.atFine().log("Adding %d additional ref updates", actualCommands.size());
actualCommands.forEach(c -> bu.addRepoOnlyOp(new UpdateOneRefOp(c)));
- logDebug("Executing batch");
+ logger.atFine().log("Executing batch");
try {
bu.execute();
} catch (UpdateException e) {
@@ -777,7 +821,7 @@
replace.inputCommand.setResult(OK);
}
} else {
- logDebug("Rejecting due to message from ReplaceOp");
+ logger.atFine().log("Rejecting due to message from ReplaceOp");
reject(replace.inputCommand, rejectMessage);
}
}
@@ -786,7 +830,8 @@
addMessage(e.getMessage());
reject(magicBranchCmd, "conflict");
} catch (RestApiException | IOException err) {
- logError("Can't insert change/patch set for " + project.getName(), err);
+ logger.atSevere().withCause(err).log(
+ "Can't insert change/patch set for %s", project.getName());
reject(magicBranchCmd, "internal server error: " + err.getMessage());
}
@@ -802,17 +847,17 @@
| IOException
| ConfigInvalidException
| PermissionBackendException e) {
- logError("Error submitting changes to " + project.getName(), e);
+ logger.atSevere().withCause(e).log("Error submitting changes to %s", project.getName());
reject(magicBranchCmd, "error during submit");
}
}
}
- private String buildError(ReceiveError error, List<String> branches) {
+ private String buildError(String error, List<String> branches) {
StringBuilder sb = new StringBuilder();
if (branches.size() == 1) {
sb.append("Branch ").append(branches.get(0)).append(":\n");
- sb.append(error.get());
+ sb.append(error);
return sb.toString();
}
sb.append("Branches");
@@ -821,7 +866,7 @@
sb.append(delim).append(branch);
delim = ", ";
}
- return sb.append(":\n").append(error.get()).toString();
+ return sb.append(":\n").append(error).toString();
}
/** Parses push options specified as "git push -o OPTION" */
@@ -850,40 +895,22 @@
} else {
noteDbPushOption = Optional.of(NoteDbPushOption.DISALLOW);
}
+
+ List<String> traceValues = pushOptions.get("trace");
+ if (!traceValues.isEmpty()) {
+ String value = traceValues.get(traceValues.size() - 1);
+ tracePushOption = Optional.of(value.isEmpty() || Boolean.parseBoolean(value));
+ } else {
+ tracePushOption = Optional.empty();
+ }
}
- private void parseCommand(ReceiveCommand cmd)
- throws PermissionBackendException, NoSuchProjectException, IOException {
- if (cmd.getResult() != NOT_ATTEMPTED) {
- // Already rejected by the core receive process.
- logDebug("Already processed by core: %s %s", cmd.getResult(), cmd);
- return;
- }
+ private static boolean isDirectChangesPush(String refname) {
+ Matcher m = NEW_PATCHSET_PATTERN.matcher(refname);
+ return m.matches();
+ }
- if (!Repository.isValidRefName(cmd.getRefName()) || cmd.getRefName().contains("//")) {
- reject(cmd, "not valid ref");
- return;
- }
-
- if (MagicBranch.isMagicBranch(cmd.getRefName())) {
- parseMagicBranch(cmd);
- return;
- }
-
- if (projectState.isAllUsers() && RefNames.REFS_USERS_SELF.equals(cmd.getRefName())) {
- String newName = RefNames.refsUsers(user.getAccountId());
- logDebug("Swapping out command for %s to %s", RefNames.REFS_USERS_SELF, newName);
- final ReceiveCommand orgCmd = cmd;
- cmd =
- new ReceiveCommand(cmd.getOldId(), cmd.getNewId(), newName, cmd.getType()) {
- @Override
- public void setResult(Result s, String m) {
- super.setResult(s, m);
- orgCmd.setResult(s, m);
- }
- };
- }
-
+ private void parseDirectChangesPush(ReceiveCommand cmd) {
Matcher m = NEW_PATCHSET_PATTERN.matcher(cmd.getRefName());
if (m.matches()) {
if (allowPushToRefsChanges) {
@@ -896,13 +923,44 @@
}
return;
}
+ }
+
+ /*
+ * Interpret a normal push, and optionally add a corresponding command to actualCommands.
+ */
+ private void parseRegularCommand(ReceiveCommand cmd)
+ throws PermissionBackendException, NoSuchProjectException, IOException {
+ if (cmd.getResult() != NOT_ATTEMPTED) {
+ // Already rejected by the core receive process.
+ logger.atFine().log("Already processed by core: %s %s", cmd.getResult(), cmd);
+ return;
+ }
+
+ if (!Repository.isValidRefName(cmd.getRefName()) || cmd.getRefName().contains("//")) {
+ reject(cmd, "not valid ref");
+ return;
+ }
+
+ if (projectState.isAllUsers() && RefNames.REFS_USERS_SELF.equals(cmd.getRefName())) {
+ String newName = RefNames.refsUsers(user.getAccountId());
+ logger.atFine().log("Swapping out command for %s to %s", RefNames.REFS_USERS_SELF, newName);
+ final ReceiveCommand orgCmd = cmd;
+ cmd =
+ new ReceiveCommand(cmd.getOldId(), cmd.getNewId(), newName, cmd.getType()) {
+ @Override
+ public void setResult(Result s, String m) {
+ super.setResult(s, m);
+ orgCmd.setResult(s, m);
+ }
+ };
+ }
if (RefNames.isNoteDbMetaRef(cmd.getRefName())) {
// Reject pushes to NoteDb refs without a special option and permission. Note that this
// prohibition doesn't depend on NoteDb being enabled in any way, since all sites will
// migrate to NoteDb eventually, and we don't want garbage data waiting there when the
// migration finishes.
- logDebug(
+ logger.atFine().log(
"%s NoteDb ref %s with %s=%s",
cmd.getType(), cmd.getRefName(), NoteDbPushOption.OPTION_NAME, noteDbPushOption);
if (!Optional.of(NoteDbPushOption.ALLOW).equals(noteDbPushOption)) {
@@ -953,7 +1011,7 @@
}
if (isConfig(cmd)) {
- logDebug("Processing %s command", cmd.getRefName());
+ logger.atFine().log("Processing %s command", cmd.getRefName());
try {
permissions.check(ProjectPermission.WRITE_CONFIG);
} catch (AuthException e) {
@@ -978,13 +1036,9 @@
addError(" " + err.getMessage());
}
reject(cmd, "invalid project configuration");
- logError(
- "User "
- + user.getLoggableName()
- + " tried to push invalid project configuration "
- + cmd.getNewId().name()
- + " for "
- + project.getName());
+ logger.atSevere().log(
+ "User %s tried to push invalid project configuration %s for %s",
+ user.getLoggableName(), cmd.getNewId().name(), project.getName());
return;
}
Project.NameKey newParent = cfg.getProject().getParent(allProjectsName);
@@ -1054,14 +1108,9 @@
}
} catch (Exception e) {
reject(cmd, "invalid project configuration");
- logError(
- "User "
- + user.getLoggableName()
- + " tried to push invalid project configuration "
- + cmd.getNewId().name()
- + " for "
- + project.getName(),
- e);
+ logger.atSevere().withCause(e).log(
+ "User %s tried to push invalid project configuration %s for %s",
+ user.getLoggableName(), cmd.getNewId().name(), project.getName());
return;
}
break;
@@ -1085,13 +1134,12 @@
try {
obj = receivePack.getRevWalk().parseAny(cmd.getNewId());
} catch (IOException err) {
- logError(
- "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " creation",
- err);
+ logger.atSevere().withCause(err).log(
+ "Invalid object %s for %s creation", cmd.getNewId().name(), cmd.getRefName());
reject(cmd, "invalid object");
return;
}
- logDebug("Creating %s", cmd);
+ logger.atFine().log("Creating %s", cmd);
if (isHead(cmd) && !isCommit(cmd)) {
return;
@@ -1102,7 +1150,10 @@
// Must pass explicit user instead of injecting a provider into CreateRefControl, since
// Provider<CurrentUser> within ReceiveCommits will always return anonymous.
createRefControl.checkCreateRef(Providers.of(user), receivePack.getRepository(), branch, obj);
- } catch (AuthException | ResourceConflictException denied) {
+ } catch (AuthException denied) {
+ rejectProhibited(cmd, denied);
+ return;
+ } catch (ResourceConflictException denied) {
reject(cmd, "prohibited by Gerrit: " + denied.getMessage());
return;
}
@@ -1117,15 +1168,9 @@
}
private void parseUpdate(ReceiveCommand cmd) throws PermissionBackendException {
- logDebug("Updating %s", cmd);
- boolean ok;
- try {
- permissions.ref(cmd.getRefName()).check(RefPermission.UPDATE);
- ok = true;
- } catch (AuthException err) {
- ok = false;
- }
- if (ok) {
+ logger.atFine().log("Updating %s", cmd);
+ Optional<AuthException> err = checkRefPermission(cmd, RefPermission.UPDATE);
+ if (!err.isPresent()) {
if (isHead(cmd) && !isCommit(cmd)) {
return;
}
@@ -1135,12 +1180,7 @@
validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
actualCommands.add(cmd);
} else {
- if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
- errors.put(ReceiveError.CONFIG_UPDATE, RefNames.REFS_CONFIG);
- } else {
- errors.put(ReceiveError.UPDATE, cmd.getRefName());
- }
- reject(cmd, "prohibited by Gerrit: ref update access denied");
+ rejectProhibited(cmd, err.get());
}
}
@@ -1149,7 +1189,8 @@
try {
obj = receivePack.getRevWalk().parseAny(cmd.getNewId());
} catch (IOException err) {
- logError("Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName(), err);
+ logger.atSevere().withCause(err).log(
+ "Invalid object %s for %s", cmd.getNewId().name(), cmd.getRefName());
reject(cmd, "invalid object");
return false;
}
@@ -1162,34 +1203,23 @@
}
private void parseDelete(ReceiveCommand cmd) throws PermissionBackendException {
- logDebug("Deleting %s", cmd);
+ logger.atFine().log("Deleting %s", cmd);
if (cmd.getRefName().startsWith(REFS_CHANGES)) {
- errors.put(ReceiveError.DELETE_CHANGES, cmd.getRefName());
+ errors.put(CANNOT_DELETE_CHANGES, cmd.getRefName());
reject(cmd, "cannot delete changes");
- } else if (canDelete(cmd)) {
+ } else if (isConfigRef(cmd.getRefName())) {
+ errors.put(CANNOT_DELETE_CONFIG, cmd.getRefName());
+ reject(cmd, "cannot delete project configuration");
+ }
+
+ Optional<AuthException> err = checkRefPermission(cmd, RefPermission.DELETE);
+ if (!err.isPresent()) {
if (!validRefOperation(cmd)) {
return;
}
actualCommands.add(cmd);
- } else if (RefNames.REFS_CONFIG.equals(cmd.getRefName())) {
- reject(cmd, "cannot delete project configuration");
} else {
- errors.put(ReceiveError.DELETE, cmd.getRefName());
- reject(cmd, "cannot delete references");
- }
- }
-
- private boolean canDelete(ReceiveCommand cmd) throws PermissionBackendException {
- if (isConfigRef(cmd.getRefName())) {
- // Never allow to delete the meta config branch.
- return false;
- }
-
- try {
- permissions.ref(cmd.getRefName()).check(RefPermission.DELETE);
- return projectState.statePermitsWrite();
- } catch (AuthException e) {
- return false;
+ rejectProhibited(cmd, err.get());
}
}
@@ -1200,13 +1230,12 @@
} catch (IncorrectObjectTypeException notCommit) {
newObject = null;
} catch (IOException err) {
- logError(
- "Invalid object " + cmd.getNewId().name() + " for " + cmd.getRefName() + " forced update",
- err);
+ logger.atSevere().withCause(err).log(
+ "Invalid object %s for %s forced update", cmd.getNewId().name(), cmd.getRefName());
reject(cmd, "invalid object");
return;
}
- logDebug("Rewinding %s", cmd);
+ logger.atFine().log("Rewinding %s", cmd);
if (newObject != null) {
validateNewCommits(new Branch.NameKey(project.getNameKey(), cmd.getRefName()), cmd);
@@ -1215,23 +1244,51 @@
}
}
- boolean ok;
- try {
- permissions.ref(cmd.getRefName()).check(RefPermission.FORCE_UPDATE);
- ok = true;
- } catch (AuthException err) {
- ok = false;
- }
- if (ok) {
+ Optional<AuthException> err = checkRefPermission(cmd, RefPermission.FORCE_UPDATE);
+ if (!err.isPresent()) {
if (!validRefOperation(cmd)) {
return;
}
actualCommands.add(cmd);
} else {
- cmd.setResult(REJECTED_OTHER_REASON, "need '" + PermissionRule.FORCE_PUSH + "' privilege.");
+ rejectProhibited(cmd, err.get());
}
}
+ private Optional<AuthException> checkRefPermission(ReceiveCommand cmd, RefPermission perm)
+ throws PermissionBackendException {
+ return checkRefPermission(permissions.ref(cmd.getRefName()), perm);
+ }
+
+ private Optional<AuthException> checkRefPermission(
+ PermissionBackend.ForRef forRef, RefPermission perm) throws PermissionBackendException {
+ try {
+ forRef.check(perm);
+ return Optional.empty();
+ } catch (AuthException e) {
+ return Optional.of(e);
+ }
+ }
+
+ private void rejectProhibited(ReceiveCommand cmd, AuthException err) {
+ err.getAdvice().ifPresent(a -> errors.put(a, cmd.getRefName()));
+ reject(cmd, prohibited(err, cmd.getRefName()));
+ }
+
+ private static String prohibited(AuthException e, String alreadyDisplayedResource) {
+ String msg = e.getMessage();
+ if (e instanceof PermissionDeniedException) {
+ PermissionDeniedException pde = (PermissionDeniedException) e;
+ if (pde.getResource().isPresent()
+ && pde.getResource().get().equals(alreadyDisplayedResource)) {
+ // Avoid repeating resource name if exactly the given name was already displayed by the
+ // generic git push machinery.
+ msg = PermissionDeniedException.MESSAGE_PREFIX + pde.describePermission();
+ }
+ }
+ return "prohibited by Gerrit: " + msg;
+ }
+
static class MagicBranchInput {
private static final Splitter COMMAS = Splitter.on(',').omitEmptyStrings();
@@ -1249,6 +1306,9 @@
CmdLineParser cmdLineParser;
Set<String> hashtags = new HashSet<>();
+ @Option(name = "--trace", metaVar = "NAME", usage = "enable tracing")
+ boolean trace;
+
@Option(name = "--base", metaVar = "BASE", usage = "merge base of changes")
List<ObjectId> base;
@@ -1501,13 +1561,7 @@
* <p>Assumes we are handling a magic branch here.
*/
private void parseMagicBranch(ReceiveCommand cmd) throws PermissionBackendException {
- // Permit exactly one new change request per push.
- if (magicBranch != null) {
- reject(cmd, "duplicate request");
- return;
- }
-
- logDebug("Found magic branch %s", cmd.getRefName());
+ logger.atFine().log("Found magic branch %s", cmd.getRefName());
magicBranch = new MagicBranchInput(user, cmd, labelTypes, notesMigration);
magicBranch.reviewer.addAll(extraReviewers.get(ReviewerStateInternal.REVIEWER));
magicBranch.cc.addAll(extraReviewers.get(ReviewerStateInternal.CC));
@@ -1519,7 +1573,7 @@
ref = magicBranch.parse(repo, receivePack.getAdvertisedRefs().keySet(), pushOptions);
} catch (CmdLineException e) {
if (!magicBranch.cmdLineParser.wasHelpRequestedByOption()) {
- logDebug("Invalid branch syntax");
+ logger.atFine().log("Invalid branch syntax");
reject(cmd, e.getMessage());
return;
}
@@ -1540,13 +1594,13 @@
return;
}
if (projectState.isAllUsers() && RefNames.REFS_USERS_SELF.equals(ref)) {
- logDebug("Handling %s", RefNames.REFS_USERS_SELF);
+ logger.atFine().log("Handling %s", RefNames.REFS_USERS_SELF);
ref = RefNames.refsUsers(user.getAccountId());
}
if (!receivePack.getAdvertisedRefs().containsKey(ref)
&& !ref.equals(readHEAD(repo))
&& !ref.equals(RefNames.REFS_CONFIG)) {
- logDebug("Ref %s not found", ref);
+ logger.atFine().log("Ref %s not found", ref);
if (ref.startsWith(Constants.R_HEADS)) {
String n = ref.substring(Constants.R_HEADS.length());
reject(cmd, "branch " + n + " not found");
@@ -1559,11 +1613,9 @@
magicBranch.dest = new Branch.NameKey(project.getNameKey(), ref);
magicBranch.perm = permissions.ref(ref);
- try {
- magicBranch.perm.check(RefPermission.CREATE_CHANGE);
- } catch (AuthException denied) {
- errors.put(ReceiveError.CODE_REVIEW, ref);
- reject(cmd, denied.getMessage());
+ Optional<AuthException> err = checkRefPermission(magicBranch.perm, RefPermission.CREATE_CHANGE);
+ if (err.isPresent()) {
+ rejectProhibited(cmd, err.get());
return;
}
if (!projectState.statePermitsWrite()) {
@@ -1574,7 +1626,7 @@
// TODO(davido): Remove legacy support for drafts magic branch option
// after repo-tool supports private and work-in-progress changes.
if (magicBranch.draft && !receiveConfig.allowDrafts) {
- errors.put(ReceiveError.CODE_REVIEW, ref);
+ errors.put(ReceiveError.CODE_REVIEW.get(), ref);
reject(cmd, "draft workflow is disabled");
return;
}
@@ -1607,10 +1659,9 @@
}
if (magicBranch.submit) {
- try {
- permissions.ref(ref).check(RefPermission.UPDATE_BY_SUBMIT);
- } catch (AuthException e) {
- reject(cmd, e.getMessage());
+ err = checkRefPermission(magicBranch.perm, RefPermission.UPDATE_BY_SUBMIT);
+ if (err.isPresent()) {
+ rejectProhibited(cmd, err.get());
return;
}
}
@@ -1619,10 +1670,10 @@
RevCommit tip;
try {
tip = walk.parseCommit(magicBranch.cmd.getNewId());
- logDebug("Tip of push: %s", tip.name());
+ logger.atFine().log("Tip of push: %s", tip.name());
} catch (IOException ex) {
magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
- logError("Invalid pack upload; one or more objects weren't sent", ex);
+ logger.atSevere().withCause(ex).log("Invalid pack upload; one or more objects weren't sent");
return;
}
@@ -1649,12 +1700,12 @@
|| magicBranch.base != null
|| magicBranch.merged
|| tip.getParentCount() == 0) {
- logDebug("Forcing newChangeForAllNotInTarget = false");
+ logger.atFine().log("Forcing newChangeForAllNotInTarget = false");
newChangeForAllNotInTarget = false;
}
if (magicBranch.base != null) {
- logDebug("Handling %base: %s", magicBranch.base);
+ logger.atFine().log("Handling %%base: %s", magicBranch.base);
magicBranch.baseCommit = Lists.newArrayListWithCapacity(magicBranch.base.size());
for (ObjectId id : magicBranch.base) {
try {
@@ -1666,7 +1717,8 @@
reject(cmd, "base not found");
return;
} catch (IOException e) {
- logWarn(String.format("Project %s cannot read %s", project.getName(), id.name()), e);
+ logger.atWarning().withCause(e).log(
+ "Project %s cannot read %s", project.getName(), id.name());
reject(cmd, "internal server error");
return;
}
@@ -1677,11 +1729,11 @@
return; // readBranchTip already rejected cmd.
}
magicBranch.baseCommit = Collections.singletonList(branchTip);
- logDebug("Set baseCommit = %s", magicBranch.baseCommit.get(0).name());
+ logger.atFine().log("Set baseCommit = %s", magicBranch.baseCommit.get(0).name());
}
} catch (IOException ex) {
- logWarn(
- String.format("Error walking to %s in project %s", destBranch, project.getName()), ex);
+ logger.atWarning().withCause(ex).log(
+ "Error walking to %s in project %s", destBranch, project.getName());
reject(cmd, "internal server error");
return;
}
@@ -1697,11 +1749,11 @@
// The destination branch does not yet exist. Assume the
// history being sent for review will start it and thus
// is "connected" to the branch.
- logDebug("Branch is unborn");
+ logger.atFine().log("Branch is unborn");
return;
}
RevCommit h = walk.parseCommit(targetRef.getObjectId());
- logDebug("Current branch tip: %s", h.name());
+ logger.atFine().log("Current branch tip: %s", h.name());
RevFilter oldRevFilter = walk.getRevFilter();
try {
walk.reset();
@@ -1717,7 +1769,7 @@
}
} catch (IOException e) {
magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
- logError("Invalid pack upload; one or more objects weren't sent", e);
+ logger.atSevere().withCause(e).log("Invalid pack upload; one or more objects weren't sent");
}
}
@@ -1741,7 +1793,7 @@
// Handle an upload to refs/changes/XX/CHANGED-NUMBER.
private void parseReplaceCommand(ReceiveCommand cmd, Change.Id changeId) {
- logDebug("Parsing replace command");
+ logger.atFine().log("Parsing replace command");
if (cmd.getType() != ReceiveCommand.Type.CREATE) {
reject(cmd, "invalid usage");
return;
@@ -1750,9 +1802,9 @@
RevCommit newCommit;
try {
newCommit = receivePack.getRevWalk().parseCommit(cmd.getNewId());
- logDebug("Replacing with %s", newCommit);
+ logger.atFine().log("Replacing with %s", newCommit);
} catch (IOException e) {
- logError("Cannot parse " + cmd.getNewId().name() + " as commit", e);
+ logger.atSevere().withCause(e).log("Cannot parse %s as commit", cmd.getNewId().name());
reject(cmd, "invalid commit");
return;
}
@@ -1761,11 +1813,11 @@
try {
changeEnt = notesFactory.createChecked(db, project.getNameKey(), changeId).getChange();
} catch (NoSuchChangeException e) {
- logError("Change not found " + changeId, e);
+ logger.atSevere().withCause(e).log("Change not found %s", changeId);
reject(cmd, "change " + changeId + " not found");
return;
} catch (OrmException e) {
- logError("Cannot lookup existing change " + changeId, e);
+ logger.atSevere().withCause(e).log("Cannot lookup existing change %s", changeId);
reject(cmd, "database error");
return;
}
@@ -1774,7 +1826,7 @@
return;
}
- logDebug("Replacing change %s", changeEnt.getId());
+ logger.atFine().log("Replacing change %s", changeEnt.getId());
requestReplace(cmd, true, changeEnt, newCommit);
}
@@ -1798,7 +1850,7 @@
}
private List<CreateRequest> selectNewAndReplacedChangesFromMagicBranch() {
- logDebug("Finding new and replaced changes");
+ logger.atFine().log("Finding new and replaced changes");
List<CreateRequest> newChanges = new ArrayList<>();
ListMultimap<ObjectId, Ref> existing = changeRefsById();
@@ -1880,7 +1932,7 @@
}
int n = pending.size() + newChanges.size();
if (maxBatchChanges != 0 && n > maxBatchChanges) {
- logDebug("%d changes exceeds limit of %d", n, maxBatchChanges);
+ logger.atFine().log("%d changes exceeds limit of %d", n, maxBatchChanges);
reject(
magicBranch.cmd,
"the number of pushed changes in a batch exceeds the max limit " + maxBatchChanges);
@@ -1899,12 +1951,12 @@
continue;
}
- logDebug("Creating new change for %s even though it is already tracked", name);
+ logger.atFine().log("Creating new change for %s even though it is already tracked", name);
}
if (!validCommit(receivePack.getRevWalk(), magicBranch.dest, magicBranch.cmd, c, null)) {
// Not a change the user can propose? Abort as early as possible.
- logDebug("Aborting early due to invalid commit");
+ logger.atFine().log("Aborting early due to invalid commit");
return Collections.emptyList();
}
@@ -1914,7 +1966,7 @@
magicBranch.cmd,
"Pushing merges in commit chains with 'all not in target' is not allowed,\n"
+ "to override please set the base manually");
- logDebug("Rejecting merge commit %s with newChangeForAllNotInTarget", name);
+ logger.atFine().log("Rejecting merge commit %s with newChangeForAllNotInTarget", name);
// TODO(dborowitz): Should we early return here?
}
@@ -1923,7 +1975,7 @@
continue;
}
}
- logDebug(
+ logger.atFine().log(
"Finished initial RevWalk with %d commits total: %d already"
+ " tracked, %d new changes with no Change-Id, and %d deferred"
+ " lookups",
@@ -1940,14 +1992,14 @@
}
if (newChangeIds.contains(p.changeKey)) {
- logDebug("Multiple commits with Change-Id %s", p.changeKey);
+ logger.atFine().log("Multiple commits with Change-Id %s", p.changeKey);
reject(magicBranch.cmd, SAME_CHANGE_ID_IN_MULTIPLE_CHANGES);
return Collections.emptyList();
}
List<ChangeData> changes = p.destChanges;
if (changes.size() > 1) {
- logDebug(
+ logger.atFine().log(
"Multiple changes in branch %s with Change-Id %s: %s",
magicBranch.dest,
p.changeKey,
@@ -2005,7 +2057,7 @@
}
newChanges.add(new CreateRequest(p.commit, magicBranch.dest.get()));
}
- logDebug(
+ logger.atFine().log(
"Finished deferred lookups with %d updates and %d new changes",
replaceByChange.size(), newChanges.size());
} catch (IOException e) {
@@ -2013,10 +2065,10 @@
// identified the missing object earlier before we got control.
//
magicBranch.cmd.setResult(REJECTED_MISSING_OBJECT);
- logError("Invalid pack upload; one or more objects weren't sent", e);
+ logger.atSevere().withCause(e).log("Invalid pack upload; one or more objects weren't sent");
return Collections.emptyList();
} catch (OrmException e) {
- logError("Cannot query database to locate prior changes", e);
+ logger.atSevere().withCause(e).log("Cannot query database to locate prior changes");
reject(magicBranch.cmd, "database error");
return Collections.emptyList();
}
@@ -2044,9 +2096,9 @@
for (UpdateGroupsRequest update : updateGroups) {
update.groups = ImmutableList.copyOf((groups.get(update.commit)));
}
- logDebug("Finished updating groups from GroupCollector");
+ logger.atFine().log("Finished updating groups from GroupCollector");
} catch (OrmException e) {
- logError("Error collecting groups for changes", e);
+ logger.atSevere().withCause(e).log("Error collecting groups for changes");
reject(magicBranch.cmd, "internal server error");
}
return newChanges;
@@ -2058,7 +2110,7 @@
notesFactory.create(db, project.getNameKey(), Change.Id.fromRef(ref.getName()));
Change change = notes.getChange();
if (change.getDest().equals(magicBranch.dest)) {
- logDebug("Found change %s from existing refs.", change.getKey());
+ logger.atFine().log("Found change %s from existing refs.", change.getKey());
// Reindex the change asynchronously, ignoring errors.
@SuppressWarnings("unused")
Future<?> possiblyIgnoredError = indexer.indexAsync(project.getNameKey(), change.getId());
@@ -2079,7 +2131,7 @@
if (magicBranch.baseCommit != null) {
markExplicitBasesUninteresting();
} else if (magicBranch.merged) {
- logDebug("Marking parents of merged commit %s uninteresting", start.name());
+ logger.atFine().log("Marking parents of merged commit %s uninteresting", start.name());
for (RevCommit c : start.getParents()) {
rw.markUninteresting(c);
}
@@ -2090,13 +2142,13 @@
}
private void markExplicitBasesUninteresting() throws IOException {
- logDebug("Marking %d base commits uninteresting", magicBranch.baseCommit.size());
+ logger.atFine().log("Marking %d base commits uninteresting", magicBranch.baseCommit.size());
for (RevCommit c : magicBranch.baseCommit) {
receivePack.getRevWalk().markUninteresting(c);
}
Ref targetRef = allRefs().get(magicBranch.dest.get());
if (targetRef != null) {
- logDebug(
+ logger.atFine().log(
"Marking target ref %s (%s) uninteresting",
magicBranch.dest.get(), targetRef.getObjectId().name());
receivePack
@@ -2150,11 +2202,12 @@
rw.markUninteresting(rw.parseCommit(ref.getObjectId()));
i++;
} catch (IOException e) {
- logWarn(String.format("Invalid ref %s in %s", ref.getName(), project.getName()), e);
+ logger.atWarning().withCause(e).log(
+ "Invalid ref %s in %s", ref.getName(), project.getName());
}
}
}
- logDebug("Marked %d heads as uninteresting", i);
+ logger.atFine().log("Marked %d heads as uninteresting", i);
}
private static boolean isValidChangeId(String idStr) {
@@ -2308,7 +2361,7 @@
Change tipChange = bySha.get(magicBranch.cmd.getNewId());
checkNotNull(
tipChange, "tip of push does not correspond to a change; found these changes: %s", bySha);
- logDebug(
+ logger.atFine().log(
"Processing submit with tip change %s (%s)", tipChange.getId(), magicBranch.cmd.getNewId());
try (MergeOp op = mergeOpProvider.get()) {
op.merge(db, tipChange, user, false, new SubmitInput(), false);
@@ -2325,27 +2378,23 @@
}
}
} catch (OrmException err) {
- logError(
- String.format(
- "Cannot read database before replacement for project %s", project.getName()),
- err);
+ logger.atSevere().withCause(err).log(
+ "Cannot read database before replacement for project %s", project.getName());
for (ReplaceRequest req : replaceByChange.values()) {
if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
req.inputCommand.setResult(REJECTED_OTHER_REASON, "internal server error");
}
}
} catch (IOException | PermissionBackendException err) {
- logError(
- String.format(
- "Cannot read repository before replacement for project %s", project.getName()),
- err);
+ logger.atSevere().withCause(err).log(
+ "Cannot read repository before replacement for project %s", project.getName());
for (ReplaceRequest req : replaceByChange.values()) {
if (req.inputCommand.getResult() == NOT_ATTEMPTED) {
req.inputCommand.setResult(REJECTED_OTHER_REASON, "internal server error");
}
}
}
- logDebug("Read %d changes to replace", replaceByChange.size());
+ logger.atFine().log("Read %d changes to replace", replaceByChange.size());
if (magicBranch != null && magicBranch.cmd.getResult() != NOT_ATTEMPTED) {
// Cancel creations tied to refs/for/ or refs/drafts/ command.
@@ -2398,10 +2447,8 @@
receivePack.getRevWalk().parseCommit(ref.getObjectId()),
PatchSet.Id.fromRef(ref.getName()));
} catch (IOException err) {
- logWarn(
- String.format(
- "Project %s contains invalid change ref %s", project.getName(), ref.getName()),
- err);
+ logger.atWarning().withCause(err).log(
+ "Project %s contains invalid change ref %s", project.getName(), ref.getName());
}
}
}
@@ -2562,7 +2609,7 @@
try {
edit = editUtil.byChange(notes, user);
} catch (AuthException | IOException e) {
- logError("Cannot retrieve edit", e);
+ logger.atSevere().withCause(e).log("Cannot retrieve edit");
return false;
}
@@ -2695,11 +2742,11 @@
public void postUpdate(Context ctx) {
String refName = cmd.getRefName();
if (cmd.getType() == ReceiveCommand.Type.UPDATE) { // aka fast-forward
- logDebug("Updating tag cache on fast-forward of %s", cmd.getRefName());
+ logger.atFine().log("Updating tag cache on fast-forward of %s", cmd.getRefName());
tagCache.updateFastForward(project.getNameKey(), refName, cmd.getOldId(), cmd.getNewId());
}
if (isConfig(cmd)) {
- logDebug("Reloading project in cache");
+ logger.atFine().log("Reloading project in cache");
try {
projectCache.evict(project);
} catch (IOException e) {
@@ -2708,7 +2755,7 @@
}
ProjectState ps = projectCache.get(project.getNameKey());
try {
- logDebug("Updating project description");
+ logger.atFine().log("Updating project description");
repo.setGitwebDescription(ps.getProject().getDescription());
} catch (IOException e) {
logger.atWarning().withCause(e).log("cannot update description of %s", project.getName());
@@ -2814,20 +2861,21 @@
&& !(MagicBranch.isMagicBranch(cmd.getRefName())
|| NEW_PATCHSET_PATTERN.matcher(cmd.getRefName()).matches())
&& pushOptions.containsKey(PUSH_OPTION_SKIP_VALIDATION)) {
- try {
- if (projectState.is(BooleanProjectConfig.USE_SIGNED_OFF_BY)) {
- throw new AuthException(
- "requireSignedOffBy prevents option " + PUSH_OPTION_SKIP_VALIDATION);
- }
-
- perm.check(RefPermission.SKIP_VALIDATION);
- if (!Iterables.isEmpty(rejectCommits)) {
- throw new AuthException("reject-commits prevents " + PUSH_OPTION_SKIP_VALIDATION);
- }
- logDebug("Short-circuiting new commit validation");
- } catch (AuthException denied) {
- reject(cmd, denied.getMessage());
+ if (projectState.is(BooleanProjectConfig.USE_SIGNED_OFF_BY)) {
+ reject(cmd, "requireSignedOffBy prevents option " + PUSH_OPTION_SKIP_VALIDATION);
+ return;
}
+
+ Optional<AuthException> err =
+ checkRefPermission(permissions.ref(branch.get()), RefPermission.SKIP_VALIDATION);
+ if (err.isPresent()) {
+ rejectProhibited(cmd, err.get());
+ return;
+ }
+ if (!Iterables.isEmpty(rejectCommits)) {
+ reject(cmd, "reject-commits prevents " + PUSH_OPTION_SKIP_VALIDATION);
+ }
+ logger.atFine().log("Short-circuiting new commit validation");
return;
}
@@ -2847,13 +2895,11 @@
int n = 0;
for (RevCommit c; (c = walk.next()) != null; ) {
if (++n > limit) {
- logDebug("Number of new commits exceeds limit of %d", limit);
- addMessage(
+ logger.atFine().log("Number of new commits exceeds limit of %d", limit);
+ reject(
+ cmd,
String.format(
- "Cannot push more than %d commits to %s without %s option "
- + "(see %sDocumentation/user-upload.html#skip_validation for details)",
- limit, branch.get(), PUSH_OPTION_SKIP_VALIDATION, canonicalWebUrl));
- reject(cmd, "too many commits");
+ "more than %d commits, and %s not set", limit, PUSH_OPTION_SKIP_VALIDATION));
return;
}
if (existing.keySet().contains(c)) {
@@ -2863,15 +2909,15 @@
}
if (missingFullName && user.hasEmailAddress(c.getCommitterIdent().getEmailAddress())) {
- logDebug("Will update full name of caller");
+ logger.atFine().log("Will update full name of caller");
setFullNameTo = c.getCommitterIdent().getName();
missingFullName = false;
}
}
- logDebug("Validated %d new commits", n);
+ logger.atFine().log("Validated %d new commits", n);
} catch (IOException err) {
cmd.setResult(REJECTED_MISSING_OBJECT);
- logError("Invalid pack upload; one or more objects weren't sent", err);
+ logger.atSevere().withCause(err).log("Invalid pack upload; one or more objects weren't sent");
}
}
@@ -2902,7 +2948,7 @@
perm, branch, user.asIdentifiedUser(), sshInfo, repo, rw, change);
messages.addAll(validators.validate(receiveEvent));
} catch (CommitValidationException e) {
- logDebug("Commit validation failed on %s", c.name());
+ logger.atFine().log("Commit validation failed on %s", c.name());
messages.addAll(e.getMessages());
reject(cmd, e.getMessage());
return false;
@@ -2912,7 +2958,7 @@
}
private void autoCloseChanges(ReceiveCommand cmd) {
- logDebug("Starting auto-closing of changes");
+ logger.atFine().log("Starting auto-closing of changes");
String refName = cmd.getRefName();
checkState(
!MagicBranch.isMagicBranch(refName),
@@ -2929,7 +2975,6 @@
ObjectReader reader = ins.newReader();
RevWalk rw = new RevWalk(reader)) {
bu.setRepository(repo, rw, ins).updateChangesInParallel();
- bu.setRequestId(receiveId);
// TODO(dborowitz): Teach BatchUpdate to ignore missing changes.
RevCommit newTip = rw.parseCommit(cmd.getNewId());
@@ -2984,7 +3029,7 @@
for (ReplaceRequest req : replaceAndClose) {
Change.Id id = req.notes.getChangeId();
if (!req.validate(true)) {
- logDebug("Not closing %s because validation failed", id);
+ logger.atFine().log("Not closing %s because validation failed", id);
continue;
}
req.addOps(bu, null);
@@ -2996,12 +3041,12 @@
bu.addOp(id, new ChangeProgressOp(closeProgress));
}
- logDebug(
+ logger.atFine().log(
"Auto-closing %s changes with existing patch sets and %s with new patch sets",
existingPatchSets, newPatchSets);
bu.execute();
} catch (IOException | OrmException | PermissionBackendException e) {
- logError("Failed to auto-close changes", e);
+ logger.atSevere().withCause(e).log("Failed to auto-close changes");
}
return null;
},
@@ -3011,9 +3056,9 @@
.timeout(retryHelper.getDefaultTimeout(ActionType.CHANGE_UPDATE).multipliedBy(5))
.build());
} catch (RestApiException e) {
- logError("Can't insert patchset", e);
+ logger.atSevere().withCause(e).log("Can't insert patchset");
} catch (UpdateException e) {
- logError("Failed to auto-close changes", e);
+ logger.atSevere().withCause(e).log("Failed to auto-close changes");
}
}
@@ -3039,7 +3084,7 @@
if (setFullNameTo == null) {
return;
}
- logDebug("Updating full name of caller");
+ logger.atFine().log("Updating full name of caller");
try {
Optional<AccountState> accountState =
accountsUpdateProvider
@@ -3056,7 +3101,7 @@
.map(AccountState::getAccount)
.ifPresent(a -> user.getAccount().setFullName(a.getFullName()));
} catch (OrmException | IOException | ConfigInvalidException e) {
- logWarn("Failed to update full name of caller", e);
+ logger.atWarning().withCause(e).log("Failed to update full name of caller");
}
}
@@ -3094,46 +3139,4 @@
private static boolean isConfig(ReceiveCommand cmd) {
return cmd.getRefName().equals(RefNames.REFS_CONFIG);
}
-
- private void logDebug(String msg) {
- logger.atFine().log(receiveId + msg);
- }
-
- private void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(receiveId + msg, arg);
- }
-
- private void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
- logger.atFine().log(receiveId + msg, arg1, arg2);
- }
-
- private void logDebug(
- String msg, @Nullable Object arg1, @Nullable Object arg2, @Nullable Object arg3) {
- logger.atFine().log(receiveId + msg, arg1, arg2, arg3);
- }
-
- private void logDebug(
- String msg,
- @Nullable Object arg1,
- @Nullable Object arg2,
- @Nullable Object arg3,
- @Nullable Object arg4) {
- logger.atFine().log(receiveId + msg, arg1, arg2, arg3, arg4);
- }
-
- private void logWarn(String msg, Throwable t) {
- logger.atWarning().withCause(t).log("%s%s", receiveId, msg);
- }
-
- private void logWarn(String msg) {
- logWarn(msg, null);
- }
-
- private void logError(String msg, Throwable t) {
- logger.atSevere().withCause(t).log("%s%s", receiveId, msg);
- }
-
- private void logError(String msg) {
- logError(msg, null);
- }
}
diff --git a/java/com/google/gerrit/server/group/db/GroupNameNotes.java b/java/com/google/gerrit/server/group/db/GroupNameNotes.java
index 1b74241..80d462f 100644
--- a/java/com/google/gerrit/server/group/db/GroupNameNotes.java
+++ b/java/com/google/gerrit/server/group/db/GroupNameNotes.java
@@ -25,6 +25,7 @@
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multiset;
+import com.google.common.flogger.FluentLogger;
import com.google.common.hash.Hashing;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.GroupReference;
@@ -87,6 +88,8 @@
* </ul>
*/
public class GroupNameNotes extends VersionedMetaData {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
private static final String SECTION_NAME = "group";
private static final String UUID_PARAM = "uuid";
private static final String NAME_PARAM = "name";
@@ -323,6 +326,8 @@
protected void onLoad() throws IOException, ConfigInvalidException {
nameConflicting = false;
+ logger.atFine().log("Reading group notes");
+
if (revision != null) {
NoteMap noteMap = NoteMap.read(reader, revision);
if (newGroupName.isPresent()) {
@@ -365,6 +370,8 @@
return false;
}
+ logger.atFine().log("Updating group notes");
+
NoteMap noteMap = revision == null ? NoteMap.newEmptyMap() : NoteMap.read(reader, revision);
if (oldGroupName.isPresent()) {
removeNote(noteMap, oldGroupName.get(), inserter);
diff --git a/java/com/google/gerrit/server/logging/LoggingContext.java b/java/com/google/gerrit/server/logging/LoggingContext.java
new file mode 100644
index 0000000..04a23e9
--- /dev/null
+++ b/java/com/google/gerrit/server/logging/LoggingContext.java
@@ -0,0 +1,92 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.flogger.backend.Tags;
+import java.util.logging.Level;
+
+/**
+ * Logging context for Flogger.
+ *
+ * <p>To configure this logging context for Flogger set the following system property (also see
+ * {@link com.google.common.flogger.backend.system.DefaultPlatform}):
+ *
+ * <ul>
+ * <li>{@code
+ * flogger.logging_context=com.google.gerrit.server.logging.LoggingContext#getInstance}.
+ * </ul>
+ */
+public class LoggingContext extends com.google.common.flogger.backend.system.LoggingContext {
+ private static final LoggingContext INSTANCE = new LoggingContext();
+
+ private static final ThreadLocal<MutableTags> tags = new ThreadLocal<>();
+
+ private LoggingContext() {}
+
+ /** This method is expected to be called via reflection (and might otherwise be unused). */
+ public static LoggingContext getInstance() {
+ return INSTANCE;
+ }
+
+ @Override
+ public boolean shouldForceLogging(String loggerName, Level level, boolean isEnabled) {
+ return false;
+ }
+
+ @Override
+ public Tags getTags() {
+ MutableTags mutableTags = tags.get();
+ return mutableTags != null ? mutableTags.getTags() : Tags.empty();
+ }
+
+ public ImmutableSetMultimap<String, String> getTagsAsMap() {
+ MutableTags mutableTags = tags.get();
+ return mutableTags != null ? mutableTags.asMap() : ImmutableSetMultimap.of();
+ }
+
+ boolean addTag(String name, String value) {
+ return getMutableTags().add(name, value);
+ }
+
+ void removeTag(String name, String value) {
+ MutableTags mutableTags = getMutableTags();
+ mutableTags.remove(name, value);
+ if (mutableTags.isEmpty()) {
+ tags.remove();
+ }
+ }
+
+ void setTags(ImmutableSetMultimap<String, String> newTags) {
+ if (newTags.isEmpty()) {
+ tags.remove();
+ return;
+ }
+ getMutableTags().set(newTags);
+ }
+
+ void clearTags() {
+ tags.remove();
+ }
+
+ private MutableTags getMutableTags() {
+ MutableTags mutableTags = tags.get();
+ if (mutableTags == null) {
+ mutableTags = new MutableTags();
+ tags.set(mutableTags);
+ }
+ return mutableTags;
+ }
+}
diff --git a/java/com/google/gerrit/server/logging/LoggingContextAwareThreadFactory.java b/java/com/google/gerrit/server/logging/LoggingContextAwareThreadFactory.java
new file mode 100644
index 0000000..16d24ac
--- /dev/null
+++ b/java/com/google/gerrit/server/logging/LoggingContextAwareThreadFactory.java
@@ -0,0 +1,57 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import com.google.common.collect.ImmutableSetMultimap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
+
+/**
+ * ThreadFactory that copies the logging context of the current thread to any new thread that is
+ * created by this ThreadFactory.
+ */
+public class LoggingContextAwareThreadFactory implements ThreadFactory {
+ private final ThreadFactory parentThreadFactory;
+
+ public LoggingContextAwareThreadFactory() {
+ this.parentThreadFactory = Executors.defaultThreadFactory();
+ }
+
+ public LoggingContextAwareThreadFactory(ThreadFactory parentThreadFactory) {
+ this.parentThreadFactory = parentThreadFactory;
+ }
+
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread callingThread = Thread.currentThread();
+ ImmutableSetMultimap<String, String> tags = LoggingContext.getInstance().getTagsAsMap();
+ return parentThreadFactory.newThread(
+ () -> {
+ if (callingThread.equals(Thread.currentThread())) {
+ // propagation of logging context is not needed
+ r.run();
+ return;
+ }
+
+ // propagate logging context
+ LoggingContext.getInstance().setTags(tags);
+ try {
+ r.run();
+ } finally {
+ LoggingContext.getInstance().clearTags();
+ }
+ });
+ }
+}
diff --git a/java/com/google/gerrit/server/logging/MutableTags.java b/java/com/google/gerrit/server/logging/MutableTags.java
new file mode 100644
index 0000000..a936a43
--- /dev/null
+++ b/java/com/google/gerrit/server/logging/MutableTags.java
@@ -0,0 +1,113 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.MultimapBuilder;
+import com.google.common.collect.SetMultimap;
+import com.google.common.flogger.backend.Tags;
+
+public class MutableTags {
+ private final SetMultimap<String, String> tagMap =
+ MultimapBuilder.hashKeys().hashSetValues().build();
+ private Tags tags = Tags.empty();
+
+ public Tags getTags() {
+ return tags;
+ }
+
+ /**
+ * Adds a tag if a tag with the same name and value doesn't exist yet.
+ *
+ * @param name the name of the tag
+ * @param value the value of the tag
+ * @return {@code true} if the tag was added, {@code false} if the tag was not added because it
+ * already exists
+ */
+ public boolean add(String name, String value) {
+ checkNotNull(name, "tag name is required");
+ checkNotNull(value, "tag value is required");
+ boolean ret = tagMap.put(name, value);
+ if (ret) {
+ buildTags();
+ }
+ return ret;
+ }
+
+ /**
+ * Removes the tag with the given name and value.
+ *
+ * @param name the name of the tag
+ * @param value the value of the tag
+ */
+ public void remove(String name, String value) {
+ checkNotNull(name, "tag name is required");
+ checkNotNull(value, "tag value is required");
+ if (tagMap.remove(name, value)) {
+ buildTags();
+ }
+ }
+
+ /**
+ * Checks if the contained tag map is empty.
+ *
+ * @return {@code true} if there are no tags, otherwise {@code false}
+ */
+ public boolean isEmpty() {
+ return tagMap.isEmpty();
+ }
+
+ /** Clears all tags. */
+ public void clear() {
+ tagMap.clear();
+ tags = Tags.empty();
+ }
+
+ /**
+ * Returns the tags as Multimap.
+ *
+ * @return the tags as Multimap
+ */
+ public ImmutableSetMultimap<String, String> asMap() {
+ return ImmutableSetMultimap.copyOf(tagMap);
+ }
+
+ /**
+ * Replaces the existing tags with the provided tags.
+ *
+ * @param tags the tags that should be set.
+ */
+ void set(ImmutableSetMultimap<String, String> tags) {
+ tagMap.clear();
+ tags.forEach(tagMap::put);
+ buildTags();
+ }
+
+ private void buildTags() {
+ if (tagMap.isEmpty()) {
+ if (tags.isEmpty()) {
+ return;
+ }
+ tags = Tags.empty();
+ return;
+ }
+
+ Tags.Builder tagsBuilder = Tags.builder();
+ tagMap.forEach(tagsBuilder::addTag);
+ tags = tagsBuilder.build();
+ }
+}
diff --git a/java/com/google/gerrit/server/logging/TraceContext.java b/java/com/google/gerrit/server/logging/TraceContext.java
new file mode 100644
index 0000000..cb479cc
--- /dev/null
+++ b/java/com/google/gerrit/server/logging/TraceContext.java
@@ -0,0 +1,54 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.collect.HashBasedTable;
+import com.google.common.collect.Table;
+import com.google.gerrit.server.util.RequestId;
+
+public class TraceContext implements AutoCloseable {
+ public static final TraceContext DISABLED = new TraceContext();
+
+ public static TraceContext open() {
+ return new TraceContext();
+ }
+
+ // Table<TAG_NAME, TAG_VALUE, REMOVE_ON_CLOSE>
+ private final Table<String, String, Boolean> tags = HashBasedTable.create();
+
+ private TraceContext() {}
+
+ public TraceContext addTag(RequestId.Type requestId, Object tagValue) {
+ return addTag(checkNotNull(requestId, "request ID is required").name(), tagValue);
+ }
+
+ public TraceContext addTag(String tagName, Object tagValue) {
+ String name = checkNotNull(tagName, "tag name is required");
+ String value = checkNotNull(tagValue, "tag value is required").toString();
+ tags.put(name, value, LoggingContext.getInstance().addTag(name, value));
+ return this;
+ }
+
+ @Override
+ public void close() {
+ for (Table.Cell<String, String, Boolean> cell : tags.cellSet()) {
+ if (cell.getValue()) {
+ LoggingContext.getInstance().removeTag(cell.getRowKey(), cell.getColumnKey());
+ }
+ }
+ }
+}
diff --git a/java/com/google/gerrit/server/notedb/AbstractChangeNotes.java b/java/com/google/gerrit/server/notedb/AbstractChangeNotes.java
index a083a71..8f4ad74 100644
--- a/java/com/google/gerrit/server/notedb/AbstractChangeNotes.java
+++ b/java/com/google/gerrit/server/notedb/AbstractChangeNotes.java
@@ -19,6 +19,7 @@
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.metrics.Timer1;
import com.google.gerrit.reviewdb.client.Change;
@@ -42,6 +43,8 @@
/** View of contents at a single ref related to some change. * */
public abstract class AbstractChangeNotes<T> {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
@VisibleForTesting
@Singleton
public static class Args {
@@ -145,6 +148,11 @@
if (loaded) {
return self();
}
+
+ logger.atFine().log(
+ "Load %s for change %s of project %s from %s (%s)",
+ getClass().getSimpleName(), getChangeId(), getProjectName(), getRefName(), primaryStorage);
+
boolean read = args.migration.readChanges();
if (!read && primaryStorage == PrimaryStorage.NOTE_DB) {
throw new OrmException("NoteDb is required to read change " + changeId);
diff --git a/java/com/google/gerrit/server/patch/DiffExecutorModule.java b/java/com/google/gerrit/server/patch/DiffExecutorModule.java
index 5359479..f3776e0 100644
--- a/java/com/google/gerrit/server/patch/DiffExecutorModule.java
+++ b/java/com/google/gerrit/server/patch/DiffExecutorModule.java
@@ -15,6 +15,7 @@
package com.google.gerrit.server.patch;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Singleton;
@@ -32,6 +33,10 @@
@DiffExecutor
public ExecutorService createDiffExecutor() {
return Executors.newCachedThreadPool(
- new ThreadFactoryBuilder().setNameFormat("Diff-%d").setDaemon(true).build());
+ new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
+ .setNameFormat("Diff-%d")
+ .setDaemon(true)
+ .build());
}
}
diff --git a/java/com/google/gerrit/server/permissions/PermissionDeniedException.java b/java/com/google/gerrit/server/permissions/PermissionDeniedException.java
new file mode 100644
index 0000000..6018263
--- /dev/null
+++ b/java/com/google/gerrit/server/permissions/PermissionDeniedException.java
@@ -0,0 +1,58 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.permissions;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.gerrit.extensions.api.access.GerritPermission;
+import com.google.gerrit.extensions.restapi.AuthException;
+import java.util.Optional;
+
+/**
+ * This signals that some permission check failed. The message is short so it can be printed on a
+ * single line in the Git output.
+ */
+public class PermissionDeniedException extends AuthException {
+ private static final long serialVersionUID = 1L;
+
+ public static final String MESSAGE_PREFIX = "not permitted: ";
+
+ private final GerritPermission permission;
+ private final Optional<String> resource;
+
+ public PermissionDeniedException(GerritPermission permission) {
+ super(MESSAGE_PREFIX + checkNotNull(permission).describeForException());
+ this.permission = permission;
+ this.resource = Optional.empty();
+ }
+
+ public PermissionDeniedException(GerritPermission permission, String resource) {
+ super(
+ MESSAGE_PREFIX
+ + checkNotNull(permission).describeForException()
+ + " on "
+ + checkNotNull(resource));
+ this.permission = permission;
+ this.resource = Optional.of(resource);
+ }
+
+ public String describePermission() {
+ return permission.describeForException();
+ }
+
+ public Optional<String> getResource() {
+ return resource;
+ }
+}
diff --git a/java/com/google/gerrit/server/permissions/RefControl.java b/java/com/google/gerrit/server/permissions/RefControl.java
index 3bd2817..762dfbe 100644
--- a/java/com/google/gerrit/server/permissions/RefControl.java
+++ b/java/com/google/gerrit/server/permissions/RefControl.java
@@ -16,6 +16,7 @@
import static com.google.common.base.Preconditions.checkArgument;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.data.Permission;
import com.google.gerrit.common.data.PermissionRange;
import com.google.gerrit.common.data.PermissionRule;
@@ -41,6 +42,8 @@
/** Manages access control for Git references (aka branches, tags). */
class RefControl {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
private final IdentifiedUser.GenericFactory identifiedUserFactory;
private final ProjectControl projectControl;
private final String refName;
@@ -392,15 +395,37 @@
/** True if the user has this permission. */
private boolean canPerform(String permissionName, boolean isChangeOwner, boolean withForce) {
if (isBlocked(permissionName, isChangeOwner, withForce)) {
+ logger.atFine().log(
+ "'%s' cannot perform '%s' with force=%s on project '%s' for ref '%s'"
+ + " because this permission is blocked",
+ getUser().getLoggableName(),
+ permissionName,
+ withForce,
+ projectControl.getProject().getName(),
+ refName);
return false;
}
for (PermissionRule pr : relevant.getAllowRules(permissionName)) {
if (isAllow(pr, withForce) && projectControl.match(pr, isChangeOwner)) {
+ logger.atFine().log(
+ "'%s' can perform '%s' with force=%s on project '%s' for ref '%s'",
+ getUser().getLoggableName(),
+ permissionName,
+ withForce,
+ projectControl.getProject().getName(),
+ refName);
return true;
}
}
+ logger.atFine().log(
+ "'%s' cannot perform '%s' with force=%s on project '%s' for ref '%s'",
+ getUser().getLoggableName(),
+ permissionName,
+ withForce,
+ projectControl.getProject().getName(),
+ refName);
return false;
}
@@ -449,7 +474,88 @@
@Override
public void check(RefPermission perm) throws AuthException, PermissionBackendException {
if (!can(perm)) {
- throw new AuthException(perm.describeForException() + " not permitted for " + refName);
+ PermissionDeniedException pde = new PermissionDeniedException(perm, refName);
+ switch (perm) {
+ case UPDATE:
+ if (refName.equals(RefNames.REFS_CONFIG)) {
+ pde.setAdvice(
+ "Configuration changes can only be pushed by project owners\n"
+ + "who also have 'Push' rights on "
+ + RefNames.REFS_CONFIG);
+ } else {
+ pde.setAdvice("To push into this reference you need 'Push' rights.");
+ }
+ break;
+ case DELETE:
+ pde.setAdvice(
+ "You need 'Delete Reference' rights or 'Push' rights with the \n"
+ + "'Force Push' flag set to delete references.");
+ break;
+ case CREATE_CHANGE:
+ // This is misleading in the default permission backend, since "create change" on a
+ // branch is encoded as "push" on refs/for/DESTINATION.
+ pde.setAdvice(
+ "You need 'Create Change' rights to upload code review requests.\n"
+ + "Verify that you are pushing to the right branch.");
+ break;
+ case CREATE:
+ pde.setAdvice("You need 'Create' rights to create new references.");
+ break;
+ case CREATE_SIGNED_TAG:
+ pde.setAdvice("You need 'Create Signed Tag' rights to push a signed tag.");
+ break;
+ case CREATE_TAG:
+ pde.setAdvice("You need 'Create Tag' rights to push a normal tag.");
+ break;
+ case FORCE_UPDATE:
+ pde.setAdvice(
+ "You need 'Push' rights with 'Force' flag set to do a non-fastforward push.");
+ break;
+ case FORGE_AUTHOR:
+ pde.setAdvice(
+ "You need 'Forge Author' rights to push commits with another user as author.");
+ break;
+ case FORGE_COMMITTER:
+ pde.setAdvice(
+ "You need 'Forge Committer' rights to push commits with another user as committer.");
+ break;
+ case FORGE_SERVER:
+ pde.setAdvice(
+ "You need 'Forge Server' rights to push merge commits authored by the server.");
+ break;
+ case MERGE:
+ pde.setAdvice(
+ "You need 'Push Merge' in addition to 'Push' rights to push merge commits.");
+ break;
+
+ case READ:
+ pde.setAdvice("You need 'Read' rights to fetch or clone this ref.");
+ break;
+
+ case READ_CONFIG:
+ pde.setAdvice("You need 'Read' rights on refs/meta/config to see the configuration.");
+ break;
+ case READ_PRIVATE_CHANGES:
+ pde.setAdvice("You need 'Read Private Changes' to see private changes.");
+ break;
+ case SET_HEAD:
+ pde.setAdvice("You need 'Set HEAD' rights to set the default branch.");
+ break;
+ case SKIP_VALIDATION:
+ pde.setAdvice(
+ "You need 'Forge Author', 'Forge Server', 'Forge Committer'\n"
+ + "and 'Push Merge' rights to skip validation.");
+ break;
+ case UPDATE_BY_SUBMIT:
+ pde.setAdvice(
+ "You need 'Submit' rights on refs/for/ to submit changes during change upload.");
+ break;
+
+ case WRITE_CONFIG:
+ pde.setAdvice("You need 'Write' rights on refs/meta/config.");
+ break;
+ }
+ throw pde;
}
}
diff --git a/java/com/google/gerrit/server/project/ProjectCacheClock.java b/java/com/google/gerrit/server/project/ProjectCacheClock.java
index 5d208f3..188ee08 100644
--- a/java/com/google/gerrit/server/project/ProjectCacheClock.java
+++ b/java/com/google/gerrit/server/project/ProjectCacheClock.java
@@ -18,6 +18,7 @@
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.util.concurrent.Executors;
@@ -56,6 +57,7 @@
Executors.newScheduledThreadPool(
1,
new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
.setNameFormat("ProjectCacheClock-%d")
.setDaemon(true)
.setPriority(Thread.MIN_PRIORITY)
diff --git a/java/com/google/gerrit/server/project/ProjectCacheImpl.java b/java/com/google/gerrit/server/project/ProjectCacheImpl.java
index df80e35..090c4f5 100644
--- a/java/com/google/gerrit/server/project/ProjectCacheImpl.java
+++ b/java/com/google/gerrit/server/project/ProjectCacheImpl.java
@@ -269,6 +269,7 @@
@Override
public ProjectState load(String projectName) throws Exception {
+ logger.atFine().log("Loading project %s", projectName);
long now = clock.read();
Project.NameKey key = new Project.NameKey(projectName);
try (Repository git = mgr.openRepository(key)) {
@@ -298,6 +299,7 @@
@Override
public ImmutableSortedSet<Project.NameKey> load(ListKey key) throws Exception {
+ logger.atFine().log("Loading project list");
return ImmutableSortedSet.copyOf(mgr.list());
}
}
diff --git a/java/com/google/gerrit/server/project/ProjectCacheWarmer.java b/java/com/google/gerrit/server/project/ProjectCacheWarmer.java
index 7ebbc51..adfaf62 100644
--- a/java/com/google/gerrit/server/project/ProjectCacheWarmer.java
+++ b/java/com/google/gerrit/server/project/ProjectCacheWarmer.java
@@ -19,6 +19,7 @@
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.config.GerritServerConfig;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -46,7 +47,10 @@
ThreadPoolExecutor pool =
new ScheduledThreadPoolExecutor(
config.getInt("cache", "projects", "loadThreads", cpus),
- new ThreadFactoryBuilder().setNameFormat("ProjectCacheLoader-%d").build());
+ new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
+ .setNameFormat("ProjectCacheLoader-%d")
+ .build());
Thread scheduler =
new Thread(
() -> {
diff --git a/java/com/google/gerrit/server/query/account/AccountIsVisibleToPredicate.java b/java/com/google/gerrit/server/query/account/AccountIsVisibleToPredicate.java
index cc9fc0d..bd7b7fe 100644
--- a/java/com/google/gerrit/server/query/account/AccountIsVisibleToPredicate.java
+++ b/java/com/google/gerrit/server/query/account/AccountIsVisibleToPredicate.java
@@ -14,6 +14,7 @@
package com.google.gerrit.server.query.account;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.index.query.IsVisibleToPredicate;
import com.google.gerrit.server.account.AccountControl;
import com.google.gerrit.server.account.AccountState;
@@ -21,6 +22,8 @@
import com.google.gwtorm.server.OrmException;
public class AccountIsVisibleToPredicate extends IsVisibleToPredicate<AccountState> {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
protected final AccountControl accountControl;
public AccountIsVisibleToPredicate(AccountControl accountControl) {
@@ -30,7 +33,11 @@
@Override
public boolean match(AccountState accountState) throws OrmException {
- return accountControl.canSee(accountState);
+ boolean canSee = accountControl.canSee(accountState);
+ if (!canSee) {
+ logger.atFine().log("Filter out non-visible account: %s", accountState);
+ }
+ return canSee;
}
@Override
diff --git a/java/com/google/gerrit/server/query/change/ChangeIsVisibleToPredicate.java b/java/com/google/gerrit/server/query/change/ChangeIsVisibleToPredicate.java
index 27baef1..f81ea15 100644
--- a/java/com/google/gerrit/server/query/change/ChangeIsVisibleToPredicate.java
+++ b/java/com/google/gerrit/server/query/change/ChangeIsVisibleToPredicate.java
@@ -74,10 +74,11 @@
try {
ProjectState projectState = projectCache.checkedGet(cd.project());
if (projectState == null) {
- logger.atInfo().log("No such project: %s", cd.project());
+ logger.atFine().log("Filter out change %s of non-existing project %s", cd, cd.project());
return false;
}
if (!projectState.statePermitsRead()) {
+ logger.atFine().log("Filter out change %s of non-readable project %s", cd, cd.project());
return false;
}
} catch (IOException e) {
@@ -94,11 +95,13 @@
Throwable cause = e.getCause();
if (cause instanceof RepositoryNotFoundException) {
logger.atWarning().withCause(e).log(
- "Skipping change %s because the corresponding repository was not found", cd.getId());
+ "Filter out change %s because the corresponding repository %s was not found",
+ cd, cd.project());
return false;
}
throw new OrmException("unable to check permissions on change " + cd.getId(), e);
} catch (AuthException e) {
+ logger.atFine().log("Filter out non-visible change: %s", cd);
return false;
}
diff --git a/java/com/google/gerrit/server/query/group/GroupIsVisibleToPredicate.java b/java/com/google/gerrit/server/query/group/GroupIsVisibleToPredicate.java
index ffa59c2..144a81c 100644
--- a/java/com/google/gerrit/server/query/group/GroupIsVisibleToPredicate.java
+++ b/java/com/google/gerrit/server/query/group/GroupIsVisibleToPredicate.java
@@ -14,6 +14,7 @@
package com.google.gerrit.server.query.group;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.common.errors.NoSuchGroupException;
import com.google.gerrit.index.query.IsVisibleToPredicate;
import com.google.gerrit.server.CurrentUser;
@@ -24,6 +25,8 @@
import com.google.gwtorm.server.OrmException;
public class GroupIsVisibleToPredicate extends IsVisibleToPredicate<InternalGroup> {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
protected final GroupControl.GenericFactory groupControlFactory;
protected final CurrentUser user;
@@ -37,7 +40,11 @@
@Override
public boolean match(InternalGroup group) throws OrmException {
try {
- return groupControlFactory.controlFor(user, group.getGroupUUID()).isVisible();
+ boolean canSee = groupControlFactory.controlFor(user, group.getGroupUUID()).isVisible();
+ if (!canSee) {
+ logger.atFine().log("Filter out non-visible group: %s", group.getGroupUUID());
+ }
+ return canSee;
} catch (NoSuchGroupException e) {
// Ignored
return false;
diff --git a/java/com/google/gerrit/server/query/project/ProjectIsVisibleToPredicate.java b/java/com/google/gerrit/server/query/project/ProjectIsVisibleToPredicate.java
index dc567ea..b1c5af0 100644
--- a/java/com/google/gerrit/server/query/project/ProjectIsVisibleToPredicate.java
+++ b/java/com/google/gerrit/server/query/project/ProjectIsVisibleToPredicate.java
@@ -14,6 +14,7 @@
package com.google.gerrit.server.query.project;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.index.project.ProjectData;
import com.google.gerrit.index.query.IsVisibleToPredicate;
import com.google.gerrit.server.CurrentUser;
@@ -24,6 +25,8 @@
import com.google.gwtorm.server.OrmException;
public class ProjectIsVisibleToPredicate extends IsVisibleToPredicate<ProjectData> {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
protected final PermissionBackend permissionBackend;
protected final CurrentUser user;
@@ -36,13 +39,19 @@
@Override
public boolean match(ProjectData pd) throws OrmException {
if (!pd.getProject().getState().permitsRead()) {
+ logger.atFine().log("Filter out non-readable project: %s", pd);
return false;
}
- return permissionBackend
- .user(user)
- .project(pd.getProject().getNameKey())
- .testOrFalse(ProjectPermission.ACCESS);
+ boolean canSee =
+ permissionBackend
+ .user(user)
+ .project(pd.getProject().getNameKey())
+ .testOrFalse(ProjectPermission.ACCESS);
+ if (!canSee) {
+ logger.atFine().log("Filter out non-visible project: %s", pd);
+ }
+ return canSee;
}
@Override
diff --git a/java/com/google/gerrit/server/restapi/change/ReviewerRecommender.java b/java/com/google/gerrit/server/restapi/change/ReviewerRecommender.java
index 47c6970..6b7a708 100644
--- a/java/com/google/gerrit/server/restapi/change/ReviewerRecommender.java
+++ b/java/com/google/gerrit/server/restapi/change/ReviewerRecommender.java
@@ -107,8 +107,13 @@
ProjectState projectState,
List<Account.Id> candidateList)
throws OrmException, IOException, ConfigInvalidException {
+ logger.atFine().log("Candidates %s", candidateList);
+
String query = suggestReviewers.getQuery();
+ logger.atFine().log("query: %s", query);
+
double baseWeight = config.getInt("addReviewer", "baseWeight", 1);
+ logger.atFine().log("base weight: %s", baseWeight);
Map<Account.Id, MutableDouble> reviewerScores;
if (Strings.isNullOrEmpty(query)) {
@@ -116,6 +121,7 @@
} else {
reviewerScores = baseRankingForCandidateList(candidateList, projectState, baseWeight);
}
+ logger.atFine().log("Base reviewer scores: %s", reviewerScores);
// Send the query along with a candidate list to all plugins and merge the
// results. Plugins don't necessarily need to use the candidates list, they
@@ -163,6 +169,7 @@
}
}
}
+ logger.atFine().log("Reviewer scores: %s", reviewerScores);
} catch (ExecutionException | InterruptedException e) {
logger.atSevere().withCause(e).log("Exception while suggesting reviewers");
return ImmutableList.of();
@@ -170,12 +177,20 @@
if (changeNotes != null) {
// Remove change owner
- reviewerScores.remove(changeNotes.getChange().getOwner());
+ if (reviewerScores.remove(changeNotes.getChange().getOwner()) != null) {
+ logger.atFine().log("Remove change owner %s", changeNotes.getChange().getOwner());
+ }
// Remove existing reviewers
- reviewerScores
- .keySet()
- .removeAll(approvalsUtil.getReviewers(dbProvider.get(), changeNotes).byState(REVIEWER));
+ approvalsUtil
+ .getReviewers(dbProvider.get(), changeNotes)
+ .byState(REVIEWER)
+ .forEach(
+ r -> {
+ if (reviewerScores.remove(r) != null) {
+ logger.atFine().log("Remove existing reviewer %s", r);
+ }
+ });
}
// Sort results
@@ -185,6 +200,7 @@
.stream()
.sorted(Collections.reverseOrder(Map.Entry.comparingByValue()));
List<Account.Id> sortedSuggestions = sorted.map(Map.Entry::getKey).collect(toList());
+ logger.atFine().log("Sorted suggestions: %s", sortedSuggestions);
return sortedSuggestions;
}
diff --git a/java/com/google/gerrit/server/restapi/change/ReviewersUtil.java b/java/com/google/gerrit/server/restapi/change/ReviewersUtil.java
index 57ff0a3..3becf24 100644
--- a/java/com/google/gerrit/server/restapi/change/ReviewersUtil.java
+++ b/java/com/google/gerrit/server/restapi/change/ReviewersUtil.java
@@ -44,6 +44,7 @@
import com.google.gerrit.server.account.AccountControl;
import com.google.gerrit.server.account.AccountDirectory.FillOptions;
import com.google.gerrit.server.account.AccountLoader;
+import com.google.gerrit.server.account.AccountState;
import com.google.gerrit.server.account.GroupBackend;
import com.google.gerrit.server.account.GroupMembers;
import com.google.gerrit.server.index.account.AccountField;
@@ -228,24 +229,30 @@
// For performance reasons we don't use AccountQueryProvider as it would always load the
// complete account from the cache (or worse, from NoteDb) even though we only need the ID
// which we can directly get from the returned results.
+ Predicate<AccountState> pred =
+ Predicate.and(
+ AccountPredicates.isActive(),
+ accountQueryBuilder.defaultQuery(suggestReviewers.getQuery()));
+ logger.atFine().log("accounts index query: %s", pred);
ResultSet<FieldBundle> result =
accountIndexes
.getSearchIndex()
.getSource(
- Predicate.and(
- AccountPredicates.isActive(),
- accountQueryBuilder.defaultQuery(suggestReviewers.getQuery())),
+ pred,
QueryOptions.create(
indexConfig,
0,
suggestReviewers.getLimit() * CANDIDATE_LIST_MULTIPLIER,
ImmutableSet.of(AccountField.ID.getName())))
.readRaw();
- return result
- .toList()
- .stream()
- .map(f -> new Account.Id(f.getValue(AccountField.ID).intValue()))
- .collect(toList());
+ List<Account.Id> matches =
+ result
+ .toList()
+ .stream()
+ .map(f -> new Account.Id(f.getValue(AccountField.ID).intValue()))
+ .collect(toList());
+ logger.atFine().log("Matches: %s", matches);
+ return matches;
} catch (QueryParseException e) {
return ImmutableList.of();
}
@@ -374,8 +381,10 @@
GroupAsReviewer result = new GroupAsReviewer();
int maxAllowed = suggestReviewers.getMaxAllowed();
int maxAllowedWithoutConfirmation = suggestReviewers.getMaxAllowedWithoutConfirmation();
+    logger.atFine().log("maxAllowedWithoutConfirmation: %d", maxAllowedWithoutConfirmation);
if (!PostReviewers.isLegalReviewerGroup(group.getUUID())) {
+ logger.atFine().log("Ignore group %s that is not legal as reviewer", group.getUUID());
return result;
}
@@ -383,6 +392,7 @@
Set<Account> members = groupMembers.listAccounts(group.getUUID(), project.getNameKey());
if (members.isEmpty()) {
+ logger.atFine().log("Ignore group %s since it has no members", group.getUUID());
return result;
}
@@ -392,6 +402,11 @@
}
boolean needsConfirmation = result.size > maxAllowedWithoutConfirmation;
+ if (needsConfirmation) {
+ logger.atFine().log(
+ "group %s needs confirmation to be added as reviewer, it has %d members",
+ group.getUUID(), result.size);
+ }
// require that at least one member in the group can see the change
for (Account account : members) {
@@ -401,9 +416,12 @@
} else {
result.allowed = true;
}
+ logger.atFine().log("Suggest group %s", group.getUUID());
return result;
}
}
+ logger.atFine().log(
+ "Ignore group %s since none of its members can see the change", group.getUUID());
} catch (NoSuchProjectException e) {
return result;
}
diff --git a/java/com/google/gerrit/server/restapi/change/SuggestReviewers.java b/java/com/google/gerrit/server/restapi/change/SuggestReviewers.java
index 8b39e8e..b1d49f2 100644
--- a/java/com/google/gerrit/server/restapi/change/SuggestReviewers.java
+++ b/java/com/google/gerrit/server/restapi/change/SuggestReviewers.java
@@ -16,6 +16,7 @@
import static com.google.gerrit.server.config.GerritConfigListenerHelper.acceptIfChanged;
+import com.google.common.flogger.FluentLogger;
import com.google.gerrit.extensions.common.AccountVisibility;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.config.ConfigKey;
@@ -27,6 +28,8 @@
import org.kohsuke.args4j.Option;
public class SuggestReviewers {
+ private static final FluentLogger logger = FluentLogger.forEnclosingClass();
+
private static final int DEFAULT_MAX_SUGGESTED = 10;
protected final Provider<ReviewDb> dbProvider;
@@ -101,6 +104,8 @@
"addreviewer",
"maxWithoutConfirmation",
PostReviewers.DEFAULT_MAX_REVIEWERS_WITHOUT_CHECK);
+
+ logger.atFine().log("AccountVisibility: %s", av.name());
}
public static GerritConfigListener configListener() {
diff --git a/java/com/google/gerrit/server/submit/FastForwardOp.java b/java/com/google/gerrit/server/submit/FastForwardOp.java
index f1749f4..08f5abb 100644
--- a/java/com/google/gerrit/server/submit/FastForwardOp.java
+++ b/java/com/google/gerrit/server/submit/FastForwardOp.java
@@ -28,6 +28,7 @@
@Override
protected void updateRepoImpl(RepoContext ctx) throws IntegrationException {
if (args.project.is(BooleanProjectConfig.REJECT_EMPTY_COMMIT)
+ && toMerge.getParentCount() > 0
&& toMerge.getTree().equals(toMerge.getParent(0).getTree())) {
toMerge.setStatusCode(EMPTY_COMMIT);
return;
diff --git a/java/com/google/gerrit/server/submit/GitModules.java b/java/com/google/gerrit/server/submit/GitModules.java
index fd9a6fa..1fccbdd 100644
--- a/java/com/google/gerrit/server/submit/GitModules.java
+++ b/java/com/google/gerrit/server/submit/GitModules.java
@@ -22,7 +22,6 @@
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.project.NoSuchProjectException;
import com.google.gerrit.server.submit.MergeOpRepoManager.OpenRepo;
-import com.google.gerrit.server.util.RequestId;
import com.google.gerrit.server.util.git.SubmoduleSectionParser;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
@@ -51,7 +50,6 @@
private static final String GIT_MODULES = ".gitmodules";
- private final RequestId submissionId;
private final Branch.NameKey branch;
Set<SubmoduleSubscription> subscriptions;
@@ -62,9 +60,8 @@
@Assisted MergeOpRepoManager orm)
throws IOException {
this.branch = branch;
- this.submissionId = orm.getSubmissionId();
Project.NameKey project = branch.getParentKey();
- logDebug("Loading .gitmodules of %s for project %s", branch, project);
+ logger.atFine().log("Loading .gitmodules of %s for project %s", branch, project);
try {
OpenRepo or = orm.getRepo(project);
ObjectId id = or.repo.resolve(branch.get());
@@ -76,7 +73,7 @@
try (TreeWalk tw = TreeWalk.forPath(or.repo, GIT_MODULES, commit.getTree())) {
if (tw == null || (tw.getRawMode(0) & FileMode.TYPE_MASK) != FileMode.TYPE_FILE) {
subscriptions = Collections.emptySet();
- logDebug("The .gitmodules file doesn't exist in %s", branch);
+ logger.atFine().log("The .gitmodules file doesn't exist in %s", branch);
return;
}
}
@@ -94,22 +91,14 @@
}
public Collection<SubmoduleSubscription> subscribedTo(Branch.NameKey src) {
- logDebug("Checking for a subscription of %s for %s", src, branch);
+ logger.atFine().log("Checking for a subscription of %s for %s", src, branch);
Collection<SubmoduleSubscription> ret = new ArrayList<>();
for (SubmoduleSubscription s : subscriptions) {
if (s.getSubmodule().equals(src)) {
- logDebug("Found %s", s);
+ logger.atFine().log("Found %s", s);
ret.add(s);
}
}
return ret;
}
-
- private void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(submissionId + msg, arg);
- }
-
- private void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
- logger.atFine().log(submissionId + msg, arg1, arg2);
- }
}
diff --git a/java/com/google/gerrit/server/submit/MergeOp.java b/java/com/google/gerrit/server/submit/MergeOp.java
index 9cfa272..0538f97 100644
--- a/java/com/google/gerrit/server/submit/MergeOp.java
+++ b/java/com/google/gerrit/server/submit/MergeOp.java
@@ -63,6 +63,7 @@
import com.google.gerrit.server.git.MergeTip;
import com.google.gerrit.server.git.validators.MergeValidationException;
import com.google.gerrit.server.git.validators.MergeValidators;
+import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.project.NoSuchProjectException;
@@ -455,78 +456,83 @@
this.dryrun = dryrun;
this.caller = caller;
this.ts = TimeUtil.nowTs();
- submissionId = RequestId.forChange(change);
this.db = db;
- openRepoManager();
+ this.submissionId = RequestId.forChange(change);
- logDebug("Beginning integration of %s", change);
- try {
- ChangeSet indexBackedChangeSet =
- mergeSuperSet.setMergeOpRepoManager(orm).completeChangeSet(db, change, caller);
- checkState(
- indexBackedChangeSet.ids().contains(change.getId()),
- "change %s missing from %s",
- change.getId(),
- indexBackedChangeSet);
- if (indexBackedChangeSet.furtherHiddenChanges()) {
- throw new AuthException(
- "A change to be submitted with " + change.getId() + " is not visible");
+ try (TraceContext traceContext =
+ TraceContext.open().addTag(RequestId.Type.SUBMISSION_ID, submissionId)) {
+ openRepoManager();
+
+ logger.atFine().log("Beginning integration of %s", change);
+ try {
+ ChangeSet indexBackedChangeSet =
+ mergeSuperSet.setMergeOpRepoManager(orm).completeChangeSet(db, change, caller);
+ checkState(
+ indexBackedChangeSet.ids().contains(change.getId()),
+ "change %s missing from %s",
+ change.getId(),
+ indexBackedChangeSet);
+ if (indexBackedChangeSet.furtherHiddenChanges()) {
+ throw new AuthException(
+ "A change to be submitted with " + change.getId() + " is not visible");
+ }
+ logger.atFine().log("Calculated to merge %s", indexBackedChangeSet);
+
+ // Reload ChangeSet so that we don't rely on (potentially) stale index data for merging
+ ChangeSet cs = reloadChanges(indexBackedChangeSet);
+
+ // Count cross-project submissions outside of the retry loop. The chance of a single project
+ // failing increases with the number of projects, so the failure count would be inflated if
+ // this metric were incremented inside of integrateIntoHistory.
+ int projects = cs.projects().size();
+ if (projects > 1) {
+ topicMetrics.topicSubmissions.increment();
+ }
+
+ RetryTracker retryTracker = new RetryTracker();
+ retryHelper.execute(
+ updateFactory -> {
+ long attempt = retryTracker.lastAttemptNumber + 1;
+ boolean isRetry = attempt > 1;
+ if (isRetry) {
+ logger.atFine().log("Retrying, attempt #%d; skipping merged changes", attempt);
+ this.ts = TimeUtil.nowTs();
+ openRepoManager();
+ }
+ this.commitStatus = new CommitStatus(cs, isRetry);
+ if (checkSubmitRules) {
+ logger.atFine().log("Checking submit rules and state");
+ checkSubmitRulesAndState(cs, isRetry);
+ } else {
+ logger.atFine().log("Bypassing submit rules");
+ bypassSubmitRules(cs, isRetry);
+ }
+ try {
+ integrateIntoHistory(cs);
+ } catch (IntegrationException e) {
+ logger.atSevere().withCause(e).log("Error from integrateIntoHistory");
+ throw new ResourceConflictException(e.getMessage(), e);
+ }
+ return null;
+ },
+ RetryHelper.options()
+ .listener(retryTracker)
+ // Up to the entire submit operation is retried, including possibly many projects.
+ // Multiply the timeout by the number of projects we're actually attempting to
+ // submit.
+ .timeout(
+ retryHelper
+ .getDefaultTimeout(ActionType.CHANGE_UPDATE)
+ .multipliedBy(cs.projects().size()))
+ .build());
+
+ if (projects > 1) {
+ topicMetrics.topicSubmissionsCompleted.increment();
+ }
+ } catch (IOException e) {
+ // Anything before the merge attempt is an error
+ throw new OrmException(e);
}
- logDebug("Calculated to merge %s", indexBackedChangeSet);
-
- // Reload ChangeSet so that we don't rely on (potentially) stale index data for merging
- ChangeSet cs = reloadChanges(indexBackedChangeSet);
-
- // Count cross-project submissions outside of the retry loop. The chance of a single project
- // failing increases with the number of projects, so the failure count would be inflated if
- // this metric were incremented inside of integrateIntoHistory.
- int projects = cs.projects().size();
- if (projects > 1) {
- topicMetrics.topicSubmissions.increment();
- }
-
- RetryTracker retryTracker = new RetryTracker();
- retryHelper.execute(
- updateFactory -> {
- long attempt = retryTracker.lastAttemptNumber + 1;
- boolean isRetry = attempt > 1;
- if (isRetry) {
- logDebug("Retrying, attempt #%d; skipping merged changes", attempt);
- this.ts = TimeUtil.nowTs();
- openRepoManager();
- }
- this.commitStatus = new CommitStatus(cs, isRetry);
- if (checkSubmitRules) {
- logDebug("Checking submit rules and state");
- checkSubmitRulesAndState(cs, isRetry);
- } else {
- logDebug("Bypassing submit rules");
- bypassSubmitRules(cs, isRetry);
- }
- try {
- integrateIntoHistory(cs);
- } catch (IntegrationException e) {
- logError("Error from integrateIntoHistory", e);
- throw new ResourceConflictException(e.getMessage(), e);
- }
- return null;
- },
- RetryHelper.options()
- .listener(retryTracker)
- // Up to the entire submit operation is retried, including possibly many projects.
- // Multiply the timeout by the number of projects we're actually attempting to submit.
- .timeout(
- retryHelper
- .getDefaultTimeout(ActionType.CHANGE_UPDATE)
- .multipliedBy(cs.projects().size()))
- .build());
-
- if (projects > 1) {
- topicMetrics.topicSubmissionsCompleted.increment();
- }
- } catch (IOException e) {
- // Anything before the merge attempt is an error
- throw new OrmException(e);
}
}
@@ -535,7 +541,7 @@
orm.close();
}
orm = ormProvider.get();
- orm.setContext(db, ts, caller, submissionId);
+ orm.setContext(db, ts, caller);
}
private ChangeSet reloadChanges(ChangeSet changeSet) {
@@ -581,7 +587,7 @@
private void integrateIntoHistory(ChangeSet cs)
throws IntegrationException, RestApiException, UpdateException {
checkArgument(!cs.furtherHiddenChanges(), "cannot integrate hidden changes into history");
- logDebug("Beginning merge attempt on %s", cs);
+ logger.atFine().log("Beginning merge attempt on %s", cs);
Map<Branch.NameKey, BranchBatch> toSubmit = new HashMap<>();
ListMultimap<Branch.NameKey, ChangeData> cbb;
@@ -609,7 +615,6 @@
batchUpdateFactory.execute(
orm.batchUpdates(allProjects),
new SubmitStrategyListener(submitInput, strategies, commitStatus),
- submissionId,
dryrun);
} catch (NoSuchProjectException e) {
throw new ResourceNotFoundException(e.getMessage());
@@ -723,7 +728,7 @@
throw new IntegrationException("Failed to determine already accepted commits.", e);
}
- logDebug("Found %d existing heads", alreadyAccepted.size());
+ logger.atFine().log("Found %d existing heads", alreadyAccepted.size());
return alreadyAccepted;
}
@@ -737,7 +742,7 @@
private BranchBatch validateChangeList(OpenRepo or, Collection<ChangeData> submitted)
throws IntegrationException {
- logDebug("Validating %d changes", submitted.size());
+ logger.atFine().log("Validating %d changes", submitted.size());
Set<CodeReviewCommit> toSubmit = new LinkedHashSet<>(submitted.size());
SetMultimap<ObjectId, PatchSet.Id> revisions = getRevisions(or, submitted);
@@ -775,7 +780,7 @@
}
if (chg.currentPatchSetId() == null) {
String msg = "Missing current patch set on change";
- logError(msg + " " + changeId);
+ logger.atSevere().log("%s %s", msg, changeId);
commitStatus.problem(changeId, msg);
continue;
}
@@ -856,7 +861,7 @@
commit.add(or.canMergeFlag);
toSubmit.add(commit);
}
- logDebug("Submitting on this run: %s", toSubmit);
+ logger.atFine().log("Submitting on this run: %s", toSubmit);
return new AutoValue_MergeOp_BranchBatch(submitType, toSubmit);
}
@@ -894,7 +899,7 @@
try {
return orm.getRepo(project);
} catch (NoSuchProjectException e) {
- logWarn("Project " + project + " no longer exists, abandoning open changes.");
+ logger.atWarning().log("Project %s no longer exists, abandoning open changes.", project);
abandonAllOpenChangeForDeletedProject(project);
} catch (IOException e) {
throw new IntegrationException("Error opening project " + project, e);
@@ -907,7 +912,6 @@
for (ChangeData cd : queryProvider.get().byProjectOpen(destProject)) {
try (BatchUpdate bu =
batchUpdateFactory.create(db, destProject, internalUserFactory.create(), ts)) {
- bu.setRequestId(submissionId);
bu.addOp(
cd.getId(),
new BatchUpdateOp() {
@@ -936,12 +940,14 @@
try {
bu.execute();
} catch (UpdateException | RestApiException e) {
- logWarn("Cannot abandon changes for deleted project " + destProject, e);
+ logger.atWarning().withCause(e).log(
+ "Cannot abandon changes for deleted project %s", destProject);
}
}
}
} catch (OrmException e) {
- logWarn("Cannot abandon changes for deleted project " + destProject, e);
+ logger.atWarning().withCause(e).log(
+ "Cannot abandon changes for deleted project %s", destProject);
}
}
@@ -965,28 +971,4 @@
+ " projects involved; some projects may have submitted successfully, but others may have"
+ " failed";
}
-
- private void logDebug(String msg) {
- logger.atFine().log(submissionId + msg);
- }
-
- private void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(submissionId + msg, arg);
- }
-
- private void logWarn(String msg, Throwable t) {
- logger.atWarning().withCause(t).log("%s%s", submissionId, msg);
- }
-
- private void logWarn(String msg) {
- logger.atWarning().log("%s%s", submissionId, msg);
- }
-
- private void logError(String msg, Throwable t) {
- logger.atSevere().withCause(t).log("%s%s", submissionId, msg);
- }
-
- private void logError(String msg) {
- logError(msg, null);
- }
}
diff --git a/java/com/google/gerrit/server/submit/MergeOpRepoManager.java b/java/com/google/gerrit/server/submit/MergeOpRepoManager.java
index 67059e6..3f07ed7 100644
--- a/java/com/google/gerrit/server/submit/MergeOpRepoManager.java
+++ b/java/com/google/gerrit/server/submit/MergeOpRepoManager.java
@@ -31,7 +31,6 @@
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.update.BatchUpdate;
-import com.google.gerrit.server.util.RequestId;
import com.google.inject.Inject;
import java.io.IOException;
import java.sql.Timestamp;
@@ -112,7 +111,6 @@
batchUpdateFactory
.create(db, getProjectName(), caller, ts)
.setRepository(repo, rw, ins)
- .setRequestId(submissionId)
.setOnSubmitValidators(onSubmitValidatorsFactory.create());
}
return update;
@@ -162,7 +160,6 @@
private ReviewDb db;
private Timestamp ts;
private IdentifiedUser caller;
- private RequestId submissionId;
@Inject
MergeOpRepoManager(
@@ -178,15 +175,10 @@
openRepos = new HashMap<>();
}
- public void setContext(ReviewDb db, Timestamp ts, IdentifiedUser caller, RequestId submissionId) {
+ public void setContext(ReviewDb db, Timestamp ts, IdentifiedUser caller) {
this.db = db;
this.ts = ts;
this.caller = caller;
- this.submissionId = submissionId;
- }
-
- public RequestId getSubmissionId() {
- return submissionId;
}
public OpenRepo getRepo(Project.NameKey project) throws NoSuchProjectException, IOException {
diff --git a/java/com/google/gerrit/server/submit/SubmitStrategyOp.java b/java/com/google/gerrit/server/submit/SubmitStrategyOp.java
index abe3632..290e917 100644
--- a/java/com/google/gerrit/server/submit/SubmitStrategyOp.java
+++ b/java/com/google/gerrit/server/submit/SubmitStrategyOp.java
@@ -22,7 +22,6 @@
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.flogger.FluentLogger;
-import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.SubmitRecord;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.Branch;
@@ -103,7 +102,8 @@
@Override
public final void updateRepo(RepoContext ctx) throws Exception {
- logDebug("%s#updateRepo for change %s", getClass().getSimpleName(), toMerge.change().getId());
+ logger.atFine().log(
+ "%s#updateRepo for change %s", getClass().getSimpleName(), toMerge.change().getId());
checkState(
ctx.getRevWalk() == args.rw,
"SubmitStrategyOp requires callers to call BatchUpdate#setRepository with exactly the same"
@@ -117,18 +117,18 @@
if (alreadyMergedCommit == null) {
updateRepoImpl(ctx);
} else {
- logDebug("Already merged as %s", alreadyMergedCommit.name());
+ logger.atFine().log("Already merged as %s", alreadyMergedCommit.name());
}
CodeReviewCommit tipAfter = args.mergeTip.getCurrentTip();
if (Objects.equals(tipBefore, tipAfter)) {
- logDebug("Did not move tip", getClass().getSimpleName());
+ logger.atFine().log("Did not move tip");
return;
} else if (tipAfter == null) {
- logDebug("No merge tip, no update to perform");
+ logger.atFine().log("No merge tip, no update to perform");
return;
}
- logDebug("Moved tip from %s to %s", tipBefore, tipAfter);
+ logger.atFine().log("Moved tip from %s to %s", tipBefore, tipAfter);
checkProjectConfig(ctx, tipAfter);
@@ -144,7 +144,7 @@
throws IntegrationException {
String refName = getDest().get();
if (RefNames.REFS_CONFIG.equals(refName)) {
- logDebug("Loading new configuration from %s", RefNames.REFS_CONFIG);
+ logger.atFine().log("Loading new configuration from %s", RefNames.REFS_CONFIG);
try {
ProjectConfig cfg = new ProjectConfig(getProject());
cfg.load(ctx.getRevWalk(), commit);
@@ -216,7 +216,8 @@
@Override
public final boolean updateChange(ChangeContext ctx) throws Exception {
- logDebug("%s#updateChange for change %s", getClass().getSimpleName(), toMerge.change().getId());
+ logger.atFine().log(
+ "%s#updateChange for change %s", getClass().getSimpleName(), toMerge.change().getId());
toMerge.setNotes(ctx.getNotes()); // Update change and notes from ctx.
PatchSet.Id oldPsId = checkNotNull(toMerge.getPatchsetId());
PatchSet.Id newPsId;
@@ -225,12 +226,12 @@
// Either another thread won a race, or we are retrying a whole topic submission after one
// repo failed with lock failure.
if (alreadyMergedCommit == null) {
- logDebug(
+ logger.atFine().log(
"Change is already merged according to its status, but we were unable to find it"
+ " merged into the current tip (%s)",
args.mergeTip.getCurrentTip().name());
} else {
- logDebug("Change is already merged");
+ logger.atFine().log("Change is already merged");
}
changeAlreadyMerged = true;
return false;
@@ -276,7 +277,7 @@
checkNotNull(commit, "missing commit for change " + id);
CommitMergeStatus s = commit.getStatusCode();
checkNotNull(s, "status not set for change " + id + " expected to previously fail fast");
- logDebug("Status of change %s (%s) on %s: %s", id, commit.name(), c.getDest(), s);
+ logger.atFine().log("Status of change %s (%s) on %s: %s", id, commit.name(), c.getDest(), s);
setApproval(ctx, args.caller);
mergeResultRev =
@@ -302,7 +303,7 @@
private PatchSet getOrCreateAlreadyMergedPatchSet(ChangeContext ctx)
throws IOException, OrmException {
PatchSet.Id psId = alreadyMergedCommit.getPatchsetId();
- logDebug("Fixing up already-merged patch set %s", psId);
+ logger.atFine().log("Fixing up already-merged patch set %s", psId);
PatchSet prevPs = args.psUtil.current(ctx.getDb(), ctx.getNotes());
ctx.getRevWalk().parseBody(alreadyMergedCommit);
ctx.getChange()
@@ -310,7 +311,7 @@
psId, alreadyMergedCommit.getShortMessage(), ctx.getChange().getOriginalSubject());
PatchSet existing = args.psUtil.get(ctx.getDb(), ctx.getNotes(), psId);
if (existing != null) {
- logDebug("Patch set row exists, only updating change");
+ logger.atFine().log("Patch set row exists, only updating change");
return existing;
}
// No patch set for the already merged commit, although we know it came form
@@ -336,7 +337,7 @@
PatchSet.Id oldPsId = toMerge.getPatchsetId();
PatchSet.Id newPsId = ctx.getChange().currentPatchSetId();
- logDebug("Add approval for %s", id);
+ logger.atFine().log("Add approval for %s", id);
ChangeUpdate origPsUpdate = ctx.getUpdate(oldPsId);
origPsUpdate.putReviewer(user.getAccountId(), REVIEWER);
LabelNormalizer.Result normalized = approve(ctx, origPsUpdate);
@@ -397,7 +398,7 @@
// change happened.
for (PatchSetApproval psa : normalized.unchanged()) {
if (includeUnchanged || psa.isLegacySubmit()) {
- logDebug("Adding submit label %s", psa);
+ logger.atFine().log("Adding submit label %s", psa);
update.putApprovalFor(psa.getAccountId(), psa.getLabel(), psa.getValue());
}
}
@@ -488,7 +489,7 @@
private void setMerged(ChangeContext ctx, ChangeMessage msg) throws OrmException {
Change c = ctx.getChange();
ReviewDb db = ctx.getDb();
- logDebug("Setting change %s merged", c.getId());
+ logger.atFine().log("Setting change %s merged", c.getId());
c.setStatus(Change.Status.MERGED);
c.setSubmissionId(args.submissionId.toStringForStorage());
@@ -511,7 +512,7 @@
// If we naively execute postUpdate even if the change is already merged when updateChange
// being, then we are subject to a race where postUpdate steps are run twice if two submit
// processes run at the same time.
- logDebug("Skipping post-update steps for change %s", getId());
+ logger.atFine().log("Skipping post-update steps for change %s", getId());
return;
}
postUpdateImpl(ctx);
@@ -595,37 +596,4 @@
"cannot update gitlink for the commit at branch: " + args.destBranch);
}
}
-
- protected final void logDebug(String msg) {
- logger.atFine().log(this.args.submissionId + msg);
- }
-
- protected final void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(this.args.submissionId + msg, arg);
- }
-
- protected final void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
- logger.atFine().log(this.args.submissionId + msg, arg1, arg2);
- }
-
- protected final void logDebug(
- String msg,
- @Nullable Object arg1,
- @Nullable Object arg2,
- @Nullable Object arg3,
- @Nullable Object arg4) {
- logger.atFine().log(this.args.submissionId + msg, arg1, arg2, arg3, arg4);
- }
-
- protected final void logWarn(String msg, Throwable t) {
- logger.atWarning().withCause(t).log("%s%s", args.submissionId, msg);
- }
-
- protected void logError(String msg, Throwable t) {
- logger.atSevere().withCause(t).log("%s%s", args.submissionId, msg);
- }
-
- protected void logError(String msg) {
- logError(msg, null);
- }
}
diff --git a/java/com/google/gerrit/server/submit/SubmoduleOp.java b/java/com/google/gerrit/server/submit/SubmoduleOp.java
index 319e2e1..51e3c5d 100644
--- a/java/com/google/gerrit/server/submit/SubmoduleOp.java
+++ b/java/com/google/gerrit/server/submit/SubmoduleOp.java
@@ -15,12 +15,12 @@
package com.google.gerrit.server.submit;
import static java.util.Comparator.comparing;
+import static java.util.stream.Collectors.toList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.MultimapBuilder;
import com.google.common.collect.SetMultimap;
import com.google.common.flogger.FluentLogger;
-import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.SubscribeSection;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Branch;
@@ -140,17 +140,31 @@
private final MergeOpRepoManager orm;
private final Map<Branch.NameKey, GitModules> branchGitModules;
- // always update-to-current branch tips during submit process
+ /** Branches updated as part of the enclosing submit or push batch. */
+ private final ImmutableSet<Branch.NameKey> updatedBranches;
+
+ /**
+ * Current branch tips, taking into account commits created during the submit process as well as
+ * submodule updates produced by this class.
+ */
private final Map<Branch.NameKey, CodeReviewCommit> branchTips;
- // branches for all the submitting changes
- private final Set<Branch.NameKey> updatedBranches;
- // branches which in either a submodule or a superproject
+
+ /**
+ * Branches in a superproject that contain submodule subscriptions, plus branches in submodules
+ * which are subscribed to by some superproject.
+ */
private final Set<Branch.NameKey> affectedBranches;
- // sorted version of affectedBranches
+
+ /** Copy of {@link #affectedBranches}, sorted by submodule traversal order. */
private final ImmutableSet<Branch.NameKey> sortedBranches;
- // map of superproject branch and its submodule subscriptions
+
+ /** Multimap of superproject branch to submodule subscriptions contained in that branch. */
private final SetMultimap<Branch.NameKey, SubmoduleSubscription> targets;
- // map of superproject and its branches which has submodule subscriptions
+
+ /**
+ * Multimap of superproject name to all branch names within that superproject which have submodule
+ * subscriptions.
+ */
private final SetMultimap<Project.NameKey, Branch.NameKey> branchesByProject;
private SubmoduleOp(
@@ -174,29 +188,57 @@
cfg.getLong("submodule", "maxCombinedCommitMessageSize", 256 << 10);
this.maxCommitMessages = cfg.getLong("submodule", "maxCommitMessages", 1000);
this.orm = orm;
- this.updatedBranches = updatedBranches;
+ this.updatedBranches = ImmutableSet.copyOf(updatedBranches);
this.targets = MultimapBuilder.hashKeys().hashSetValues().build();
this.affectedBranches = new HashSet<>();
this.branchTips = new HashMap<>();
this.branchGitModules = new HashMap<>();
this.branchesByProject = MultimapBuilder.hashKeys().hashSetValues().build();
- this.sortedBranches = calculateSubscriptionMap();
+ this.sortedBranches = calculateSubscriptionMaps();
}
- private ImmutableSet<Branch.NameKey> calculateSubscriptionMap() throws SubmoduleException {
+ /**
+ * Calculate the internal maps used by the operation.
+ *
+ * <p>In addition to the return value, the following fields are populated as a side effect:
+ *
+ * <ul>
+ * <li>{@link #affectedBranches}
+ * <li>{@link #targets}
+ * <li>{@link #branchesByProject}
+ * </ul>
+ *
+ * @return the ordered set to be stored in {@link #sortedBranches}.
+ * @throws SubmoduleException if an error occurred walking projects.
+ */
+ // TODO(dborowitz): This setup process is hard to follow, in large part due to the accumulation of
+ // mutable maps, which makes this whole class difficult to understand.
+ //
+ // A cleaner architecture for this process might be:
+ // 1. Separate out the code to parse submodule subscriptions and build up an in-memory data
+ // structure representing the subscription graph, using a separate class with a properly-
+ // documented interface.
+ // 2. Walk the graph to produce a work plan. This would be a list of items indicating: create a
+ // commit in project X reading branch tips for submodules S1..Sn and updating gitlinks in X.
+ // 3. Execute the work plan, i.e. convert the items into BatchUpdate.Ops and add them to the
+ // relevant updates.
+ //
+ // In addition to improving readability, this approach has the advantage of making (1) and (2)
+ // testable using small tests.
+ private ImmutableSet<Branch.NameKey> calculateSubscriptionMaps() throws SubmoduleException {
if (!enableSuperProjectSubscriptions) {
- logDebug("Updating superprojects disabled");
+ logger.atFine().log("Updating superprojects disabled");
return null;
}
- logDebug("Calculating superprojects - submodules map");
+ logger.atFine().log("Calculating superprojects - submodules map");
LinkedHashSet<Branch.NameKey> allVisited = new LinkedHashSet<>();
for (Branch.NameKey updatedBranch : updatedBranches) {
if (allVisited.contains(updatedBranch)) {
continue;
}
- searchForSuperprojects(updatedBranch, new LinkedHashSet<Branch.NameKey>(), allVisited);
+ searchForSuperprojects(updatedBranch, new LinkedHashSet<>(), allVisited);
}
// Since the searchForSuperprojects will add all branches (related or
@@ -213,7 +255,7 @@
LinkedHashSet<Branch.NameKey> currentVisited,
LinkedHashSet<Branch.NameKey> allVisited)
throws SubmoduleException {
- logDebug("Now processing %s", current);
+ logger.atFine().log("Now processing %s", current);
if (currentVisited.contains(current)) {
throw new SubmoduleException(
@@ -275,9 +317,9 @@
private Collection<Branch.NameKey> getDestinationBranches(Branch.NameKey src, SubscribeSection s)
throws IOException {
Collection<Branch.NameKey> ret = new HashSet<>();
- logDebug("Inspecting SubscribeSection %s", s);
+ logger.atFine().log("Inspecting SubscribeSection %s", s);
for (RefSpec r : s.getMatchingRefSpecs()) {
- logDebug("Inspecting [matching] ref %s", r);
+ logger.atFine().log("Inspecting [matching] ref %s", r);
if (!r.matchSource(src.get())) {
continue;
}
@@ -295,7 +337,7 @@
}
for (RefSpec r : s.getMultiMatchRefSpecs()) {
- logDebug("Inspecting [all] ref %s", r);
+ logger.atFine().log("Inspecting [all] ref %s", r);
if (!r.matchSource(src.get())) {
continue;
}
@@ -319,17 +361,17 @@
}
}
}
- logDebug("Returning possible branches: %s for project %s", ret, s.getProject());
+ logger.atFine().log("Returning possible branches: %s for project %s", ret, s.getProject());
return ret;
}
- public Collection<SubmoduleSubscription> superProjectSubscriptionsForSubmoduleBranch(
+ private Collection<SubmoduleSubscription> superProjectSubscriptionsForSubmoduleBranch(
Branch.NameKey srcBranch) throws IOException {
- logDebug("Calculating possible superprojects for %s", srcBranch);
+ logger.atFine().log("Calculating possible superprojects for %s", srcBranch);
Collection<SubmoduleSubscription> ret = new ArrayList<>();
Project.NameKey srcProject = srcBranch.getParentKey();
for (SubscribeSection s : projectCache.get(srcProject).getSubscribeSections(srcBranch)) {
- logDebug("Checking subscribe section %s", s);
+ logger.atFine().log("Checking subscribe section %s", s);
Collection<Branch.NameKey> branches = getDestinationBranches(srcBranch, s);
for (Branch.NameKey targetBranch : branches) {
Project.NameKey targetProject = targetBranch.getParentKey();
@@ -337,11 +379,11 @@
OpenRepo or = orm.getRepo(targetProject);
ObjectId id = or.repo.resolve(targetBranch.get());
if (id == null) {
- logDebug("The branch %s doesn't exist.", targetBranch);
+ logger.atFine().log("The branch %s doesn't exist.", targetBranch);
continue;
}
} catch (NoSuchProjectException e) {
- logDebug("The project %s doesn't exist", targetProject);
+ logger.atFine().log("The project %s doesn't exist", targetProject);
continue;
}
@@ -353,7 +395,7 @@
ret.addAll(m.subscribedTo(srcBranch));
}
}
- logDebug("Calculated superprojects for %s are %s", srcBranch, ret);
+ logger.atFine().log("Calculated superprojects for %s are %s", srcBranch, ret);
return ret;
}
@@ -376,15 +418,14 @@
}
}
}
- batchUpdateFactory.execute(
- orm.batchUpdates(superProjects), BatchUpdateListener.NONE, orm.getSubmissionId(), false);
+ batchUpdateFactory.execute(orm.batchUpdates(superProjects), BatchUpdateListener.NONE, false);
} catch (RestApiException | UpdateException | IOException | NoSuchProjectException e) {
throw new SubmoduleException("Cannot update gitlinks", e);
}
}
/** Create a separate gitlink commit */
- public CodeReviewCommit composeGitlinksCommit(Branch.NameKey subscriber)
+ private CodeReviewCommit composeGitlinksCommit(Branch.NameKey subscriber)
throws IOException, SubmoduleException {
OpenRepo or;
try {
@@ -406,14 +447,18 @@
addBranchTip(subscriber, currentCommit);
}
- StringBuilder msgbuf = new StringBuilder("");
+ StringBuilder msgbuf = new StringBuilder();
PersonIdent author = null;
DirCache dc = readTree(or.rw, currentCommit);
DirCacheEditor ed = dc.editor();
int count = 0;
- List<SubmoduleSubscription> subscriptions = new ArrayList<>(targets.get(subscriber));
- Collections.sort(subscriptions, comparing(SubmoduleSubscription::getPath));
+ List<SubmoduleSubscription> subscriptions =
+ targets
+ .get(subscriber)
+ .stream()
+ .sorted(comparing(SubmoduleSubscription::getPath))
+ .collect(toList());
for (SubmoduleSubscription s : subscriptions) {
if (count > 0) {
msgbuf.append("\n\n");
@@ -452,8 +497,7 @@
}
/** Amend an existing commit with gitlink updates */
- public CodeReviewCommit composeGitlinksCommit(
- Branch.NameKey subscriber, CodeReviewCommit currentCommit)
+ CodeReviewCommit composeGitlinksCommit(Branch.NameKey subscriber, CodeReviewCommit currentCommit)
throws IOException, SubmoduleException {
OpenRepo or;
try {
@@ -462,7 +506,7 @@
throw new SubmoduleException("Cannot access superproject", e);
}
- StringBuilder msgbuf = new StringBuilder("");
+ StringBuilder msgbuf = new StringBuilder();
DirCache dc = readTree(or.rw, currentCommit);
DirCacheEditor ed = dc.editor();
for (SubmoduleSubscription s : targets.get(subscriber)) {
@@ -496,6 +540,7 @@
private RevCommit updateSubmodule(
DirCache dc, DirCacheEditor ed, StringBuilder msgbuf, SubmoduleSubscription s)
throws SubmoduleException, IOException {
+ logger.atFine().log("Updating gitlink for %s", s);
OpenRepo subOr;
try {
subOr = orm.getRepo(s.getSubmodule().getParentKey());
@@ -523,7 +568,14 @@
// check that the old gitlink is a commit that actually exists. If not, then there is an
// inconsistency between the superproject and subproject state, and we don't want to risk
// making things worse by updating the gitlink to something else.
- oldCommit = subOr.rw.parseCommit(dce.getObjectId());
+ try {
+ oldCommit = subOr.rw.parseCommit(dce.getObjectId());
+ } catch (IOException e) {
+        // Broken gitlink; sanity check failed. Log the error and continue so the submit
+        // operation can proceed; it will just skip this gitlink update.
+ logger.atSevere().withCause(e).log("Failed to read commit %s", dce.getObjectId().name());
+ return null;
+ }
}
final CodeReviewCommit newCommit;
@@ -577,7 +629,8 @@
msgbuf.append(" from branch '");
msgbuf.append(s.getSubmodule().getShortName());
msgbuf.append("'");
- msgbuf.append("\n to " + newCommit.getName());
+ msgbuf.append("\n to ");
+ msgbuf.append(newCommit.getName());
// newly created submodule gitlink, do not append whole history
if (oldCommit == null) {
@@ -628,7 +681,7 @@
return dc;
}
- public ImmutableSet<Project.NameKey> getProjectsInOrder() throws SubmoduleException {
+ ImmutableSet<Project.NameKey> getProjectsInOrder() throws SubmoduleException {
LinkedHashSet<Project.NameKey> projects = new LinkedHashSet<>();
for (Project.NameKey project : branchesByProject.keySet()) {
addAllSubmoduleProjects(project, new LinkedHashSet<>(), projects);
@@ -671,7 +724,7 @@
projects.add(project);
}
- public ImmutableSet<Branch.NameKey> getBranchesInOrder() {
+ ImmutableSet<Branch.NameKey> getBranchesInOrder() {
LinkedHashSet<Branch.NameKey> branches = new LinkedHashSet<>();
if (sortedBranches != null) {
branches.addAll(sortedBranches);
@@ -680,27 +733,15 @@
return ImmutableSet.copyOf(branches);
}
- public boolean hasSubscription(Branch.NameKey branch) {
+ boolean hasSubscription(Branch.NameKey branch) {
return targets.containsKey(branch);
}
- public void addBranchTip(Branch.NameKey branch, CodeReviewCommit tip) {
+ void addBranchTip(Branch.NameKey branch, CodeReviewCommit tip) {
branchTips.put(branch, tip);
}
- public void addOp(BatchUpdate bu, Branch.NameKey branch) {
+ void addOp(BatchUpdate bu, Branch.NameKey branch) {
bu.addRepoOnlyOp(new GitlinkOp(branch));
}
-
- private void logDebug(String msg) {
- logger.atFine().log(orm.getSubmissionId() + " " + msg);
- }
-
- private void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(orm.getSubmissionId() + " " + msg, arg);
- }
-
- private void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
- logger.atFine().log(orm.getSubmissionId() + " " + msg, arg1, arg2);
- }
}
diff --git a/java/com/google/gerrit/server/submit/TestHelperOp.java b/java/com/google/gerrit/server/submit/TestHelperOp.java
index 2f0a3f6..bbb198a 100644
--- a/java/com/google/gerrit/server/submit/TestHelperOp.java
+++ b/java/com/google/gerrit/server/submit/TestHelperOp.java
@@ -15,12 +15,10 @@
package com.google.gerrit.server.submit;
import com.google.common.flogger.FluentLogger;
-import com.google.gerrit.common.Nullable;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.server.change.TestSubmitInput;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.RepoContext;
-import com.google.gerrit.server.util.RequestId;
import java.io.IOException;
import java.util.Queue;
import org.eclipse.jgit.lib.ObjectId;
@@ -30,27 +28,22 @@
private final Change.Id changeId;
private final TestSubmitInput input;
- private final RequestId submissionId;
TestHelperOp(Change.Id changeId, SubmitStrategy.Arguments args) {
this.changeId = changeId;
this.input = (TestSubmitInput) args.submitInput;
- this.submissionId = args.submissionId;
}
@Override
public void updateRepo(RepoContext ctx) throws IOException {
Queue<Boolean> q = input.generateLockFailures;
if (q != null && !q.isEmpty() && q.remove()) {
- logDebug("Adding bogus ref update to trigger lock failure, via change %s", changeId);
+ logger.atFine().log(
+ "Adding bogus ref update to trigger lock failure, via change %s", changeId);
ctx.addRefUpdate(
ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"),
ObjectId.zeroId(),
"refs/test/" + getClass().getSimpleName());
}
}
-
- private void logDebug(String msg, @Nullable Object arg) {
- logger.atFine().log(submissionId + msg, arg);
- }
}
diff --git a/java/com/google/gerrit/server/update/BatchUpdate.java b/java/com/google/gerrit/server/update/BatchUpdate.java
index dd3cc73..f72512f 100644
--- a/java/com/google/gerrit/server/update/BatchUpdate.java
+++ b/java/com/google/gerrit/server/update/BatchUpdate.java
@@ -126,10 +126,7 @@
@SuppressWarnings({"rawtypes", "unchecked"})
public void execute(
- Collection<BatchUpdate> updates,
- BatchUpdateListener listener,
- @Nullable RequestId requestId,
- boolean dryRun)
+ Collection<BatchUpdate> updates, BatchUpdateListener listener, boolean dryRun)
throws UpdateException, RestApiException {
checkNotNull(listener);
checkDifferentProject(updates);
@@ -141,11 +138,11 @@
if (migration.disableChangeReviewDb()) {
ImmutableList<NoteDbBatchUpdate> noteDbUpdates =
(ImmutableList) ImmutableList.copyOf(updates);
- NoteDbBatchUpdate.execute(noteDbUpdates, listener, requestId, dryRun);
+ NoteDbBatchUpdate.execute(noteDbUpdates, listener, dryRun);
} else {
ImmutableList<ReviewDbBatchUpdate> reviewDbUpdates =
(ImmutableList) ImmutableList.copyOf(updates);
- ReviewDbBatchUpdate.execute(reviewDbUpdates, listener, requestId, dryRun);
+ ReviewDbBatchUpdate.execute(reviewDbUpdates, listener, dryRun);
}
}
@@ -159,20 +156,6 @@
}
}
- static void setRequestIds(
- Collection<? extends BatchUpdate> updates, @Nullable RequestId requestId) {
- if (requestId != null) {
- for (BatchUpdate u : updates) {
- checkArgument(
- u.requestId == null || u.requestId == requestId,
- "refusing to overwrite RequestId %s in update with %s",
- u.requestId,
- requestId);
- u.setRequestId(requestId);
- }
- }
- }
-
static Order getOrder(Collection<? extends BatchUpdate> updates, BatchUpdateListener listener) {
Order o = null;
for (BatchUpdate u : updates) {
@@ -248,7 +231,6 @@
protected BatchRefUpdate batchRefUpdate;
protected Order order;
protected OnSubmitValidators onSubmitValidators;
- protected RequestId requestId;
protected PushCertificate pushCert;
protected String refLogMessage;
@@ -284,11 +266,6 @@
protected abstract Context newContext();
- public BatchUpdate setRequestId(RequestId requestId) {
- this.requestId = requestId;
- return this;
- }
-
public BatchUpdate setRepository(Repository repo, RevWalk revWalk, ObjectInserter inserter) {
checkState(this.repoView == null, "repo already set");
repoView = new RepoView(repo, revWalk, inserter);
@@ -384,39 +361,39 @@
return this;
}
- protected void logDebug(String msg, Throwable t) {
+ protected static void logDebug(String msg, Throwable t) {
// Only log if there is a requestId assigned, since those are the
// expensive/complicated requests like MergeOp. Doing it every time would be
// noisy.
- if (requestId != null) {
- logger.atFine().withCause(t).log(requestId + "%s", msg);
+ if (RequestId.isSet()) {
+ logger.atFine().withCause(t).log("%s", msg);
}
}
- protected void logDebug(String msg) {
+ protected static void logDebug(String msg) {
// Only log if there is a requestId assigned, since those are the
// expensive/complicated requests like MergeOp. Doing it every time would be
// noisy.
- if (requestId != null) {
- logger.atFine().log(requestId + msg);
+ if (RequestId.isSet()) {
+ logger.atFine().log(msg);
}
}
- protected void logDebug(String msg, @Nullable Object arg) {
+ protected static void logDebug(String msg, @Nullable Object arg) {
// Only log if there is a requestId assigned, since those are the
// expensive/complicated requests like MergeOp. Doing it every time would be
// noisy.
- if (requestId != null) {
- logger.atFine().log(requestId + msg, arg);
+ if (RequestId.isSet()) {
+ logger.atFine().log(msg, arg);
}
}
- protected void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
+ protected static void logDebug(String msg, @Nullable Object arg1, @Nullable Object arg2) {
// Only log if there is a requestId assigned, since those are the
// expensive/complicated requests like MergeOp. Doing it every time would be
// noisy.
- if (requestId != null) {
- logger.atFine().log(requestId + msg, arg1, arg2);
+ if (RequestId.isSet()) {
+ logger.atFine().log(msg, arg1, arg2);
}
}
}
diff --git a/java/com/google/gerrit/server/update/NoteDbBatchUpdate.java b/java/com/google/gerrit/server/update/NoteDbBatchUpdate.java
index 8612fac..abe865c 100644
--- a/java/com/google/gerrit/server/update/NoteDbBatchUpdate.java
+++ b/java/com/google/gerrit/server/update/NoteDbBatchUpdate.java
@@ -21,7 +21,6 @@
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
-import com.google.gerrit.common.Nullable;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
@@ -35,7 +34,6 @@
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.ChangeUpdate;
import com.google.gerrit.server.notedb.NoteDbUpdateManager;
-import com.google.gerrit.server.util.RequestId;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.assistedinject.Assisted;
@@ -68,15 +66,11 @@
}
static void execute(
- ImmutableList<NoteDbBatchUpdate> updates,
- BatchUpdateListener listener,
- @Nullable RequestId requestId,
- boolean dryrun)
+ ImmutableList<NoteDbBatchUpdate> updates, BatchUpdateListener listener, boolean dryrun)
throws UpdateException, RestApiException {
if (updates.isEmpty()) {
return;
}
- setRequestIds(updates, requestId);
try {
@SuppressWarnings("deprecation")
@@ -293,7 +287,7 @@
@Override
public void execute(BatchUpdateListener listener) throws UpdateException, RestApiException {
- execute(ImmutableList.of(this), listener, requestId, false);
+ execute(ImmutableList.of(this), listener, false);
}
@Override
diff --git a/java/com/google/gerrit/server/update/ReviewDbBatchUpdate.java b/java/com/google/gerrit/server/update/ReviewDbBatchUpdate.java
index 9bf4bb2..df50bd5 100644
--- a/java/com/google/gerrit/server/update/ReviewDbBatchUpdate.java
+++ b/java/com/google/gerrit/server/update/ReviewDbBatchUpdate.java
@@ -28,7 +28,6 @@
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
-import com.google.gerrit.common.Nullable;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.metrics.Description;
@@ -51,6 +50,7 @@
import com.google.gerrit.server.git.InsertedObject;
import com.google.gerrit.server.git.LockFailureException;
import com.google.gerrit.server.index.change.ChangeIndexer;
+import com.google.gerrit.server.logging.TraceContext;
import com.google.gerrit.server.notedb.ChangeNotes;
import com.google.gerrit.server.notedb.ChangeUpdate;
import com.google.gerrit.server.notedb.NoteDbChangeState;
@@ -58,7 +58,6 @@
import com.google.gerrit.server.notedb.NoteDbUpdateManager;
import com.google.gerrit.server.notedb.NoteDbUpdateManager.MismatchedStateException;
import com.google.gerrit.server.notedb.NotesMigration;
-import com.google.gerrit.server.util.RequestId;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.Inject;
@@ -240,15 +239,11 @@
}
static void execute(
- ImmutableList<ReviewDbBatchUpdate> updates,
- BatchUpdateListener listener,
- @Nullable RequestId requestId,
- boolean dryrun)
+ ImmutableList<ReviewDbBatchUpdate> updates, BatchUpdateListener listener, boolean dryrun)
throws UpdateException, RestApiException {
if (updates.isEmpty()) {
return;
}
- setRequestIds(updates, requestId);
try {
Order order = getOrder(updates, listener);
boolean updateChangesInParallel = getUpdateChangesInParallel(updates);
@@ -358,7 +353,7 @@
@Override
public void execute(BatchUpdateListener listener) throws UpdateException, RestApiException {
- execute(ImmutableList.of(this), listener, requestId, false);
+ execute(ImmutableList.of(this), listener, false);
}
@Override
@@ -616,7 +611,6 @@
NoteDbUpdateManager.StagedResult noteDbResult;
boolean dirty;
boolean deleted;
- private String taskId;
private ChangeTask(
Change.Id id, Collection<BatchUpdateOp> changeOps, Thread mainThread, boolean dryrun) {
@@ -628,27 +622,30 @@
@Override
public Void call() throws Exception {
- taskId = id.toString() + "-" + Thread.currentThread().getId();
- if (Thread.currentThread() == mainThread) {
- initRepository();
- Repository repo = repoView.getRepository();
- try (RevWalk rw = new RevWalk(repo)) {
- call(ReviewDbBatchUpdate.this.db, repo, rw);
+ try (TraceContext traceContext =
+ TraceContext.open()
+ .addTag("TASK_ID", id.toString() + "-" + Thread.currentThread().getId())) {
+ if (Thread.currentThread() == mainThread) {
+ initRepository();
+ Repository repo = repoView.getRepository();
+ try (RevWalk rw = new RevWalk(repo)) {
+ call(ReviewDbBatchUpdate.this.db, repo, rw);
+ }
+ } else {
+ // Possible optimization: allow Ops to declare whether they need to
+ // access the repo from updateChange, and don't open in this thread
+ // unless we need it. However, as of this writing the only operations
+ // that are executed in parallel are during ReceiveCommits, and they
+ // all need the repo open anyway. (The non-parallel case above does not
+ // reopen the repo.)
+ try (ReviewDb threadLocalDb = schemaFactory.open();
+ Repository repo = repoManager.openRepository(project);
+ RevWalk rw = new RevWalk(repo)) {
+ call(threadLocalDb, repo, rw);
+ }
}
- } else {
- // Possible optimization: allow Ops to declare whether they need to
- // access the repo from updateChange, and don't open in this thread
- // unless we need it. However, as of this writing the only operations
- // that are executed in parallel are during ReceiveCommits, and they
- // all need the repo open anyway. (The non-parallel case above does not
- // reopen the repo.)
- try (ReviewDb threadLocalDb = schemaFactory.open();
- Repository repo = repoManager.openRepository(project);
- RevWalk rw = new RevWalk(repo)) {
- call(threadLocalDb, repo, rw);
- }
+ return null;
}
- return null;
}
private void call(ReviewDb db, Repository repo, RevWalk rw) throws Exception {
@@ -822,18 +819,6 @@
private boolean isNewChange(Change.Id id) {
return newChanges.containsKey(id);
}
-
- private void logDebug(String msg, Throwable t) {
- ReviewDbBatchUpdate.this.logDebug("[" + taskId + "] " + msg, t);
- }
-
- private void logDebug(String msg) {
- ReviewDbBatchUpdate.this.logDebug("[" + taskId + "] " + msg);
- }
-
- private void logDebug(String msg, @Nullable Object arg) {
- ReviewDbBatchUpdate.this.logDebug("[" + taskId + "] " + msg, arg);
- }
}
private static Iterable<Change> changesToUpdate(ChangeContextImpl ctx) {
diff --git a/java/com/google/gerrit/server/util/RequestId.java b/java/com/google/gerrit/server/util/RequestId.java
index 8e8db12..78f68aa 100644
--- a/java/com/google/gerrit/server/util/RequestId.java
+++ b/java/com/google/gerrit/server/util/RequestId.java
@@ -14,11 +14,14 @@
package com.google.gerrit.server.util;
+import com.google.common.base.Enums;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
+import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Project;
+import com.google.gerrit.server.logging.LoggingContext;
import java.net.InetAddress;
import java.net.UnknownHostException;
@@ -36,6 +39,20 @@
MACHINE_ID = id;
}
+ public enum Type {
+ RECEIVE_ID,
+ SUBMISSION_ID,
+ TRACE_ID;
+
+ static boolean isId(String id) {
+ return id != null && Enums.getIfPresent(Type.class, id).isPresent();
+ }
+ }
+
+ public static boolean isSet() {
+ return LoggingContext.getInstance().getTagsAsMap().keySet().stream().anyMatch(Type::isId);
+ }
+
public static RequestId forChange(Change c) {
return new RequestId(c.getId().toString());
}
@@ -46,17 +63,18 @@
private final String str;
- private RequestId(String resourceId) {
+ public RequestId() {
+ this(null);
+ }
+
+ private RequestId(@Nullable String resourceId) {
Hasher h = Hashing.murmur3_128().newHasher();
h.putLong(Thread.currentThread().getId()).putUnencodedChars(MACHINE_ID);
str =
- "["
- + resourceId
- + "-"
+ (resourceId != null ? resourceId + "-" : "")
+ TimeUtil.nowTs().getTime()
+ "-"
- + h.hash().toString().substring(0, 8)
- + "]";
+ + h.hash().toString().substring(0, 8);
}
@Override
diff --git a/java/com/google/gerrit/sshd/CommandFactoryProvider.java b/java/com/google/gerrit/sshd/CommandFactoryProvider.java
index 3eef4d6..68ea7bb 100644
--- a/java/com/google/gerrit/sshd/CommandFactoryProvider.java
+++ b/java/com/google/gerrit/sshd/CommandFactoryProvider.java
@@ -22,6 +22,7 @@
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.git.WorkQueue;
+import com.google.gerrit.server.logging.LoggingContextAwareThreadFactory;
import com.google.gerrit.sshd.SshScope.Context;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.Inject;
@@ -79,6 +80,7 @@
destroyExecutor =
Executors.newSingleThreadExecutor(
new ThreadFactoryBuilder()
+ .setThreadFactory(new LoggingContextAwareThreadFactory())
.setNameFormat("SshCommandDestroy-%s")
.setDaemon(true)
.build());
diff --git a/java/com/google/gerrit/sshd/SshKeyCacheImpl.java b/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
index b573062..3cd1a0c 100644
--- a/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
+++ b/java/com/google/gerrit/sshd/SshKeyCacheImpl.java
@@ -102,6 +102,8 @@
@Override
public Iterable<SshKeyCacheEntry> load(String username) throws Exception {
+ logger.atFine().log("Loading SSH keys for account with username %s", username);
+
Optional<ExternalId> user = externalIds.get(ExternalId.Key.create(SCHEME_USERNAME, username));
if (!user.isPresent()) {
return NO_SUCH_USER;
diff --git a/java/com/google/gerrit/testing/ConfigSuite.java b/java/com/google/gerrit/testing/ConfigSuite.java
index b0229c3..ff87fd8 100644
--- a/java/com/google/gerrit/testing/ConfigSuite.java
+++ b/java/com/google/gerrit/testing/ConfigSuite.java
@@ -24,6 +24,7 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
+import com.google.gerrit.server.logging.LoggingContext;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@@ -105,11 +106,13 @@
*/
public class ConfigSuite extends Suite {
private static final String FLOGGER_BACKEND_PROPERTY = "flogger.backend_factory";
+ private static final String FLOGGER_LOGGING_CONTEXT = "flogger.logging_context";
static {
System.setProperty(
FLOGGER_BACKEND_PROPERTY,
"com.google.common.flogger.backend.log4j.Log4jBackendFactory#getInstance");
+ System.setProperty(FLOGGER_LOGGING_CONTEXT, LoggingContext.class.getName() + "#getInstance");
}
public static final String DEFAULT = "default";
diff --git a/javatests/com/google/gerrit/acceptance/api/project/ProjectIT.java b/javatests/com/google/gerrit/acceptance/api/project/ProjectIT.java
index 8479dd1..6a9e41b 100644
--- a/javatests/com/google/gerrit/acceptance/api/project/ProjectIT.java
+++ b/javatests/com/google/gerrit/acceptance/api/project/ProjectIT.java
@@ -368,7 +368,7 @@
gApi.projects().name(project.get()).branch("test").create(new BranchInput());
setApiUser(user);
exception.expect(AuthException.class);
- exception.expectMessage("set HEAD not permitted for refs/heads/test");
+ exception.expectMessage("not permitted: set HEAD on refs/heads/test");
gApi.projects().name(project.get()).head("test");
}
diff --git a/javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java b/javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java
index b4ae8a2..d04409c 100644
--- a/javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java
+++ b/javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java
@@ -2037,7 +2037,8 @@
assertPushOk(pushHead(testRepo, master), master);
commits.addAll(initChanges(3));
- assertPushRejected(pushHead(testRepo, master), master, "too many commits");
+ assertPushRejected(
+ pushHead(testRepo, master), master, "more than 2 commits, and skip-validation not set");
grantSkipValidation(project, master, SystemGroupBackend.REGISTERED_USERS);
PushResult r =
@@ -2079,7 +2080,7 @@
allowGlobalCapabilities(REGISTERED_USERS, GlobalCapability.ACCESS_DATABASE);
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
- assertPushRejected(pr, ref, "prohibited by Gerrit: create not permitted for " + ref);
+ assertPushRejected(pr, ref, "prohibited by Gerrit: not permitted: create");
grant(project, "refs/changes/*", Permission.CREATE);
grant(project, "refs/changes/*", Permission.PUSH);
diff --git a/javatests/com/google/gerrit/acceptance/git/AbstractSubmoduleSubscription.java b/javatests/com/google/gerrit/acceptance/git/AbstractSubmoduleSubscription.java
index 252ec88..943b052 100644
--- a/javatests/com/google/gerrit/acceptance/git/AbstractSubmoduleSubscription.java
+++ b/javatests/com/google/gerrit/acceptance/git/AbstractSubmoduleSubscription.java
@@ -29,11 +29,22 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.StreamSupport;
+import org.eclipse.jgit.dircache.DirCache;
+import org.eclipse.jgit.dircache.DirCacheBuilder;
+import org.eclipse.jgit.dircache.DirCacheEditor;
+import org.eclipse.jgit.dircache.DirCacheEditor.PathEdit;
+import org.eclipse.jgit.dircache.DirCacheEntry;
import org.eclipse.jgit.junit.TestRepository;
+import org.eclipse.jgit.lib.AnyObjectId;
+import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.lib.FileMode;
import org.eclipse.jgit.lib.ObjectId;
+import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
+import org.eclipse.jgit.lib.RefUpdate;
+import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTree;
@@ -451,4 +462,47 @@
RevCommit c = rw.parseCommit(commitId);
return c.getAuthorIdent();
}
+
+ protected void directUpdateSubmodule(String project, String refName, String path, AnyObjectId id)
+ throws Exception {
+ path = name(path);
+ try (Repository serverRepo = repoManager.openRepository(new Project.NameKey(name(project)));
+ ObjectInserter ins = serverRepo.newObjectInserter();
+ RevWalk rw = new RevWalk(serverRepo)) {
+ Ref ref = serverRepo.exactRef(refName);
+ assertThat(ref).named(refName).isNotNull();
+ ObjectId oldCommitId = ref.getObjectId();
+
+ DirCache dc = DirCache.newInCore();
+ DirCacheBuilder b = dc.builder();
+ b.addTree(
+ new byte[0], DirCacheEntry.STAGE_0, rw.getObjectReader(), rw.parseTree(oldCommitId));
+ b.finish();
+ DirCacheEditor e = dc.editor();
+ e.add(
+ new PathEdit(path) {
+ @Override
+ public void apply(DirCacheEntry ent) {
+ ent.setFileMode(FileMode.GITLINK);
+ ent.setObjectId(id);
+ }
+ });
+ e.finish();
+
+ CommitBuilder cb = new CommitBuilder();
+ cb.addParentId(oldCommitId);
+ cb.setTreeId(dc.writeTree(ins));
+ PersonIdent ident = serverIdent.get();
+ cb.setAuthor(ident);
+ cb.setCommitter(ident);
+ cb.setMessage("Direct update submodule " + path);
+ ObjectId newCommitId = ins.insert(cb);
+ ins.flush();
+
+ RefUpdate ru = serverRepo.updateRef(refName);
+ ru.setExpectedOldObjectId(oldCommitId);
+ ru.setNewObjectId(newCommitId);
+ assertThat(ru.update()).isEqualTo(RefUpdate.Result.FAST_FORWARD);
+ }
+ }
}
diff --git a/javatests/com/google/gerrit/acceptance/git/ForcePushIT.java b/javatests/com/google/gerrit/acceptance/git/ForcePushIT.java
index 87ac022..d80faa8 100644
--- a/javatests/com/google/gerrit/acceptance/git/ForcePushIT.java
+++ b/javatests/com/google/gerrit/acceptance/git/ForcePushIT.java
@@ -51,7 +51,7 @@
pushFactory.create(db, admin.getIdent(), testRepo, "change2", "b.txt", "content");
push2.setForce(true);
PushOneCommit.Result r2 = push2.to("refs/heads/master");
- r2.assertErrorStatus("need 'Force Push' privilege.");
+ r2.assertErrorStatus("not permitted: force update");
}
@Test
diff --git a/javatests/com/google/gerrit/acceptance/git/PushPermissionsIT.java b/javatests/com/google/gerrit/acceptance/git/PushPermissionsIT.java
index af74657..fa9298f 100644
--- a/javatests/com/google/gerrit/acceptance/git/PushPermissionsIT.java
+++ b/javatests/com/google/gerrit/acceptance/git/PushPermissionsIT.java
@@ -83,11 +83,10 @@
PushResult r = push("HEAD:refs/heads/master");
assertThat(r)
.onlyRef("refs/heads/master")
- .isRejected("prohibited by Gerrit: ref update access denied");
+ .isRejected("prohibited by Gerrit: not permitted: update");
assertThat(r)
.hasMessages(
"Branch refs/heads/master:",
- "You are not allowed to perform this operation.",
"To push into this reference you need 'Push' rights.",
"User: admin",
"Contact an administrator to fix the permissions");
@@ -98,16 +97,18 @@
public void nonFastForwardUpdateDenied() throws Exception {
ObjectId commit = testRepo.commit().create();
PushResult r = push("+" + commit.name() + ":refs/heads/master");
- assertThat(r).onlyRef("refs/heads/master").isRejected("need 'Force Push' privilege.");
- assertThat(r).hasNoMessages();
- // TODO(dborowitz): Why does this not mention refs?
- assertThat(r).hasProcessed(ImmutableMap.of());
+ assertThat(r)
+ .onlyRef("refs/heads/master")
+ .isRejected("prohibited by Gerrit: not permitted: force update");
+ assertThat(r).hasProcessed(ImmutableMap.of("refs", 1));
}
@Test
public void deleteDenied() throws Exception {
PushResult r = push(":refs/heads/master");
- assertThat(r).onlyRef("refs/heads/master").isRejected("cannot delete references");
+ assertThat(r)
+ .onlyRef("refs/heads/master")
+ .isRejected("prohibited by Gerrit: not permitted: delete");
assertThat(r)
.hasMessages(
"Branch refs/heads/master:",
@@ -124,8 +125,8 @@
PushResult r = push("HEAD:refs/heads/newbranch");
assertThat(r)
.onlyRef("refs/heads/newbranch")
- .isRejected("prohibited by Gerrit: create not permitted for refs/heads/newbranch");
- assertThat(r).hasNoMessages();
+ .isRejected("prohibited by Gerrit: not permitted: create");
+ assertThat(r).containsMessages("You need 'Create' rights to create new references.");
assertThat(r).hasProcessed(ImmutableMap.of("refs", 1));
}
@@ -139,18 +140,17 @@
testRepo.branch("HEAD").commit().create();
PushResult r = push(":refs/heads/foo", ":refs/heads/bar", "HEAD:refs/heads/master");
- assertThat(r).ref("refs/heads/foo").isRejected("cannot delete references");
- assertThat(r).ref("refs/heads/bar").isRejected("cannot delete references");
+ assertThat(r).ref("refs/heads/foo").isRejected("prohibited by Gerrit: not permitted: delete");
+ assertThat(r).ref("refs/heads/bar").isRejected("prohibited by Gerrit: not permitted: delete");
assertThat(r)
.ref("refs/heads/master")
- .isRejected("prohibited by Gerrit: ref update access denied");
+ .isRejected("prohibited by Gerrit: not permitted: update");
assertThat(r)
.hasMessages(
"Branches refs/heads/foo, refs/heads/bar:",
"You need 'Delete Reference' rights or 'Push' rights with the ",
"'Force Push' flag set to delete references.",
"Branch refs/heads/master:",
- "You are not allowed to perform this operation.",
"To push into this reference you need 'Push' rights.",
"User: admin",
"Contact an administrator to fix the permissions");
@@ -185,11 +185,10 @@
// ReceiveCommits theoretically has a different message when a WRITE_CONFIG check fails, but
// it never gets there, since DefaultPermissionBackend special-cases refs/meta/config and
// denies UPDATE if the user is not a project owner.
- .isRejected("prohibited by Gerrit: ref update access denied");
+ .isRejected("prohibited by Gerrit: not permitted: update");
assertThat(r)
.hasMessages(
"Branch refs/meta/config:",
- "You are not allowed to perform this operation.",
"Configuration changes can only be pushed by project owners",
"who also have 'Push' rights on refs/meta/config",
"User: admin",
@@ -212,14 +211,12 @@
PushResult r = push("HEAD:refs/for/master");
assertThat(r)
.onlyRef("refs/for/master")
- .isRejected("create change not permitted for refs/heads/master");
+ .isRejected("prohibited by Gerrit: not permitted: create change on refs/heads/master");
assertThat(r)
- .hasMessages(
- "Branch refs/heads/master:",
- "You need 'Push' rights to upload code review requests.",
- "Verify that you are pushing to the right branch.",
- "User: admin",
- "Contact an administrator to fix the permissions");
+ .containsMessages(
+ "Branch refs/for/master:",
+ "You need 'Create Change' rights to upload code review requests.",
+ "Verify that you are pushing to the right branch.");
assertThat(r).hasProcessed(ImmutableMap.of("refs", 1));
}
@@ -234,8 +231,10 @@
PushResult r = push("HEAD:refs/for/master%submit");
assertThat(r)
.onlyRef("refs/for/master%submit")
- .isRejected("update by submit not permitted for refs/heads/master");
- assertThat(r).hasNoMessages();
+ .isRejected("prohibited by Gerrit: not permitted: update by submit on refs/heads/master");
+ assertThat(r)
+ .containsMessages(
+ "You need 'Submit' rights on refs/for/ to submit changes during change upload.");
assertThat(r).hasProcessed(ImmutableMap.of("refs", 1));
}
@@ -269,8 +268,11 @@
push(c -> c.setPushOptions(ImmutableList.of("skip-validation")), "HEAD:refs/heads/master");
assertThat(r)
.onlyRef("refs/heads/master")
- .isRejected("skip validation not permitted for refs/heads/master");
- assertThat(r).hasNoMessages();
+ .isRejected("prohibited by Gerrit: not permitted: skip validation");
+ assertThat(r)
+ .containsMessages(
+ "You need 'Forge Author', 'Forge Server', 'Forge Committer'",
+ "and 'Push Merge' rights to skip validation.");
assertThat(r).hasProcessed(ImmutableMap.of("refs", 1));
}
diff --git a/javatests/com/google/gerrit/acceptance/git/SubmitOnPushIT.java b/javatests/com/google/gerrit/acceptance/git/SubmitOnPushIT.java
index 0705dca..e2614dd 100644
--- a/javatests/com/google/gerrit/acceptance/git/SubmitOnPushIT.java
+++ b/javatests/com/google/gerrit/acceptance/git/SubmitOnPushIT.java
@@ -158,7 +158,7 @@
@Test
public void submitOnPushNotAllowed_Error() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%submit");
- r.assertErrorStatus("update by submit not permitted");
+ r.assertErrorStatus("not permitted: update by submit");
}
@Test
@@ -170,7 +170,7 @@
push(
"refs/for/master%submit",
PushOneCommit.SUBJECT, "a.txt", "other content", r.getChangeId());
- r.assertErrorStatus("update by submit not permitted");
+ r.assertErrorStatus("not permitted: update by submit");
}
@Test
diff --git a/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsIT.java b/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsIT.java
index 847004f..98e3cae 100644
--- a/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsIT.java
+++ b/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsIT.java
@@ -29,6 +29,7 @@
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
+import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.PushResult;
@@ -655,6 +656,65 @@
}
}
+ @Test
+ public void updateOnlyRelevantSubmodules() throws Exception {
+ TestRepository<?> superRepo = createProjectWithPush("super-project");
+ TestRepository<?> subRepo1 = createProjectWithPush("subscribed-to-project-1");
+ TestRepository<?> subRepo2 = createProjectWithPush("subscribed-to-project-2");
+
+ allowMatchingSubmoduleSubscription(
+ "subscribed-to-project-1", "refs/heads/master", "super-project", "refs/heads/master");
+ allowMatchingSubmoduleSubscription(
+ "subscribed-to-project-2", "refs/heads/master", "super-project", "refs/heads/master");
+
+ Config config = new Config();
+ prepareSubmoduleConfigEntry(config, "subscribed-to-project-1", "master");
+ prepareSubmoduleConfigEntry(config, "subscribed-to-project-2", "master");
+ pushSubmoduleConfig(superRepo, "master", config);
+
+ // Push once to initialize submodules.
+ ObjectId subTip2 = pushChangeTo(subRepo2, "master");
+ ObjectId subTip1 = pushChangeTo(subRepo1, "master");
+
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project-1", subTip1);
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project-2", subTip2);
+
+ directUpdateRef("subscribed-to-project-2", "refs/heads/master");
+ subTip1 = pushChangeTo(subRepo1, "master");
+
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project-1", subTip1);
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project-2", subTip2);
+ }
+
+ @Test
+ public void skipUpdatingBrokenGitlinkPointer() throws Exception {
+ TestRepository<?> superRepo = createProjectWithPush("super-project");
+ TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");
+
+ allowMatchingSubmoduleSubscription(
+ "subscribed-to-project", "refs/heads/master", "super-project", "refs/heads/master");
+ createSubmoduleSubscription(superRepo, "master", "subscribed-to-project", "master");
+
+ // Push once to initialize submodule.
+ ObjectId subTip = pushChangeTo(subRepo, "master");
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project", subTip);
+
+ // Write an invalid SHA-1 directly to the gitlink.
+ ObjectId badId = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ directUpdateSubmodule("super-project", "refs/heads/master", "subscribed-to-project", badId);
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project", badId);
+
+ // Push succeeds, but gitlink update is skipped.
+ pushChangeTo(subRepo, "master");
+ expectToHaveSubmoduleState(superRepo, "master", "subscribed-to-project", badId);
+ }
+
+ private ObjectId directUpdateRef(String project, String ref) throws Exception {
+ try (Repository serverRepo = repoManager.openRepository(new Project.NameKey(name(project)))) {
+ return new TestRepository<>(serverRepo).branch(ref).commit().create().copy();
+ }
+ }
+
private void testSubmoduleSubjectCommitMessageAndExpectTruncation() throws Exception {
TestRepository<?> superRepo = createProjectWithPush("super-project");
TestRepository<?> subRepo = createProjectWithPush("subscribed-to-project");
diff --git a/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsWholeTopicMergeIT.java b/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsWholeTopicMergeIT.java
index 2812c86..eef3295 100644
--- a/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsWholeTopicMergeIT.java
+++ b/javatests/com/google/gerrit/acceptance/git/SubmoduleSubscriptionsWholeTopicMergeIT.java
@@ -856,4 +856,44 @@
.that(superRepo.getRepository().resolve("origin/master^"))
.isEqualTo(superPreviousId);
}
+
+ @Test
+ public void skipUpdatingBrokenGitlinkPointer() throws Exception {
+ TestRepository<?> superRepo = createProjectWithPush("super-project");
+ TestRepository<?> sub1 = createProjectWithPush("sub1");
+ TestRepository<?> sub2 = createProjectWithPush("sub2");
+
+ allowMatchingSubmoduleSubscription(
+ "sub1", "refs/heads/master", "super-project", "refs/heads/master");
+ allowMatchingSubmoduleSubscription(
+ "sub2", "refs/heads/master", "super-project", "refs/heads/master");
+
+ Config config = new Config();
+ prepareSubmoduleConfigEntry(config, "sub1", "master");
+ prepareSubmoduleConfigEntry(config, "sub2", "master");
+ pushSubmoduleConfig(superRepo, "master", config);
+
+ // Write an invalid SHA-1 directly to one of the gitlinks.
+ ObjectId badId = ObjectId.fromString("deadbeefdeadbeefdeadbeefdeadbeefdeadbeef");
+ directUpdateSubmodule("super-project", "refs/heads/master", "sub1", badId);
+ expectToHaveSubmoduleState(superRepo, "master", "sub1", badId);
+
+ String topic = "same-topic";
+ ObjectId sub1Id = pushChangeTo(sub1, "refs/for/master", "some message", topic);
+ ObjectId sub2Id = pushChangeTo(sub2, "refs/for/master", "some message", topic);
+
+ String changeId1 = getChangeId(sub1, sub1Id).get();
+ String changeId2 = getChangeId(sub2, sub2Id).get();
+ approve(changeId1);
+ approve(changeId2);
+
+ gApi.changes().id(changeId1).current().submit();
+
+ assertThat(info(changeId1).status).isEqualTo(ChangeStatus.MERGED);
+ assertThat(info(changeId2).status).isEqualTo(ChangeStatus.MERGED);
+
+ // sub1 was skipped but sub2 succeeded.
+ expectToHaveSubmoduleState(superRepo, "master", "sub1", badId);
+ expectToHaveSubmoduleState(superRepo, "master", "sub2", sub2, "master");
+ }
}
diff --git a/javatests/com/google/gerrit/acceptance/rest/TraceIT.java b/javatests/com/google/gerrit/acceptance/rest/TraceIT.java
new file mode 100644
index 0000000..2c32737
--- /dev/null
+++ b/javatests/com/google/gerrit/acceptance/rest/TraceIT.java
@@ -0,0 +1,191 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.acceptance.rest;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.apache.http.HttpStatus.SC_CREATED;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gerrit.acceptance.AbstractDaemonTest;
+import com.google.gerrit.acceptance.PushOneCommit;
+import com.google.gerrit.acceptance.RestResponse;
+import com.google.gerrit.extensions.registration.DynamicSet;
+import com.google.gerrit.extensions.registration.RegistrationHandle;
+import com.google.gerrit.httpd.restapi.ParameterParser;
+import com.google.gerrit.httpd.restapi.RestApiServlet;
+import com.google.gerrit.server.events.CommitReceivedEvent;
+import com.google.gerrit.server.git.validators.CommitValidationException;
+import com.google.gerrit.server.git.validators.CommitValidationListener;
+import com.google.gerrit.server.git.validators.CommitValidationMessage;
+import com.google.gerrit.server.logging.LoggingContext;
+import com.google.gerrit.server.project.CreateProjectArgs;
+import com.google.gerrit.server.validators.ProjectCreationValidationListener;
+import com.google.gerrit.server.validators.ValidationException;
+import com.google.inject.Inject;
+import java.util.List;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TraceIT extends AbstractDaemonTest {
+ @Inject private DynamicSet<ProjectCreationValidationListener> projectCreationValidationListeners;
+ @Inject private DynamicSet<CommitValidationListener> commitValidationListeners;
+
+ private TraceValidatingProjectCreationValidationListener projectCreationListener;
+ private RegistrationHandle projectCreationListenerRegistrationHandle;
+ private TraceValidatingCommitValidationListener commitValidationListener;
+ private RegistrationHandle commitValidationRegistrationHandle;
+
+ @Before
+ public void setup() {
+ projectCreationListener = new TraceValidatingProjectCreationValidationListener();
+ projectCreationListenerRegistrationHandle =
+ projectCreationValidationListeners.add(projectCreationListener);
+ commitValidationListener = new TraceValidatingCommitValidationListener();
+ commitValidationRegistrationHandle = commitValidationListeners.add(commitValidationListener);
+ }
+
+ @After
+ public void cleanup() {
+ projectCreationListenerRegistrationHandle.remove();
+ commitValidationRegistrationHandle.remove();
+ }
+
+ @Test
+ public void restCallWithoutTrace() throws Exception {
+ RestResponse response = adminRestSession.put("/projects/new1");
+ assertThat(response.getStatusCode()).isEqualTo(SC_CREATED);
+ assertThat(response.getHeader(RestApiServlet.X_GERRIT_TRACE)).isNull();
+ assertThat(projectCreationListener.foundTraceId).isFalse();
+ }
+
+ @Test
+ public void restCallWithTrace() throws Exception {
+ RestResponse response =
+ adminRestSession.put("/projects/new2?" + ParameterParser.TRACE_PARAMETER);
+ assertThat(response.getStatusCode()).isEqualTo(SC_CREATED);
+ assertThat(response.getHeader(RestApiServlet.X_GERRIT_TRACE)).isNotNull();
+ assertThat(projectCreationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void restCallWithTraceTrue() throws Exception {
+ RestResponse response =
+ adminRestSession.put("/projects/new3?" + ParameterParser.TRACE_PARAMETER + "=true");
+ assertThat(response.getStatusCode()).isEqualTo(SC_CREATED);
+ assertThat(response.getHeader(RestApiServlet.X_GERRIT_TRACE)).isNotNull();
+ assertThat(projectCreationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void restCallWithTraceFalse() throws Exception {
+ RestResponse response =
+ adminRestSession.put("/projects/new4?" + ParameterParser.TRACE_PARAMETER + "=false");
+ assertThat(response.getStatusCode()).isEqualTo(SC_CREATED);
+ assertThat(response.getHeader(RestApiServlet.X_GERRIT_TRACE)).isNull();
+ assertThat(projectCreationListener.foundTraceId).isFalse();
+ }
+
+ @Test
+ public void pushWithoutTrace() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ PushOneCommit.Result r = push.to("refs/heads/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isFalse();
+ }
+
+ @Test
+ public void pushWithTrace() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace"));
+ PushOneCommit.Result r = push.to("refs/heads/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void pushWithTraceTrue() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace=true"));
+ PushOneCommit.Result r = push.to("refs/heads/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void pushWithTraceFalse() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace=false"));
+ PushOneCommit.Result r = push.to("refs/heads/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isFalse();
+ }
+
+ @Test
+ public void pushForReviewWithoutTrace() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ PushOneCommit.Result r = push.to("refs/for/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isFalse();
+ }
+
+ @Test
+ public void pushForReviewWithTrace() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace"));
+ PushOneCommit.Result r = push.to("refs/for/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void pushForReviewWithTraceTrue() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace=true"));
+ PushOneCommit.Result r = push.to("refs/for/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isTrue();
+ }
+
+ @Test
+ public void pushForReviewWithTraceFalse() throws Exception {
+ PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo);
+ push.setPushOptions(ImmutableList.of("trace=false"));
+ PushOneCommit.Result r = push.to("refs/for/master");
+ r.assertOkStatus();
+ assertThat(commitValidationListener.foundTraceId).isFalse();
+ }
+
+ private static class TraceValidatingProjectCreationValidationListener
+ implements ProjectCreationValidationListener {
+ Boolean foundTraceId;
+
+ @Override
+ public void validateNewProject(CreateProjectArgs args) throws ValidationException {
+ this.foundTraceId = LoggingContext.getInstance().getTagsAsMap().containsKey("TRACE_ID");
+ }
+ }
+
+ private static class TraceValidatingCommitValidationListener implements CommitValidationListener {
+ Boolean foundTraceId;
+
+ @Override
+ public List<CommitValidationMessage> onCommitReceived(CommitReceivedEvent receiveEvent)
+ throws CommitValidationException {
+ this.foundTraceId = LoggingContext.getInstance().getTagsAsMap().containsKey("TRACE_ID");
+ return ImmutableList.of();
+ }
+ }
+}
diff --git a/javatests/com/google/gerrit/acceptance/rest/change/AbstractSubmit.java b/javatests/com/google/gerrit/acceptance/rest/change/AbstractSubmit.java
index 61a2d84..5580279 100644
--- a/javatests/com/google/gerrit/acceptance/rest/change/AbstractSubmit.java
+++ b/javatests/com/google/gerrit/acceptance/rest/change/AbstractSubmit.java
@@ -27,6 +27,7 @@
import static java.util.stream.Collectors.toList;
import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
@@ -154,11 +155,12 @@
@Test
@TestProjectInput(createEmptyCommit = false)
public void submitToEmptyRepo() throws Exception {
- RevCommit initialHead = getRemoteHead();
+ assertThat(getRemoteHead()).isNull();
PushOneCommit.Result change = createChange();
+ assertThat(change.getCommit().getParents()).isEmpty();
Map<Branch.NameKey, ObjectId> actual = fetchFromSubmitPreview(change.getChangeId());
RevCommit headAfterSubmitPreview = getRemoteHead();
- assertThat(headAfterSubmitPreview).isEqualTo(initialHead);
+ assertThat(headAfterSubmitPreview).isNull();
assertThat(actual).hasSize(1);
submit(change.getChangeId());
@@ -1070,6 +1072,45 @@
change.current().submit();
}
+ @Test
+ @TestProjectInput(createEmptyCommit = false, rejectEmptyCommit = InheritableBoolean.TRUE)
+ public void submitNonemptyCommitToEmptyRepoWithRejectEmptyCommit_allowed() throws Exception {
+ assertThat(getRemoteHead()).isNull();
+ PushOneCommit.Result change = createChange();
+ assertThat(change.getCommit().getParents()).isEmpty();
+ Map<Branch.NameKey, ObjectId> actual = fetchFromSubmitPreview(change.getChangeId());
+ RevCommit headAfterSubmitPreview = getRemoteHead();
+ assertThat(headAfterSubmitPreview).isNull();
+ assertThat(actual).hasSize(1);
+
+ submit(change.getChangeId());
+ assertThat(getRemoteHead().getId()).isEqualTo(change.getCommit());
+ assertTrees(project, actual);
+ }
+
+ @Test
+ @TestProjectInput(createEmptyCommit = false, rejectEmptyCommit = InheritableBoolean.TRUE)
+ public void submitEmptyCommitToEmptyRepoWithRejectEmptyCommit_allowed() throws Exception {
+ assertThat(getRemoteHead()).isNull();
+ PushOneCommit.Result change =
+ pushFactory
+ .create(db, admin.getIdent(), testRepo, "Change 1", ImmutableMap.of())
+ .to("refs/for/master");
+ change.assertOkStatus();
+ // TODO(dborowitz): Use EMPTY_TREE_ID after upgrading to https://git.eclipse.org/r/127473
+ assertThat(change.getCommit().getTree())
+ .isEqualTo(ObjectId.fromString("4b825dc642cb6eb9a060e54bf8d69288fbee4904"));
+
+ Map<Branch.NameKey, ObjectId> actual = fetchFromSubmitPreview(change.getChangeId());
+ RevCommit headAfterSubmitPreview = getRemoteHead();
+ assertThat(headAfterSubmitPreview).isNull();
+ assertThat(actual).hasSize(1);
+
+ submit(change.getChangeId());
+ assertThat(getRemoteHead().getId()).isEqualTo(change.getCommit());
+ assertTrees(project, actual);
+ }
+
private void setChangeStatusToNew(PushOneCommit.Result... changes) throws Exception {
for (PushOneCommit.Result change : changes) {
try (BatchUpdate bu =
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/CreateBranchIT.java b/javatests/com/google/gerrit/acceptance/rest/project/CreateBranchIT.java
index 0017e08..df89686 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/CreateBranchIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/project/CreateBranchIT.java
@@ -61,7 +61,7 @@
@Test
public void createBranch_Forbidden() throws Exception {
setApiUser(user);
- assertCreateFails(testBranch, AuthException.class, "create not permitted for refs/heads/test");
+ assertCreateFails(testBranch, AuthException.class, "not permitted: create on refs/heads/test");
}
@Test
@@ -85,7 +85,7 @@
@Test
public void createBranchByAdminCreateReferenceBlocked_Forbidden() throws Exception {
blockCreateReference();
- assertCreateFails(testBranch, AuthException.class, "create not permitted for refs/heads/test");
+ assertCreateFails(testBranch, AuthException.class, "not permitted: create on refs/heads/test");
}
@Test
@@ -93,7 +93,7 @@
grantOwner();
blockCreateReference();
setApiUser(user);
- assertCreateFails(testBranch, AuthException.class, "create not permitted for refs/heads/test");
+ assertCreateFails(testBranch, AuthException.class, "not permitted: create on refs/heads/test");
}
@Test
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchIT.java b/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchIT.java
index 5e1b0bf..b426a37 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchIT.java
@@ -193,7 +193,7 @@
private void assertDeleteForbidden(Branch.NameKey branch) throws Exception {
assertThat(branch(branch).get().canDelete).isNull();
exception.expect(AuthException.class);
- exception.expectMessage("delete not permitted");
+ exception.expectMessage("not permitted: delete");
branch(branch).delete();
}
}
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchesIT.java b/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchesIT.java
index 330f2b8..c1bd8f1 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchesIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/project/DeleteBranchesIT.java
@@ -75,7 +75,7 @@
project().deleteBranches(input);
fail("Expected AuthException");
} catch (AuthException e) {
- assertThat(e).hasMessageThat().isEqualTo("delete not permitted for refs/heads/test-1");
+ assertThat(e).hasMessageThat().isEqualTo("not permitted: delete on refs/heads/test-1");
}
setApiUser(admin);
assertBranches(BRANCHES);
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/DeleteTagIT.java b/javatests/com/google/gerrit/acceptance/rest/project/DeleteTagIT.java
index 0cbbe44..3ae0b44 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/DeleteTagIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/project/DeleteTagIT.java
@@ -125,7 +125,7 @@
private void assertDeleteForbidden() throws Exception {
assertThat(tag().get().canDelete).isNull();
exception.expect(AuthException.class);
- exception.expectMessage("delete not permitted");
+ exception.expectMessage("not permitted: delete");
tag().delete();
}
}
diff --git a/javatests/com/google/gerrit/acceptance/rest/project/TagsIT.java b/javatests/com/google/gerrit/acceptance/rest/project/TagsIT.java
index 714751d..d4edc0d 100644
--- a/javatests/com/google/gerrit/acceptance/rest/project/TagsIT.java
+++ b/javatests/com/google/gerrit/acceptance/rest/project/TagsIT.java
@@ -254,7 +254,7 @@
TagInput input = new TagInput();
input.ref = "test";
exception.expect(AuthException.class);
- exception.expectMessage("create not permitted");
+ exception.expectMessage("not permitted: create");
tag(input.ref).create(input);
}
diff --git a/javatests/com/google/gerrit/server/BUILD b/javatests/com/google/gerrit/server/BUILD
index 18d9c71..8394fec 100644
--- a/javatests/com/google/gerrit/server/BUILD
+++ b/javatests/com/google/gerrit/server/BUILD
@@ -63,6 +63,7 @@
"//lib/auto:auto-value",
"//lib/auto:auto-value-annotations",
"//lib/commons:codec",
+ "//lib/flogger:api",
"//lib/guice",
"//lib/jgit/org.eclipse.jgit:jgit",
"//lib/jgit/org.eclipse.jgit.junit:junit",
diff --git a/javatests/com/google/gerrit/server/logging/LoggingContextAwareThreadFactoryTest.java b/javatests/com/google/gerrit/server/logging/LoggingContextAwareThreadFactoryTest.java
new file mode 100644
index 0000000..bd04d8a
--- /dev/null
+++ b/javatests/com/google/gerrit/server/logging/LoggingContextAwareThreadFactoryTest.java
@@ -0,0 +1,88 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.truth.Expect;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class LoggingContextAwareThreadFactoryTest {
+ @Rule public final Expect expect = Expect.create();
+
+ @Test
+ public void loggingContextPropagationToNewThread() throws Exception {
+ assertThat(LoggingContext.getInstance().getTags().isEmpty()).isTrue();
+ try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+ SortedMap<String, SortedSet<Object>> tagMap = LoggingContext.getInstance().getTags().asMap();
+ assertThat(tagMap.keySet()).containsExactly("foo");
+ assertThat(tagMap.get("foo")).containsExactly("bar");
+
+ Thread thread =
+ new LoggingContextAwareThreadFactory(r -> new Thread(r, "test-thread"))
+ .newThread(
+ () -> {
+ // Verify that the tags have been propagated to the new thread.
+ SortedMap<String, SortedSet<Object>> threadTagMap =
+ LoggingContext.getInstance().getTags().asMap();
+ expect.that(threadTagMap.keySet()).containsExactly("foo");
+ expect.that(threadTagMap.get("foo")).containsExactly("bar");
+ });
+
+ // Execute in background.
+ thread.start();
+ thread.join();
+
+ // Verify that tags in the outer thread are still set.
+ tagMap = LoggingContext.getInstance().getTags().asMap();
+ assertThat(tagMap.keySet()).containsExactly("foo");
+ assertThat(tagMap.get("foo")).containsExactly("bar");
+ }
+ assertThat(LoggingContext.getInstance().getTags().isEmpty()).isTrue();
+ }
+
+ @Test
+ public void loggingContextPropagationToSameThread() throws Exception {
+ assertThat(LoggingContext.getInstance().getTags().isEmpty()).isTrue();
+ try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+ SortedMap<String, SortedSet<Object>> tagMap = LoggingContext.getInstance().getTags().asMap();
+ assertThat(tagMap.keySet()).containsExactly("foo");
+ assertThat(tagMap.get("foo")).containsExactly("bar");
+
+ Thread thread =
+ new LoggingContextAwareThreadFactory()
+ .newThread(
+ () -> {
+ // Verify that the tags have been propagated to the new thread.
+ SortedMap<String, SortedSet<Object>> threadTagMap =
+ LoggingContext.getInstance().getTags().asMap();
+ expect.that(threadTagMap.keySet()).containsExactly("foo");
+ expect.that(threadTagMap.get("foo")).containsExactly("bar");
+ });
+
+ // Execute in the same thread.
+ thread.run();
+
+ // Verify that tags in the outer thread are still set.
+ tagMap = LoggingContext.getInstance().getTags().asMap();
+ assertThat(tagMap.keySet()).containsExactly("foo");
+ assertThat(tagMap.get("foo")).containsExactly("bar");
+ }
+ assertThat(LoggingContext.getInstance().getTags().isEmpty()).isTrue();
+ }
+}
diff --git a/javatests/com/google/gerrit/server/logging/MutableTagsTest.java b/javatests/com/google/gerrit/server/logging/MutableTagsTest.java
new file mode 100644
index 0000000..4fadbb4
--- /dev/null
+++ b/javatests/com/google/gerrit/server/logging/MutableTagsTest.java
@@ -0,0 +1,176 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assert_;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSetMultimap;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MutableTagsTest {
+ private MutableTags tags;
+
+ @Before
+ public void setup() {
+ tags = new MutableTags();
+ }
+
+ @Test
+ public void addTag() {
+ assertThat(tags.add("name", "value")).isTrue();
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+ }
+
+ @Test
+ public void addTagsWithDifferentName() {
+ assertThat(tags.add("name1", "value1")).isTrue();
+ assertThat(tags.add("name2", "value2")).isTrue();
+ assertTags(
+ ImmutableMap.of("name1", ImmutableSet.of("value1"), "name2", ImmutableSet.of("value2")));
+ }
+
+ @Test
+ public void addTagsWithSameNameButDifferentValues() {
+ assertThat(tags.add("name", "value1")).isTrue();
+ assertThat(tags.add("name", "value2")).isTrue();
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value1", "value2")));
+ }
+
+ @Test
+ public void addTagsWithSameNameAndSameValue() {
+ assertThat(tags.add("name", "value")).isTrue();
+ assertThat(tags.add("name", "value")).isFalse();
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+ }
+
+ @Test
+ public void getEmptyTags() {
+ assertThat(tags.getTags().isEmpty()).isTrue();
+ assertTags(ImmutableMap.of());
+ }
+
+ @Test
+ public void isEmpty() {
+ assertThat(tags.isEmpty()).isTrue();
+
+ tags.add("foo", "bar");
+ assertThat(tags.isEmpty()).isFalse();
+
+ tags.remove("foo", "bar");
+ assertThat(tags.isEmpty()).isTrue();
+ }
+
+ @Test
+ public void removeTags() {
+ tags.add("name1", "value1");
+ tags.add("name1", "value2");
+ tags.add("name2", "value");
+ assertTags(
+ ImmutableMap.of(
+ "name1", ImmutableSet.of("value1", "value2"), "name2", ImmutableSet.of("value")));
+
+ tags.remove("name2", "value");
+ assertTags(ImmutableMap.of("name1", ImmutableSet.of("value1", "value2")));
+
+ tags.remove("name1", "value1");
+ assertTags(ImmutableMap.of("name1", ImmutableSet.of("value2")));
+
+ tags.remove("name1", "value2");
+ assertTags(ImmutableMap.of());
+ }
+
+ @Test
+ public void removeNonExistingTag() {
+ tags.add("name", "value");
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+
+ tags.remove("foo", "bar");
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+
+ tags.remove("name", "foo");
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+ }
+
+ @Test
+ public void setTags() {
+ tags.add("name", "value");
+ assertTags(ImmutableMap.of("name", ImmutableSet.of("value")));
+
+ tags.set(ImmutableSetMultimap.of("foo", "bar", "foo", "baz", "bar", "baz"));
+ assertTags(
+ ImmutableMap.of("foo", ImmutableSet.of("bar", "baz"), "bar", ImmutableSet.of("baz")));
+ }
+
+ @Test
+ public void asMap() {
+ tags.add("name", "value");
+ assertThat(tags.asMap()).containsExactlyEntriesIn(ImmutableSetMultimap.of("name", "value"));
+
+ tags.set(ImmutableSetMultimap.of("foo", "bar", "foo", "baz", "bar", "baz"));
+ assertThat(tags.asMap())
+ .containsExactlyEntriesIn(
+ ImmutableSetMultimap.of("foo", "bar", "foo", "baz", "bar", "baz"));
+ }
+
+ @Test
+ public void clearTags() {
+ tags.add("name1", "value1");
+ tags.add("name1", "value2");
+ tags.add("name2", "value");
+ assertTags(
+ ImmutableMap.of(
+ "name1", ImmutableSet.of("value1", "value2"), "name2", ImmutableSet.of("value")));
+
+ tags.clear();
+ assertTags(ImmutableMap.of());
+ }
+
+ @Test
+ public void addInvalidTag() {
+ assertNullPointerException("tag name is required", () -> tags.add(null, "foo"));
+ assertNullPointerException("tag value is required", () -> tags.add("foo", null));
+ }
+
+ @Test
+ public void removeInvalidTag() {
+ assertNullPointerException("tag name is required", () -> tags.remove(null, "foo"));
+ assertNullPointerException("tag value is required", () -> tags.remove("foo", null));
+ }
+
+ private void assertTags(ImmutableMap<String, ImmutableSet<String>> expectedTagMap) {
+ SortedMap<String, SortedSet<Object>> actualTagMap = tags.getTags().asMap();
+ assertThat(actualTagMap.keySet()).containsExactlyElementsIn(expectedTagMap.keySet());
+ for (Map.Entry<String, ImmutableSet<String>> expectedEntry : expectedTagMap.entrySet()) {
+ assertThat(actualTagMap.get(expectedEntry.getKey()))
+ .containsExactlyElementsIn(expectedEntry.getValue());
+ }
+ }
+
+ private void assertNullPointerException(String expectedMessage, Runnable r) {
+ try {
+ r.run();
+ assert_().fail("expected NullPointerException");
+ } catch (NullPointerException e) {
+ assertThat(e.getMessage()).isEqualTo(expectedMessage);
+ }
+ }
+}
diff --git a/javatests/com/google/gerrit/server/logging/TraceContextTest.java b/javatests/com/google/gerrit/server/logging/TraceContextTest.java
new file mode 100644
index 0000000..c4ebd29
--- /dev/null
+++ b/javatests/com/google/gerrit/server/logging/TraceContextTest.java
@@ -0,0 +1,120 @@
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.google.gerrit.server.logging;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.gerrit.server.util.RequestId;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.SortedSet;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * Tests for {@code TraceContext}: tags added through an open context must be visible in the
+ * {@code LoggingContext} singleton while the context is open and be removed again when it is
+ * closed, including correct restoration across nested contexts.
+ */
+public class TraceContextTest {
+  @After
+  public void cleanup() {
+    // LoggingContext is a shared singleton; clear it so tags cannot leak between tests.
+    LoggingContext.getInstance().clearTags();
+  }
+
+  @Test
+  public void openContext() {
+    // Tags exist only for the lifetime of the try-with-resources scope.
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  @Test
+  public void openNestedContexts() {
+    // An inner context adds its tags on top of the outer ones; closing it restores
+    // exactly the outer context's tags.
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+
+      try (TraceContext traceContext2 = TraceContext.open().addTag("abc", "xyz")) {
+        assertTags(ImmutableMap.of("abc", ImmutableSet.of("xyz"), "foo", ImmutableSet.of("bar")));
+      }
+
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  @Test
+  public void openNestedContextsWithSameTagName() {
+    // Same tag name in a nested context: values accumulate while nested, and the
+    // inner value is dropped on close, keeping the outer value.
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+
+      try (TraceContext traceContext2 = TraceContext.open().addTag("foo", "baz")) {
+        assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar", "baz")));
+      }
+
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  @Test
+  public void openNestedContextsWithSameTagNameAndValue() {
+    // Identical name+value in a nested context must not be removed early when the
+    // inner context closes — the outer context still owns it.
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+
+      try (TraceContext traceContext2 = TraceContext.open().addTag("foo", "bar")) {
+        assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+      }
+
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  @Test
+  public void openContextWithRequestId() {
+    // A RequestId.Type tag is stored under the enum constant's name ("RECEIVE_ID").
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag(RequestId.Type.RECEIVE_ID, "foo")) {
+      assertTags(ImmutableMap.of("RECEIVE_ID", ImmutableSet.of("foo")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  @Test
+  public void addTag() {
+    // Tags added after open() (both a new value for an existing name and a new name)
+    // are also removed when the context closes.
+    assertTags(ImmutableMap.of());
+    try (TraceContext traceContext = TraceContext.open().addTag("foo", "bar")) {
+      assertTags(ImmutableMap.of("foo", ImmutableSet.of("bar")));
+
+      traceContext.addTag("foo", "baz");
+      traceContext.addTag("bar", "baz");
+      assertTags(
+          ImmutableMap.of("foo", ImmutableSet.of("bar", "baz"), "bar", ImmutableSet.of("baz")));
+    }
+    assertTags(ImmutableMap.of());
+  }
+
+  // Asserts that the LoggingContext singleton currently holds exactly the expected
+  // name -> values mapping: same keys, and for each key the same set of values.
+  private void assertTags(ImmutableMap<String, ImmutableSet<String>> expectedTagMap) {
+    SortedMap<String, SortedSet<Object>> actualTagMap =
+        LoggingContext.getInstance().getTags().asMap();
+    assertThat(actualTagMap.keySet()).containsExactlyElementsIn(expectedTagMap.keySet());
+    for (Map.Entry<String, ImmutableSet<String>> expectedEntry : expectedTagMap.entrySet()) {
+      assertThat(actualTagMap.get(expectedEntry.getKey()))
+          .containsExactlyElementsIn(expectedEntry.getValue());
+    }
+  }
+}
diff --git a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
index 052de6b..9590f15 100644
--- a/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
+++ b/resources/com/google/gerrit/httpd/raw/PolyGerritIndexHtml.soy
@@ -19,7 +19,8 @@
/**
* @param canonicalPath
* @param staticResourcePath
- * @param? assetsUrl
+ * @param? assetsPath {string} URL to static assets root, if served from CDN.
+ * @param? assetsBundle {string} Assets bundle .html file, served from $assetsPath.
* @param? faviconPath
* @param? versionInfo
* @param? deprecateGwtUi
@@ -37,6 +38,7 @@
{if $deprecateGwtUi}window.DEPRECATE_GWT_UI = true;{/if}
{if $versionInfo}window.VERSION_INFO = '{$versionInfo}';{/if}
{if $staticResourcePath != ''}window.STATIC_RESOURCE_PATH = '{$staticResourcePath}';{/if}
+ {if $assetsPath != ''}window.ASSETS_PATH = '{$assetsPath}';{/if}
</script>{\n}
{if $faviconPath}
@@ -62,8 +64,8 @@
// CC them on any changes that load content before gr-app.html.
//
// github.com/Polymer/polymer-resin/blob/master/getting-started.md#integrating
- {if $assetsUrl}
- <link rel="import" href="{$assetsUrl}">{\n}
+ {if $assetsPath and $assetsBundle}
+ <link rel="import" href="{$assetsPath + $assetsBundle}">{\n}
{/if}
<link rel="preload" href="{$staticResourcePath}/elements/gr-app.js" as="script" crossorigin="anonymous">{\n}
diff --git a/tools/eclipse/BUILD b/tools/eclipse/BUILD
index d022c40..0c9d023 100644
--- a/tools/eclipse/BUILD
+++ b/tools/eclipse/BUILD
@@ -11,7 +11,6 @@
"//gerrit-gwtui:ui_tests",
"//javatests/com/google/gerrit/elasticsearch:elasticsearch_test_utils",
"//javatests/com/google/gerrit/server:server_tests",
- "//proto:reviewdb_java_proto",
]
DEPS = [
@@ -33,8 +32,7 @@
"//lib/gwt:w3c-css-sac",
"//lib/jetty:servlets",
"//lib/prolog:compiler-lib",
- # TODO(davido): I do not understand why it must be on the Eclipse classpath
- #'//Documentation:index',
+ "//proto:reviewdb_java_proto",
]
java_library(