Merge "Move CURATOR_VER constant to right before its use" into stable-2.16
diff --git a/dockerised_local_env/.gitignore b/dockerised_local_env/.gitignore
index c65f4d5..c88a11c 100644
--- a/dockerised_local_env/.gitignore
+++ b/dockerised_local_env/.gitignore
@@ -7,6 +7,8 @@
gerrit-1/bin
gerrit-1/etc
gerrit-1/tmp
+gerrit-1/lib
+gerrit-1/ssh/known_hosts
gerrit-2/plugins
gerrit-2/db
gerrit-2/git
@@ -16,3 +18,7 @@
gerrit-2/bin
gerrit-2/etc
gerrit-2/tmp
+gerrit-2/lib
+gerrit-2/ssh/known_hosts
+syslog-sidecar/logs
+syslog-sidecar/socket
diff --git a/dockerised_local_env/Makefile b/dockerised_local_env/Makefile
index ae1c85b..0b6b7e3 100644
--- a/dockerised_local_env/Makefile
+++ b/dockerised_local_env/Makefile
@@ -1,9 +1,9 @@
GERRIT_JOB=Gerrit-bazel-stable-2.16
-BUILD_NUM=259
-GERRIT_1_PLUGINS_DIRECTORY=./gerrit-1/plugins
-GERRIT_2_PLUGINS_DIRECTORY=./gerrit-2/plugins
+BUILD_NUM=377
GERRIT_1_BIN_DIRECTORY=./gerrit-1/bin
GERRIT_2_BIN_DIRECTORY=./gerrit-2/bin
+GERRIT_1_LIB_DIRECTORY=./gerrit-1/lib
+GERRIT_2_LIB_DIRECTORY=./gerrit-2/lib
GERRIT_1_ETC_DIRECTORY=./gerrit-1/etc
GERRIT_2_ETC_DIRECTORY=./gerrit-2/etc
GERRIT_1_PLUGINS_DIRECTORY=./gerrit-1/plugins
@@ -16,7 +16,7 @@
all: prepare download build
prepare:
- -mkdir -p $(GERRIT_1_PLUGINS_DIRECTORY) $(GERRIT_2_PLUGINS_DIRECTORY) $(GERRIT_1_BIN_DIRECTORY) $(GERRIT_2_BIN_DIRECTORY) $(GERRIT_1_ETC_DIRECTORY) $(GERRIT_2_ETC_DIRECTORY)
+ -mkdir -p $(GERRIT_1_PLUGINS_DIRECTORY) $(GERRIT_2_PLUGINS_DIRECTORY) $(GERRIT_1_BIN_DIRECTORY) $(GERRIT_2_BIN_DIRECTORY) $(GERRIT_1_ETC_DIRECTORY) $(GERRIT_2_ETC_DIRECTORY) $(GERRIT_1_LIB_DIRECTORY) $(GERRIT_2_LIB_DIRECTORY)
download: gerrit plugin_websession_flatfile \
plugin_healthcheck \
@@ -26,19 +26,19 @@
gerrit: prepare
$(WGET) $(CI_URL)/$(GERRIT_JOB)/lastSuccessfulBuild/artifact/gerrit/bazel-bin/release.war -P $(GERRIT_1_BIN_DIRECTORY)
cp $(GERRIT_1_BIN_DIRECTORY)/*.war $(GERRIT_2_BIN_DIRECTORY)
- for plugin in $(CORE_PLUGINS); do $(WGET) $(CI_URL)/$(GERRIT_JOB)/lastSuccessfulBuild/artifact/gerrit/bazel-genfiles/plugins/$$plugin/$$plugin.jar -P $(GERRIT_1_PLUGINS_DIRECTORY); done
+ for plugin in $(CORE_PLUGINS); do $(WGET) $(CI_URL)/$(GERRIT_JOB)/lastSuccessfulBuild/artifact/gerrit/bazel-bin/plugins/$$plugin/$$plugin.jar -P $(GERRIT_1_PLUGINS_DIRECTORY); done
cp $(GERRIT_1_PLUGINS_DIRECTORY)/*.jar $(GERRIT_2_PLUGINS_DIRECTORY)
plugin_websession_flatfile: prepare
- $(WGET) $(CI_URL)/plugin-websession-flatfile-bazel-master-stable-2.16/lastSuccessfulBuild/artifact/bazel-genfiles/plugins/websession-flatfile/websession-flatfile.jar -P $(GERRIT_1_PLUGINS_DIRECTORY)
+ $(WGET) $(CI_URL)/plugin-websession-flatfile-bazel-master-stable-2.16/lastSuccessfulBuild/artifact/bazel-bin/plugins/websession-flatfile/websession-flatfile.jar -P $(GERRIT_1_PLUGINS_DIRECTORY)
cp $(GERRIT_1_PLUGINS_DIRECTORY)/websession-flatfile.jar $(GERRIT_2_PLUGINS_DIRECTORY)/websession-flatfile.jar
plugin_multi_site: prepare
- $(WGET) $(CI_URL)/plugin-multi-site-bazel-stable-2.16/lastSuccessfulBuild/artifact/bazel-genfiles/plugins/multi-site/multi-site.jar -P $(GERRIT_1_PLUGINS_DIRECTORY)
- cp $(GERRIT_1_PLUGINS_DIRECTORY)/multi-site.jar $(GERRIT_2_PLUGINS_DIRECTORY)/multi-site.jar
+ $(WGET) $(CI_URL)/plugin-multi-site-bazel-stable-2.16/lastSuccessfulBuild/artifact/bazel-bin/plugins/multi-site/multi-site.jar -P $(GERRIT_1_LIB_DIRECTORY)
+ cp $(GERRIT_1_LIB_DIRECTORY)/multi-site.jar $(GERRIT_2_LIB_DIRECTORY)/multi-site.jar
plugin_healthcheck: prepare
- $(WGET) $(CI_URL)/plugin-healthcheck-bazel-stable-2.16/lastSuccessfulBuild/artifact/bazel-genfiles/plugins/healthcheck/healthcheck.jar -P $(GERRIT_1_PLUGINS_DIRECTORY)
+ $(WGET) $(CI_URL)/plugin-healthcheck-bazel-stable-2.16/lastSuccessfulBuild/artifact/bazel-bin/plugins/healthcheck/healthcheck.jar -P $(GERRIT_1_PLUGINS_DIRECTORY)
cp $(GERRIT_1_PLUGINS_DIRECTORY)/healthcheck.jar $(GERRIT_2_PLUGINS_DIRECTORY)/healthcheck.jar
build:
@@ -46,11 +46,11 @@
docker build -t $(MYDIR) ./gerrit-2
clean_gerrit: prepare
- -rm -fr gerrit-{1,2}/{db,data,cache,db,git,index,etc,bin,tmp}/*
- export GERRIT_REPLICATION_INSTANCE=gerrit-2 REPLICATE_ON_STARTUP=true; cat ./gerrit-common/replication.config.template | envsubst '$${GERRIT_REPLICATION_INSTANCE} $${REPLICATE_ON_STARTUP}' > ./gerrit-1/etc/replication.config
- export GERRIT_REPLICATION_INSTANCE=gerrit-1 REPLICATE_ON_STARTUP=false; cat ./gerrit-common/replication.config.template | envsubst '$${GERRIT_REPLICATION_INSTANCE} $${REPLICATE_ON_STARTUP}' > ./gerrit-2/etc/replication.config
- cp ./gerrit-common/gerrit.config ./gerrit-1/etc
- cp ./gerrit-common/gerrit.config ./gerrit-2/etc
+ -rm -fr gerrit-{1,2}/{db,data,cache,db,git,index,etc,bin,tmp,plugins,lib}/*
+ export GERRIT_REPLICATION_INSTANCE=gerrit-2; cat ./gerrit-common/replication.config.template | envsubst '$${GERRIT_REPLICATION_INSTANCE}' > ./gerrit-1/etc/replication.config
+ export GERRIT_REPLICATION_INSTANCE=gerrit-1; cat ./gerrit-common/replication.config.template | envsubst '$${GERRIT_REPLICATION_INSTANCE}' > ./gerrit-2/etc/replication.config
+ cp ./gerrit-common/*.config ./gerrit-1/etc
+ cp ./gerrit-common/*.config ./gerrit-2/etc
cp ./gerrit-common/git-daemon.sh ./gerrit-1/bin
cp ./gerrit-common/git-daemon.sh ./gerrit-2/bin
diff --git a/dockerised_local_env/common-gerrit-config/replication.config.template b/dockerised_local_env/common-gerrit-config/replication.config.template
deleted file mode 100644
index 537e8d8..0000000
--- a/dockerised_local_env/common-gerrit-config/replication.config.template
+++ /dev/null
@@ -1,17 +0,0 @@
-[remote "Replication"]
- url = git://${GERRIT_REPLICATION_INSTANCE}:9418/${name}.git
- adminUrl = ssh://root@sshd:22/var/${GERRIT_REPLICATION_INSTANCE}/git/${name}.git
- push = +refs/*:refs/*
- timeout = 600
- rescheduleDelay = 15
- replicationDelay = 5
- mirror = true
- createMissingRepositories = true
- replicateProjectDeletions = true
- replicateHiddenProjects = true
-[gerrit]
- autoReload = true
- replicateOnStartup = true
-[replication]
- lockErrorMaxRetries = 5
- maxRetries = 5
diff --git a/dockerised_local_env/docker-compose.yaml b/dockerised_local_env/docker-compose.yaml
index 9bf3fdf..b15684d 100644
--- a/dockerised_local_env/docker-compose.yaml
+++ b/dockerised_local_env/docker-compose.yaml
@@ -15,12 +15,17 @@
- ./gerrit-1/etc:/var/gerrit/etc
- ./gerrit-1/db:/var/gerrit/db
- ./gerrit-1/plugins:/var/gerrit/plugins
+ - ./gerrit-1/lib:/var/gerrit/lib
- ./gerrit-1/tmp:/var/gerrit/tmp
+ - ./gerrit-common/shared-dir:/var/gerrit/shared-dir
ports:
- - "29418:29418"
- - "8080:8080"
+ - "39418:29418"
+ - "8081:8080"
depends_on:
- sshd
+ - zookeeper
+ - kafka-broker
+ container_name: gerrit-1
gerrit-2:
build: ./gerrit-2
networks:
@@ -36,12 +41,17 @@
- ./gerrit-2/etc:/var/gerrit/etc
- ./gerrit-2/db:/var/gerrit/db
- ./gerrit-2/plugins:/var/gerrit/plugins
+ - ./gerrit-2/lib:/var/gerrit/lib
- ./gerrit-2/tmp:/var/gerrit/tmp
+ - ./gerrit-common/shared-dir:/var/gerrit/shared-dir
ports:
- - "39418:29418"
- - "8081:8080"
+ - "49418:29418"
+ - "8082:8080"
depends_on:
- sshd
+ - zookeeper
+ - kafka-broker
+ container_name: gerrit-2
sshd:
build: ./sshd
networks:
@@ -50,6 +60,52 @@
- ./gerrit-2/git:/var/gerrit-2/git
- ./gerrit-2/ssh:/root/.ssh
- ./gerrit-1/git:/var/gerrit-1/git
+ container_name: sshd
+ zookeeper:
+ image: wurstmeister/zookeeper:latest
+ networks:
+ gerrit-net:
+ ports:
+ - "2181:2181"
+ kafka-broker:
+ image: wurstmeister/kafka:2.12-2.1.0
+ networks:
+ gerrit-net:
+ ports:
+ - "9092:9092"
+ container_name: kafka-broker
+ environment:
+ KAFKA_ADVERTISED_HOST_NAME: kafka-broker
+ KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+ haproxy:
+ build: haproxy
+ ports:
+ - "8080:8080"
+ - "29418:29418"
+ networks:
+ gerrit-net:
+ depends_on:
+ - syslog-sidecar
+ - gerrit-1
+ - gerrit-2
+ environment:
+ - SYSLOG_SIDECAR=syslog-sidecar
+ - GERRIT_1=gerrit-1
+ - GERRIT_1_SSH=29418
+ - GERRIT_1_HTTP=8080
+ - GERRIT_2=gerrit-2
+ - GERRIT_2_SSH=29418
+ - GERRIT_2_HTTP=8080
+ - HAPROXY_HTTP_PORT=8080
+ - HAPROXY_SSH_PORT=29418
+ syslog-sidecar:
+ image: balabit/syslog-ng:3.19.1
+ volumes:
+ - "./syslog-sidecar/logs:/var/log/syslog-ng"
+ - "./syslog-sidecar/socket:/var/run/syslog-ng"
+ - "./syslog-sidecar/config/:/etc/syslog-ng"
+ networks:
+ gerrit-net:
networks:
gerrit-net:
driver: bridge
diff --git a/dockerised_local_env/gerrit-1/ssh/known_hosts b/dockerised_local_env/gerrit-1/ssh/known_hosts
deleted file mode 100644
index 012fa68..0000000
--- a/dockerised_local_env/gerrit-1/ssh/known_hosts
+++ /dev/null
@@ -1,2 +0,0 @@
-
-sshd,* ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBKZRTdCgJ0FFTpP3ZzRgMMbYgaNmm6PDlg0e9QtOXzCG63GE41EExz2RWw7K9e6eRz+hSVf4hC/KMPgH3Clgo6w=
diff --git a/dockerised_local_env/gerrit-2/docker-entrypoint.sh b/dockerised_local_env/gerrit-2/docker-entrypoint.sh
index 09a3f4d..532377c 100755
--- a/dockerised_local_env/gerrit-2/docker-entrypoint.sh
+++ b/dockerised_local_env/gerrit-2/docker-entrypoint.sh
@@ -9,8 +9,11 @@
echo "Remove git repos created during init phase"
rm -fr /var/gerrit/git/*
- echo "Waiting for initial replication"
+ echo "Waiting for gerrit1 server to become available."
sleep 120
+ ssh -p 29418 admin@gerrit-1 replication start
+ echo "Waiting for replication to complete."
+ sleep 30
fi
java -jar /var/gerrit/bin/gerrit.war daemon
diff --git a/dockerised_local_env/gerrit-2/ssh/known_hosts b/dockerised_local_env/gerrit-2/ssh/known_hosts
deleted file mode 100644
index c6f2bdd..0000000
--- a/dockerised_local_env/gerrit-2/ssh/known_hosts
+++ /dev/null
@@ -1,2 +0,0 @@
-[localhost]:39418 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHAfiYyOe3Df8h+nT1axmB5F4cQQg/qnzvBEJsfKHt3uCYjkOLjjadYjujCnkzb74LToaw4pToTfAnCJ42jw5Bk=
-[gerrit-1]:22 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHAfiYyOe3Df8h+nT1axmB5F4cQQg/qnzvBEJsfKHt3uCYjkOLjjadYjujCnkzb74LToaw4pToTfAnCJ42jw5Bk=
diff --git a/dockerised_local_env/gerrit-common/gerrit.config b/dockerised_local_env/gerrit-common/gerrit.config
index cefc723..0a47bea 100644
--- a/dockerised_local_env/gerrit-common/gerrit.config
+++ b/dockerised_local_env/gerrit-common/gerrit.config
@@ -2,6 +2,7 @@
basePath = git
serverId = ff17821f-9571-42df-b690-30660f2d6e20
canonicalWebUrl = http://localhost:8080/
+ installModule = com.googlesource.gerrit.plugins.multisite.Module
[database]
type = h2
database = db/ReviewDB
@@ -27,9 +28,12 @@
smtpServer = localhost
[sshd]
listenAddress = *:29418
+ advertisedAddress = *:29418
[httpd]
- listenUrl = http://*:8080/
+ listenUrl = proxy-http://*:8080/
[cache]
directory = cache
[plugins]
allowRemoteAdmin = true
+[plugin "websession-flatfile"]
+ directory = /var/gerrit/shared-dir
diff --git a/dockerised_local_env/gerrit-common/healthcheck.config b/dockerised_local_env/gerrit-common/healthcheck.config
new file mode 100644
index 0000000..849e23f
--- /dev/null
+++ b/dockerised_local_env/gerrit-common/healthcheck.config
@@ -0,0 +1,9 @@
+[healthcheck]
+ timeout = 10s
+
+[healthcheck "querychanges"]
+ # No changes available when Gerrit is installed from scratch
+ enabled = false
+
+[healthcheck "auth"]
+  username = "admin"
diff --git a/dockerised_local_env/gerrit-common/multi-site.config b/dockerised_local_env/gerrit-common/multi-site.config
new file mode 100644
index 0000000..04b9c2c
--- /dev/null
+++ b/dockerised_local_env/gerrit-common/multi-site.config
@@ -0,0 +1,25 @@
+[index]
+ maxTries = 6
+ retryInterval = 30000
+ numStripedLocks = 100
+
+[kafka]
+ bootstrapServers = kafka-broker:9092
+ securityProtocol = PLAINTEXT
+ indexEventTopic = gerrit_index
+ streamEventTopic = gerrit_stream
+ projectListEventTopic = gerrit_list_project
+ cacheEventTopic = gerrit_cache_eviction
+
+[kafka "subscriber"]
+ enabled = true
+ pollingIntervalMs = 1000
+ KafkaProp-enableAutoCommit = true
+ KafkaProp-autoCommitIntervalMs = 1000
+ KafkaProp-autoOffsetReset = latest
+
+[kafka "publisher"]
+ enabled = true
+
+[ref-database "zookeeper"]
+ connectString = "zookeeper:2181"
diff --git a/dockerised_local_env/gerrit-common/replication.config.template b/dockerised_local_env/gerrit-common/replication.config.template
index f51530f..3864e92 100644
--- a/dockerised_local_env/gerrit-common/replication.config.template
+++ b/dockerised_local_env/gerrit-common/replication.config.template
@@ -11,7 +11,7 @@
replicateHiddenProjects = true
[gerrit]
autoReload = true
- replicateOnStartup = ${REPLICATE_ON_STARTUP}
+ replicateOnStartup = false
[replication]
lockErrorMaxRetries = 5
maxRetries = 5
diff --git a/dockerised_local_env/haproxy/Dockerfile b/dockerised_local_env/haproxy/Dockerfile
new file mode 100644
index 0000000..dd3f9cd
--- /dev/null
+++ b/dockerised_local_env/haproxy/Dockerfile
@@ -0,0 +1,13 @@
+FROM haproxy:1.9.4
+
+RUN apt-get update && \
+ apt-get -y install gettext-base netcat && \
+ rm -rf /var/lib/apt/lists/* && \
+ mkdir /var/lib/haproxy && \
+ mkdir /var/run/haproxy && \
+ useradd haproxy && \
+ chown haproxy: /var/lib/haproxy /var/run/haproxy
+
+COPY haproxy.cfg /usr/local/etc/haproxy/haproxy.cfg.template
+
+COPY docker-entrypoint.sh /
diff --git a/dockerised_local_env/haproxy/docker-entrypoint.sh b/dockerised_local_env/haproxy/docker-entrypoint.sh
new file mode 100755
index 0000000..b78a994
--- /dev/null
+++ b/dockerised_local_env/haproxy/docker-entrypoint.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+envsubst < /usr/local/etc/haproxy/haproxy.cfg.template > /usr/local/etc/haproxy/haproxy.cfg
+haproxy -f /usr/local/etc/haproxy/haproxy.cfg
diff --git a/dockerised_local_env/haproxy/haproxy.cfg b/dockerised_local_env/haproxy/haproxy.cfg
new file mode 100644
index 0000000..000ed9e
--- /dev/null
+++ b/dockerised_local_env/haproxy/haproxy.cfg
@@ -0,0 +1,63 @@
+global
+ log $SYSLOG_SIDECAR local0
+ maxconn 2048
+
+defaults
+ log global
+ mode http
+ option httplog
+ option dontlognull
+ timeout connect 5000
+ timeout client 900000
+ timeout server 900000
+ timeout check 30000
+
+frontend haproxynode
+ bind *:$HAPROXY_HTTP_PORT
+ mode http
+ acl redirect_reads url_reg -i git-upload-pack
+ acl redirect_reads url_reg -i clone.bundle
+ acl redirect_writes url_reg -i git-receive-pack
+ use_backend read-backendnodes if redirect_reads
+ use_backend write-backendnodes if redirect_writes
+ default_backend read-backendnodes
+
+frontend git_ssh
+ bind *:$HAPROXY_SSH_PORT
+ option tcplog
+ mode tcp
+ timeout client 5m
+ default_backend ssh
+
+backend read-backendnodes
+ mode http
+ balance source
+ option forwardfor
+ http-request set-header X-Forwarded-Port %[dst_port]
+ default-server inter 10s fall 3 rise 2
+ option httpchk GET /config/server/healthcheck~status HTTP/1.0
+ http-check expect status 200
+ server node1 $GERRIT_1:$GERRIT_1_HTTP check inter 10s
+ server node2 $GERRIT_2:$GERRIT_2_HTTP check inter 10s
+
+backend write-backendnodes
+ mode http
+ balance roundrobin
+ option forwardfor
+ http-request set-header X-Forwarded-Port %[dst_port]
+ default-server inter 10s fall 3 rise 2
+ option httpchk GET /config/server/healthcheck~status HTTP/1.0
+ http-check expect status 200
+ server node1 $GERRIT_1:$GERRIT_1_HTTP check inter 10s
+ server node2 $GERRIT_2:$GERRIT_2_HTTP check inter 10s backup
+
+backend ssh
+ mode tcp
+ option redispatch
+ option httpchk GET /config/server/healthcheck~status HTTP/1.0
+ http-check expect status 200
+ balance source
+ timeout connect 10s
+ timeout server 5m
+ server ssh_node1 $GERRIT_1:$GERRIT_1_SSH check inter 10s check port $GERRIT_1_HTTP inter 10s
+ server ssh_node2 $GERRIT_2:$GERRIT_2_SSH check inter 10s check port $GERRIT_2_HTTP inter 10s backup
diff --git a/dockerised_local_env/syslog-sidecar/config/syslog-ng.conf b/dockerised_local_env/syslog-sidecar/config/syslog-ng.conf
new file mode 100644
index 0000000..bbd27b6
--- /dev/null
+++ b/dockerised_local_env/syslog-sidecar/config/syslog-ng.conf
@@ -0,0 +1,31 @@
+@version: 3.11
+
+options {
+ keep_hostname(yes);
+ create_dirs(yes);
+ ts_format(iso);
+ time_reopen (10);
+ chain_hostnames (no);
+};
+
+source s_net {
+ tcp(
+ ip("0.0.0.0")
+ );
+ udp(
+ ip("0.0.0.0")
+ );
+ syslog(
+ ip("0.0.0.0")
+ );
+ unix-stream("/var/run/lock/syslog-ng.sock");
+};
+
+destination logfiles {
+ file("/var/log/syslog-ng/$PROGRAM.log");
+};
+
+log {
+ source(s_net);
+ destination(logfiles);
+};
diff --git a/src/main/java/com/googlesource/gerrit/plugins/multisite/Configuration.java b/src/main/java/com/googlesource/gerrit/plugins/multisite/Configuration.java
index 3764013..3226a42 100644
--- a/src/main/java/com/googlesource/gerrit/plugins/multisite/Configuration.java
+++ b/src/main/java/com/googlesource/gerrit/plugins/multisite/Configuration.java
@@ -18,7 +18,6 @@
import static com.google.common.base.Suppliers.ofInstance;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.CaseFormat;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
@@ -62,25 +61,18 @@
static final String THREAD_POOL_SIZE_KEY = "threadPoolSize";
static final int DEFAULT_INDEX_MAX_TRIES = 2;
static final int DEFAULT_INDEX_RETRY_INTERVAL = 30000;
- private static final int DEFAULT_POLLING_INTERVAL_MS = 1000;
static final int DEFAULT_THREAD_POOL_SIZE = 4;
static final String NUM_STRIPED_LOCKS = "numStripedLocks";
static final int DEFAULT_NUM_STRIPED_LOCKS = 10;
static final String ENABLE_KEY = "enabled";
- static final String DEFAULT_KAFKA_BOOTSTRAP_SERVERS = "localhost:9092";
- static final boolean DEFAULT_ENABLE_PROCESSING = true;
- static final String KAFKA_SECTION = "kafka";
- public static final String KAFKA_PROPERTY_PREFIX = "KafkaProp-";
- private final Supplier<KafkaPublisher> publisher;
private final Supplier<Cache> cache;
private final Supplier<Event> event;
private final Supplier<Index> index;
- private final Supplier<KafkaSubscriber> subscriber;
- private final Supplier<Kafka> kafka;
private final Supplier<SharedRefDatabase> sharedRefDb;
private final Supplier<Collection<Message>> replicationConfigValidation;
private final Config multiSiteConfig;
+ private final KafkaConfiguration kafkaConfig;
@Inject
Configuration(SitePaths sitePaths) {
@@ -91,10 +83,8 @@
public Configuration(Config multiSiteConfig, Config replicationConfig) {
Supplier<Config> lazyMultiSiteCfg = lazyLoad(multiSiteConfig);
this.multiSiteConfig = multiSiteConfig;
+ this.kafkaConfig = new KafkaConfiguration(multiSiteConfig);
replicationConfigValidation = lazyValidateReplicatioConfig(replicationConfig);
- kafka = memoize(() -> new Kafka(lazyMultiSiteCfg));
- publisher = memoize(() -> new KafkaPublisher(lazyMultiSiteCfg));
- subscriber = memoize(() -> new KafkaSubscriber(lazyMultiSiteCfg));
cache = memoize(() -> new Cache(lazyMultiSiteCfg));
event = memoize(() -> new Event(lazyMultiSiteCfg));
index = memoize(() -> new Index(lazyMultiSiteCfg));
@@ -110,11 +100,11 @@
}
public Kafka getKafka() {
- return kafka.get();
+ return kafkaConfig.getKafka();
}
public KafkaPublisher kafkaPublisher() {
- return publisher.get();
+ return kafkaConfig.kafkaPublisher();
}
public Cache cache() {
@@ -130,7 +120,7 @@
}
public KafkaSubscriber kafkaSubscriber() {
- return subscriber.get();
+ return kafkaConfig.kafkaSubscriber();
}
public Collection<Message> validate() {
@@ -192,59 +182,6 @@
}
}
- private static String getString(
- Supplier<Config> cfg, String section, String subsection, String name, String defaultValue) {
- String value = cfg.get().getString(section, subsection, name);
- if (!Strings.isNullOrEmpty(value)) {
- return value;
- }
- return defaultValue;
- }
-
- private static Map<EventFamily, Boolean> eventsEnabled(
- Supplier<Config> config, String subsection) {
- Map<EventFamily, Boolean> eventsEnabled = new HashMap<>();
- for (EventFamily eventFamily : EventFamily.values()) {
- String enabledConfigKey = eventFamily.lowerCamelName() + "Enabled";
-
- eventsEnabled.put(
- eventFamily,
- config
- .get()
- .getBoolean(KAFKA_SECTION, subsection, enabledConfigKey, DEFAULT_ENABLE_PROCESSING));
- }
- return eventsEnabled;
- }
-
- private static void applyKafkaConfig(
- Supplier<Config> configSupplier, String subsectionName, Properties target) {
- Config config = configSupplier.get();
- for (String section : config.getSubsections(KAFKA_SECTION)) {
- if (section.equals(subsectionName)) {
- for (String name : config.getNames(KAFKA_SECTION, section, true)) {
- if (name.startsWith(KAFKA_PROPERTY_PREFIX)) {
- Object value = config.getString(KAFKA_SECTION, subsectionName, name);
- String configProperty = name.replaceFirst(KAFKA_PROPERTY_PREFIX, "");
- String propName =
- CaseFormat.LOWER_CAMEL
- .to(CaseFormat.LOWER_HYPHEN, configProperty)
- .replaceAll("-", ".");
- log.info("[{}] Setting kafka property: {} = {}", subsectionName, propName, value);
- target.put(propName, value);
- }
- }
- }
- }
- target.put(
- "bootstrap.servers",
- getString(
- configSupplier,
- KAFKA_SECTION,
- null,
- "bootstrapServers",
- DEFAULT_KAFKA_BOOTSTRAP_SERVERS));
- }
-
public static class Kafka {
private final Map<EventFamily, String> eventTopics;
private final String bootstrapServers;
@@ -263,14 +200,23 @@
Kafka(Supplier<Config> config) {
this.bootstrapServers =
getString(
- config, KAFKA_SECTION, null, "bootstrapServers", DEFAULT_KAFKA_BOOTSTRAP_SERVERS);
+ config,
+ KafkaConfiguration.KAFKA_SECTION,
+ null,
+ "bootstrapServers",
+ KafkaConfiguration.DEFAULT_KAFKA_BOOTSTRAP_SERVERS);
this.eventTopics = new HashMap<>();
for (Map.Entry<EventFamily, String> topicDefault : EVENT_TOPICS.entrySet()) {
String topicConfigKey = topicDefault.getKey().lowerCamelName() + "Topic";
eventTopics.put(
topicDefault.getKey(),
- getString(config, KAFKA_SECTION, null, topicConfigKey, topicDefault.getValue()));
+ getString(
+ config,
+ KafkaConfiguration.KAFKA_SECTION,
+ null,
+ topicConfigKey,
+ topicDefault.getValue()));
}
}
@@ -281,6 +227,15 @@
public String getBootstrapServers() {
return bootstrapServers;
}
+
+ private static String getString(
+ Supplier<Config> cfg, String section, String subsection, String name, String defaultValue) {
+ String value = cfg.get().getString(section, subsection, name);
+ if (!Strings.isNullOrEmpty(value)) {
+ return value;
+ }
+ return defaultValue;
+ }
}
public static class KafkaPublisher extends Properties {
@@ -294,17 +249,20 @@
private final boolean enabled;
private final Map<EventFamily, Boolean> eventsEnabled;
- private KafkaPublisher(Supplier<Config> cfg) {
+ KafkaPublisher(Supplier<Config> cfg) {
enabled =
cfg.get()
.getBoolean(
- KAFKA_SECTION, KAFKA_PUBLISHER_SUBSECTION, ENABLE_KEY, DEFAULT_BROKER_ENABLED);
+ KafkaConfiguration.KAFKA_SECTION,
+ KAFKA_PUBLISHER_SUBSECTION,
+ KafkaConfiguration.ENABLE_KEY,
+ DEFAULT_BROKER_ENABLED);
- eventsEnabled = eventsEnabled(cfg, KAFKA_PUBLISHER_SUBSECTION);
+ eventsEnabled = KafkaConfiguration.eventsEnabled(cfg, KAFKA_PUBLISHER_SUBSECTION);
if (enabled) {
setDefaults();
- applyKafkaConfig(cfg, KAFKA_PUBLISHER_SUBSECTION, this);
+ KafkaConfiguration.applyKafkaConfig(cfg, KAFKA_PUBLISHER_SUBSECTION, this);
}
}
@@ -343,17 +301,22 @@
this.pollingInterval =
cfg.getInt(
- KAFKA_SECTION,
+ KafkaConfiguration.KAFKA_SECTION,
KAFKA_SUBSCRIBER_SUBSECTION,
"pollingIntervalMs",
- DEFAULT_POLLING_INTERVAL_MS);
+ KafkaConfiguration.DEFAULT_POLLING_INTERVAL_MS);
- enabled = cfg.getBoolean(KAFKA_SECTION, KAFKA_SUBSCRIBER_SUBSECTION, ENABLE_KEY, false);
+ enabled =
+ cfg.getBoolean(
+ KafkaConfiguration.KAFKA_SECTION,
+ KAFKA_SUBSCRIBER_SUBSECTION,
+ KafkaConfiguration.ENABLE_KEY,
+ false);
- eventsEnabled = eventsEnabled(configSupplier, KAFKA_SUBSCRIBER_SUBSECTION);
+ eventsEnabled = KafkaConfiguration.eventsEnabled(configSupplier, KAFKA_SUBSCRIBER_SUBSECTION);
if (enabled) {
- applyKafkaConfig(configSupplier, KAFKA_SUBSCRIBER_SUBSECTION, this);
+ KafkaConfiguration.applyKafkaConfig(configSupplier, KAFKA_SUBSCRIBER_SUBSECTION, this);
}
}
@@ -368,7 +331,11 @@
public Properties initPropsWith(UUID instanceId) {
String groupId =
getString(
- cfg, KAFKA_SECTION, KAFKA_SUBSCRIBER_SUBSECTION, "groupId", instanceId.toString());
+ cfg,
+ KafkaConfiguration.KAFKA_SECTION,
+ KAFKA_SUBSCRIBER_SUBSECTION,
+ "groupId",
+ instanceId.toString());
this.put("group.id", groupId);
return this;
diff --git a/src/main/java/com/googlesource/gerrit/plugins/multisite/KafkaConfiguration.java b/src/main/java/com/googlesource/gerrit/plugins/multisite/KafkaConfiguration.java
new file mode 100644
index 0000000..3376b49
--- /dev/null
+++ b/src/main/java/com/googlesource/gerrit/plugins/multisite/KafkaConfiguration.java
@@ -0,0 +1,145 @@
+// Copyright (C) 2019 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package com.googlesource.gerrit.plugins.multisite;
+
+import static com.google.common.base.Suppliers.memoize;
+import static com.google.common.base.Suppliers.ofInstance;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.CaseFormat;
+import com.google.common.base.Strings;
+import com.google.common.base.Supplier;
+import com.googlesource.gerrit.plugins.multisite.Configuration.Kafka;
+import com.googlesource.gerrit.plugins.multisite.Configuration.KafkaPublisher;
+import com.googlesource.gerrit.plugins.multisite.Configuration.KafkaSubscriber;
+import com.googlesource.gerrit.plugins.multisite.forwarder.events.EventFamily;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import org.eclipse.jgit.errors.ConfigInvalidException;
+import org.eclipse.jgit.lib.Config;
+import org.eclipse.jgit.storage.file.FileBasedConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class KafkaConfiguration {
+ private static final Logger log = LoggerFactory.getLogger(KafkaConfiguration.class);
+
+ static final String KAFKA_PROPERTY_PREFIX = "KafkaProp-";
+ static final String KAFKA_SECTION = "kafka";
+ static final String ENABLE_KEY = "enabled";
+ static final String DEFAULT_KAFKA_BOOTSTRAP_SERVERS = "localhost:9092";
+ static final boolean DEFAULT_ENABLE_PROCESSING = true;
+ static final int DEFAULT_POLLING_INTERVAL_MS = 1000;
+
+ private final Supplier<KafkaSubscriber> subscriber;
+ private final Supplier<Kafka> kafka;
+ private final Supplier<KafkaPublisher> publisher;
+
+ @VisibleForTesting
+ public KafkaConfiguration(Config kafkaConfig) {
+ Supplier<Config> lazyCfg = lazyLoad(kafkaConfig);
+ kafka = memoize(() -> new Kafka(lazyCfg));
+ publisher = memoize(() -> new KafkaPublisher(lazyCfg));
+ subscriber = memoize(() -> new KafkaSubscriber(lazyCfg));
+ }
+
+ public Kafka getKafka() {
+ return kafka.get();
+ }
+
+ public KafkaSubscriber kafkaSubscriber() {
+ return subscriber.get();
+ }
+
+ static void applyKafkaConfig(
+ Supplier<Config> configSupplier, String subsectionName, Properties target) {
+ Config config = configSupplier.get();
+ for (String section : config.getSubsections(KAFKA_SECTION)) {
+ if (section.equals(subsectionName)) {
+ for (String name : config.getNames(KAFKA_SECTION, section, true)) {
+ if (name.startsWith(KAFKA_PROPERTY_PREFIX)) {
+ Object value = config.getString(KAFKA_SECTION, subsectionName, name);
+ String configProperty = name.replaceFirst(KAFKA_PROPERTY_PREFIX, "");
+ String propName =
+ CaseFormat.LOWER_CAMEL
+ .to(CaseFormat.LOWER_HYPHEN, configProperty)
+ .replaceAll("-", ".");
+ log.info("[{}] Setting kafka property: {} = {}", subsectionName, propName, value);
+ target.put(propName, value);
+ }
+ }
+ }
+ }
+ target.put(
+ "bootstrap.servers",
+ getString(
+ configSupplier,
+ KAFKA_SECTION,
+ null,
+ "bootstrapServers",
+ DEFAULT_KAFKA_BOOTSTRAP_SERVERS));
+ }
+
+ private static String getString(
+ Supplier<Config> cfg, String section, String subsection, String name, String defaultValue) {
+ String value = cfg.get().getString(section, subsection, name);
+ if (!Strings.isNullOrEmpty(value)) {
+ return value;
+ }
+ return defaultValue;
+ }
+
+ static Map<EventFamily, Boolean> eventsEnabled(Supplier<Config> config, String subsection) {
+ Map<EventFamily, Boolean> eventsEnabled = new HashMap<>();
+ for (EventFamily eventFamily : EventFamily.values()) {
+ String enabledConfigKey = eventFamily.lowerCamelName() + "Enabled";
+
+ eventsEnabled.put(
+ eventFamily,
+ config
+ .get()
+ .getBoolean(
+ KafkaConfiguration.KAFKA_SECTION,
+ subsection,
+ enabledConfigKey,
+ KafkaConfiguration.DEFAULT_ENABLE_PROCESSING));
+ }
+ return eventsEnabled;
+ }
+
+ public KafkaPublisher kafkaPublisher() {
+ return publisher.get();
+ }
+
+ private Supplier<Config> lazyLoad(Config config) {
+ if (config instanceof FileBasedConfig) {
+ return memoize(
+ () -> {
+ FileBasedConfig fileConfig = (FileBasedConfig) config;
+ String fileConfigFileName = fileConfig.getFile().getPath();
+ try {
+ log.info("Loading configuration from {}", fileConfigFileName);
+ fileConfig.load();
+ } catch (IOException | ConfigInvalidException e) {
+ log.error("Unable to load configuration from " + fileConfigFileName, e);
+ }
+ return fileConfig;
+ });
+ }
+ return ofInstance(config);
+ }
+}
diff --git a/src/test/java/com/googlesource/gerrit/plugins/multisite/ConfigurationTest.java b/src/test/java/com/googlesource/gerrit/plugins/multisite/ConfigurationTest.java
index 4ceead9..ec3a431 100644
--- a/src/test/java/com/googlesource/gerrit/plugins/multisite/ConfigurationTest.java
+++ b/src/test/java/com/googlesource/gerrit/plugins/multisite/ConfigurationTest.java
@@ -23,11 +23,11 @@
import static com.googlesource.gerrit.plugins.multisite.Configuration.Forwarding.DEFAULT_SYNCHRONIZE;
import static com.googlesource.gerrit.plugins.multisite.Configuration.Forwarding.SYNCHRONIZE_KEY;
import static com.googlesource.gerrit.plugins.multisite.Configuration.Index.INDEX_SECTION;
-import static com.googlesource.gerrit.plugins.multisite.Configuration.KAFKA_PROPERTY_PREFIX;
-import static com.googlesource.gerrit.plugins.multisite.Configuration.KAFKA_SECTION;
import static com.googlesource.gerrit.plugins.multisite.Configuration.KafkaPublisher.KAFKA_PUBLISHER_SUBSECTION;
import static com.googlesource.gerrit.plugins.multisite.Configuration.KafkaSubscriber.KAFKA_SUBSCRIBER_SUBSECTION;
import static com.googlesource.gerrit.plugins.multisite.Configuration.THREAD_POOL_SIZE_KEY;
+import static com.googlesource.gerrit.plugins.multisite.KafkaConfiguration.KAFKA_PROPERTY_PREFIX;
+import static com.googlesource.gerrit.plugins.multisite.KafkaConfiguration.KAFKA_SECTION;
import com.google.common.collect.ImmutableList;
import org.eclipse.jgit.lib.Config;