Revert "No downtime when rebuilding ES indices"

This reverts commit 4410948f29afc4122b7a435c14f1e06f632953e1.
The index aliasing never worked properly, and it is now preventing
the use of future Elasticsearch versions because of its direct access
to the ES API, which is entirely unneeded.
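
With the alias-swap machinery removed, the jobs write straight through
the elasticsearch-spark connector again, as restored in Main.scala:

    import org.elasticsearch.spark.sql._

    // write the transformed events directly to the configured index
    .saveToEs(s"${config.elasticSearchIndex.get}/$DOCUMENT_TYPE")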

Change-Id: I98b469f502189507553a03c89b06c6ccb8b32a80
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
index e3e267e..56cd33e 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
@@ -27,6 +27,7 @@
 import com.gerritforge.analytics.common.api.GerritConnectivity
 import com.gerritforge.analytics.spark.SparkApp
 import com.typesafe.scalalogging.LazyLogging
+import org.elasticsearch.spark.sql._
 
 object Main extends SparkApp with App with LazyLogging {
   override val appName = "Gerrit AuditLog Analytics ETL"
@@ -70,8 +71,6 @@
         sys.exit(1)
       }
 
-      import com.gerritforge.analytics.infrastructure.ESSparkWriterImplicits.withAliasSwap
-      import scala.concurrent.ExecutionContext.Implicits.global
       spark
         .getEventsFromPath(config.eventsPath.get)
         .transformEvents(
@@ -81,14 +80,11 @@
           config.eventsTimeAggregation.get,
           TimeRange(config.since, config.until)
         )
-        .saveToEsWithAliasSwap(config.elasticSearchIndex.get, DOCUMENT_TYPE)
-        .futureAction
-        .map(actionRespose => logger.info(s"Completed index swap ${actionRespose}"))
-        .recover { case exception: Exception => logger.info(s"Index swap failed ${exception}") }
+        .saveToEs(s"${config.elasticSearchIndex.get}/$DOCUMENT_TYPE")
 
     case None =>
       logger.error("Could not parse command line arguments")
       sys.exit(1)
   }
 
-}
+}
\ No newline at end of file
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
index 242c78a..9b05ce9 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
@@ -43,5 +43,5 @@
 
   val ALL_DOCUMENT_FIELDS: List[String] = FACETING_FIELDS :+ NUM_EVENTS_FIELD
 
-  val DOCUMENT_TYPE: String = "auditlog"
+  val DOCUMENT_TYPE = "auditlog"
 }
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/AuditLogsTransformer.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/AuditLogsTransformer.scala
index 04dbb90..535ba26 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/AuditLogsTransformer.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/AuditLogsTransformer.scala
@@ -48,7 +48,7 @@
       .toJsonString
       .toJsonTableDataFrame
       .hydrateWithUserIdentifierColumn(USER_IDENTIFIER_FIELD, broadcastUserIdentifiers.value)
-      .withTimeBucketColumn(TIME_BUCKET_FIELD, timeAggregation)
+      .withTimeBucketColum(TIME_BUCKET_FIELD, timeAggregation)
       .withCommandColumns(COMMAND_FIELD, COMMAND_ARGS_FIELD)
       .withSubCommandColumns(SUB_COMMAND_FIELD)
       .withUserTypeColumn(USER_TYPE_FIELD, broadcastAdditionalUsersInfo.value)
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/dataframe/ops/DataFrameOps.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/dataframe/ops/DataFrameOps.scala
index 9cdc721..ec4e09c 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/dataframe/ops/DataFrameOps.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/spark/dataframe/ops/DataFrameOps.scala
@@ -53,7 +53,7 @@
 
     }
 
-    def withTimeBucketColumn(timeBucketCol: String, timeAggregation: String): DataFrame = {
+    def withTimeBucketColum(timeBucketCol: String, timeAggregation: String): DataFrame = {
       dataFrame
         .withColumn(
           timeBucketCol,
diff --git a/common/src/main/scala/com/gerritforge/analytics/common/api/ElasticSearchAliasOps.scala b/common/src/main/scala/com/gerritforge/analytics/common/api/ElasticSearchAliasOps.scala
deleted file mode 100644
index 942049e..0000000
--- a/common/src/main/scala/com/gerritforge/analytics/common/api/ElasticSearchAliasOps.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.common.api
-import com.sksamuel.elastic4s.Index
-import com.sksamuel.elastic4s.alias.{AddAliasActionRequest, RemoveAliasAction}
-import com.sksamuel.elastic4s.http.ElasticDsl._
-import com.sksamuel.elastic4s.http.index.admin.AliasActionResponse
-import com.sksamuel.elastic4s.http.{ElasticClient, Response}
-import com.typesafe.scalalogging.Logger
-
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.Future
-
-trait ElasticSearchAliasOps {
-
-  val esClient: ElasticClient
-
-  private val logger = Logger(classOf[ElasticSearchAliasOps])
-
-  def getIndicesFromAlias(aliasName: String): Future[Iterable[Index]] = {
-    logger.info(s"Getting indices from $aliasName")
-
-    esClient
-      .execute(
-        getAliases(aliasName, Seq.empty[String])
-      )
-      .map(_.result.mappings.keys)
-
-  }
-
-  def moveAliasToNewIndex(
-      aliasName: String,
-      newIndexName: String
-  ): Future[Response[AliasActionResponse]] = {
-    val oldIndices: Future[Iterable[Index]] = getIndicesFromAlias(aliasName)
-
-    oldIndices.flatMap { indices =>
-      val removeAliasActions: Iterable[RemoveAliasAction] = indices.map { idxName =>
-        removeAlias(aliasName) on s"${idxName.name}"
-      }
-      val addAliasAction: AddAliasActionRequest = addAlias(aliasName) on newIndexName
-
-      logger.info(
-        s"Replacing old indices (${indices.mkString(",")}) with $newIndexName from alias $aliasName"
-      )
-
-      esClient.execute {
-        aliases(
-          removeAliasActions ++ List(addAliasAction)
-        )
-      }
-    }
-  }
-}
diff --git a/common/src/main/scala/com/gerritforge/analytics/common/api/SparkEsClientProvider.scala b/common/src/main/scala/com/gerritforge/analytics/common/api/SparkEsClientProvider.scala
deleted file mode 100644
index 57beb76..0000000
--- a/common/src/main/scala/com/gerritforge/analytics/common/api/SparkEsClientProvider.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.common.api
-import com.sksamuel.elastic4s.http.ElasticClient
-import org.apache.http.HttpHost
-import org.apache.spark.sql.SparkSession
-import org.elasticsearch.client.RestClient
-import org.elasticsearch.hadoop.cfg.{PropertiesSettings, Settings}
-import org.elasticsearch.spark.cfg.SparkSettingsManager
-
-trait SparkEsClientProvider {
-
-  val esSparkSession: SparkSession
-
-  private lazy val sparkCfg =
-    new SparkSettingsManager()
-      .load(esSparkSession.sqlContext.sparkContext.getConf)
-
-  private lazy val esCfg: Settings = new PropertiesSettings()
-    .load(sparkCfg.save())
-
-  private lazy val restClient: RestClient =
-    RestClient.builder(new HttpHost(esCfg.getNodes, esCfg.getPort, "http")).build()
-
-  lazy val esClient: ElasticClient = ElasticClient.fromRestClient(restClient)
-
-  def closeElasticsearchClientConn(): Unit = {
-    esClient.close()
-    restClient.close()
-  }
-}
diff --git a/common/src/main/scala/com/gerritforge/analytics/infrastructure/esSparkWriter.scala b/common/src/main/scala/com/gerritforge/analytics/infrastructure/esSparkWriter.scala
deleted file mode 100644
index 5ccaf08..0000000
--- a/common/src/main/scala/com/gerritforge/analytics/infrastructure/esSparkWriter.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.infrastructure
-import java.time.Instant
-
-import com.gerritforge.analytics.common.api.{ElasticSearchAliasOps, SparkEsClientProvider}
-import com.gerritforge.analytics.support.ops.IndexNameGenerator
-import com.sksamuel.elastic4s.http.index.admin.AliasActionResponse
-import com.typesafe.scalalogging.LazyLogging
-import org.apache.spark.sql.{Dataset, SparkSession}
-import org.elasticsearch.spark.sql._
-
-import scala.concurrent.Future
-
-case class EnrichedAliasActionResponse(futureAction: Future[AliasActionResponse], path: String)
-
-object ESSparkWriterImplicits {
-  implicit def withAliasSwap[T](data: Dataset[T]): ElasticSearchPimpedWriter[T] =
-    new ElasticSearchPimpedWriter[T](data)
-}
-
-class ElasticSearchPimpedWriter[T](data: Dataset[T])
-    extends ElasticSearchAliasOps
-    with LazyLogging
-    with SparkEsClientProvider {
-
-  def saveToEsWithAliasSwap(
-      aliasName: String,
-      documentType: String
-  ): EnrichedAliasActionResponse = {
-    val newIndexNameWithTime = IndexNameGenerator.timeBasedIndexName(aliasName, Instant.now())
-    val newPersistencePath   = s"$newIndexNameWithTime/$documentType"
-
-    logger.info(
-      s"Storing data into $newPersistencePath and swapping alias $aliasName to read from the new index"
-    )
-
-    import scala.concurrent.ExecutionContext.Implicits.global
-    // Save data
-    val futureResponse: Future[AliasActionResponse] = try {
-      data
-        .toDF()
-        .saveToEs(newPersistencePath)
-
-      logger.info(
-        s"Successfully stored the data into index $newIndexNameWithTime. Will now update the alias $aliasName"
-      )
-      moveAliasToNewIndex(aliasName, newIndexNameWithTime).flatMap { response =>
-        if (response.isSuccess && response.result.success) {
-          logger.info("Alias was updated successfully")
-          closeElasticsearchClientConn()
-          Future.successful(response.result)
-        } else {
-          closeElasticsearchClientConn()
-          logger.error(s"Alias update failed with response result error ${response.error}")
-          logger.error(s"Alias update failed with ES ACK: ${response.result.acknowledged}")
-          Future.failed(new Exception(s"Index alias $aliasName update failure ${response.error}"))
-        }
-      }
-    } catch {
-      case e: Exception =>
-        Future.failed[AliasActionResponse](e)
-    }
-    EnrichedAliasActionResponse(futureResponse, newPersistencePath)
-  }
-
-  override val esSparkSession: SparkSession = data.sparkSession
-}
diff --git a/common/src/main/scala/com/gerritforge/analytics/support/ops/IndexNameGenerator.scala b/common/src/main/scala/com/gerritforge/analytics/support/ops/IndexNameGenerator.scala
deleted file mode 100644
index 96fdb0f..0000000
--- a/common/src/main/scala/com/gerritforge/analytics/support/ops/IndexNameGenerator.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.support.ops
-import java.time.{Instant, LocalDateTime, ZoneId}
-
-object IndexNameGenerator {
-  def timeBasedIndexName(indexName: String, instant: Instant): String = {
-    val now: Long = instant.toEpochMilli
-    val dateWithStrFormat: String =
-      LocalDateTime
-        .ofInstant(Instant.ofEpochMilli(now), ZoneId.systemDefault())
-        .format(AnalyticsDateTimeFormatter.yyyy_MM_dd)
-
-    s"${indexName}_${dateWithStrFormat}_$now"
-  }
-}
diff --git a/common/src/test/scala/com/gerritforge/analytics/SparkTestSupport.scala b/common/src/test/scala/com/gerritforge/analytics/SparkTestSupport.scala
index dd58538..b37a562 100644
--- a/common/src/test/scala/com/gerritforge/analytics/SparkTestSupport.scala
+++ b/common/src/test/scala/com/gerritforge/analytics/SparkTestSupport.scala
@@ -32,7 +32,7 @@
     .getOrCreate()
 
   implicit lazy val sc: SparkContext = spark.sparkContext
-  implicit lazy val sql: SQLContext  = spark.sqlContext
+  implicit lazy val sql: SQLContext = spark.sqlContext
 
   override protected def afterAll() = {
     spark.close()
diff --git a/common/src/test/scala/com/gerritforge/analytics/infrastructure/ElasticSearchPimpedWriterIT.scala b/common/src/test/scala/com/gerritforge/analytics/infrastructure/ElasticSearchPimpedWriterIT.scala
deleted file mode 100644
index 011ccc3..0000000
--- a/common/src/test/scala/com/gerritforge/analytics/infrastructure/ElasticSearchPimpedWriterIT.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.infrastructure
-import com.gerritforge.analytics.SparkTestSupport
-import com.gerritforge.analytics.support.ops.ElasticsearchTestITSupport
-import org.apache.spark.sql.Dataset
-import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
-
-import scala.concurrent.Await
-import scala.concurrent.duration._
-
-class ElasticSearchPimpedWriterIT
-    extends FlatSpec
-    with Matchers
-    with BeforeAndAfterAll
-    with SparkTestSupport
-    with ElasticsearchTestITSupport {
-
-  override lazy val elasticSearchConnPort: Int = esHostHTTPExtPortMapping
-  import ESSparkWriterImplicits.withAliasSwap
-  import spark.implicits._
-
-  "Saving and reading from same ES alias" must "work while changing indices mapping" in {
-
-    val aliasName    = "the_alias"
-    val documentName = "doc"
-
-    // Writing into the first index
-    val dataIntoIndexOne: Dataset[String] = "Content in the first index".split(" ").toList.toDS()
-    Await.result(
-      dataIntoIndexOne.saveToEsWithAliasSwap(aliasName, documentName).futureAction,
-      2 seconds
-    )
-    // Reading from the alias
-    val resultFromAliasFirst: Dataset[String] =
-      spark.read.format("es").load(s"$aliasName/$documentName").as[String]
-
-    // Written should equal Read
-    dataIntoIndexOne
-      .collect()
-      .toList should contain only (resultFromAliasFirst.collect().toList: _*)
-
-    // Writing into the second index
-    val dataIntoIndexTwo: Dataset[String] = "Content in the second index".split(" ").toList.toDS()
-    Await.result(
-      dataIntoIndexTwo.saveToEsWithAliasSwap(aliasName, documentName).futureAction,
-      2 seconds
-    )
-    // Reading from the alias
-    val resultFromAliasSecond: Dataset[String] =
-      spark.read.format("es").load(s"$aliasName/$documentName").as[String]
-
-    // Written should equal Read
-    dataIntoIndexTwo
-      .collect()
-      .toList should contain only (resultFromAliasSecond.collect().toList: _*)
-  }
-
-  "Saving data into ES" must "succeed even if alias creation fails" in {
-    import org.elasticsearch.spark.sql._
-    val indexWithAliasName     = "alias_name"
-    val documentName           = "doc"
-    val indexWithAliasNameData = List("This", "index", "will", "make", "alias", "creation", "fail")
-
-    indexWithAliasNameData.toDS.saveToEs(s"$indexWithAliasName/$documentName")
-
-    val indexWithAliasData = List("An", "index", "with", "alias")
-    val aliasActionResponse: EnrichedAliasActionResponse =
-      indexWithAliasData.toDS.saveToEsWithAliasSwap(indexWithAliasName, documentName)
-
-    val oldIndexData =
-      spark.read.format("es").load(s"$indexWithAliasName/$documentName").as[String]
-    val newIndexData =
-      spark.read.format("es").load(aliasActionResponse.path).as[String]
-
-    assertThrows[NoSuchElementException] {
-      Await.result(aliasActionResponse.futureAction, 2 seconds)
-    }
-    newIndexData.collect().toList should contain only (indexWithAliasData: _*)
-    oldIndexData.collect().toList should contain only (indexWithAliasNameData: _*)
-  }
-}
diff --git a/common/src/test/scala/com/gerritforge/analytics/support/ops/IndexNameGeneratorSpec.scala b/common/src/test/scala/com/gerritforge/analytics/support/ops/IndexNameGeneratorSpec.scala
deleted file mode 100644
index 69dd337..0000000
--- a/common/src/test/scala/com/gerritforge/analytics/support/ops/IndexNameGeneratorSpec.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (C) 2019 GerritForge Ltd
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package com.gerritforge.analytics.support.ops
-import java.time.{Instant, LocalDateTime, ZoneOffset}
-
-import org.scalatest.{FlatSpec, Matchers}
-
-class IndexNameGeneratorSpec extends FlatSpec with Matchers {
-
-  "Index name generator" should "return an index name based on current time" in {
-    val instantUTC: Instant =
-      LocalDateTime
-        .of(2019, 1, 1, 12, 0, 0, 0)
-        .atOffset(ZoneOffset.UTC)
-        .toInstant
-
-    val indexName = "index_name"
-
-    val timeBasedIndexName: String = IndexNameGenerator.timeBasedIndexName(indexName, instantUTC)
-
-    val expectedIndexName = s"${indexName}_2019-01-01_${instantUTC.toEpochMilli}"
-    timeBasedIndexName shouldEqual (expectedIndexName)
-  }
-}
diff --git a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
index b35a8f5..af21fff 100644
--- a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
+++ b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
@@ -134,13 +134,11 @@
   }
 
   def saveES(df: DataFrame)(implicit config: GerritEndpointConfig) {
-    import scala.concurrent.ExecutionContext.Implicits.global
+    import org.elasticsearch.spark.sql._
     config.elasticIndex.foreach { esIndex =>
-      import com.gerritforge.analytics.infrastructure.ESSparkWriterImplicits.withAliasSwap
-      df.saveToEsWithAliasSwap(esIndex, indexType)
-        .futureAction
-        .map(actionRespose => logger.info(s"Completed index swap ${actionRespose}"))
-        .recover { case exception: Exception => logger.info(s"Index swap failed ${exception}") }
+      logger.info(
+        s"ES content created, saving it to elastic search instance at '${config.elasticIndex}/$indexType'")
+      df.saveToEs(s"$esIndex/$indexType")
     }
 
   }
diff --git a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
index d647f84..b024a14 100644
--- a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
+++ b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
@@ -104,18 +104,13 @@
       val projectStats = buildProjectStats().cache()
       val numRows      = projectStats.count()
 
+      import org.elasticsearch.spark.sql._
       config.elasticIndex.foreach { esIndex =>
         stdout.println(
           s"$numRows rows extracted. Posting Elasticsearch at '${config.elasticIndex}/$indexType'"
         )
         stdout.flush()
-        import com.gerritforge.analytics.infrastructure.ESSparkWriterImplicits.withAliasSwap
-        import scala.concurrent.ExecutionContext.Implicits.global
-        projectStats
-          .saveToEsWithAliasSwap(esIndex, indexType)
-          .futureAction
-          .map(actionRespose => logger.info(s"Completed index swap ${actionRespose}"))
-          .recover { case exception: Exception => logger.info(s"Index swap failed ${exception}") }
+        projectStats.saveToEs(s"$esIndex/$indexType")
       }
 
       val elaspsedTs = (System.currentTimeMillis - startTs) / 1000L
diff --git a/project/SharedSettings.scala b/project/SharedSettings.scala
index 8c30929..6605dcd 100644
--- a/project/SharedSettings.scala
+++ b/project/SharedSettings.scala
@@ -21,10 +21,6 @@
 import sbtdocker.DockerPlugin.autoImport._
 
 object SharedSettings {
-  val elastic4s = Seq(
-    "com.sksamuel.elastic4s" %% "elastic4s-core" % Elastic4sVersion,
-    "com.sksamuel.elastic4s" %% "elastic4s-http" % Elastic4sVersion
-  )
 
   private val dockerRepositoryPrefix = "gerrit-analytics-etl"
 
@@ -48,7 +44,7 @@
       "org.scalactic"              %% "scalactic"            % scalactic % "test",
       "org.scalatest"              %% "scalatest"            % scalaTest % "test",
       "com.dimafeng"               %% "testcontainers-scala" % TestContainersScala % Test
-    ) ++ elastic4s
+    )
   )
 
   def commonDockerSettings(projectName: String): Seq[Def.Setting[_]] = {
@@ -106,7 +102,7 @@
 }
 
 object Versions {
-  val Elastic4sVersion    = "6.5.1"
+
   val sparkVersion        = "2.3.3"
   val gerritApiVersion    = "2.13.7"
   val esSpark             = "6.2.0"