Use index name from input parameter

Elasticsearch 6.x expects the index in the [index]/[type] format
while from 7.x the index name cannot include the type anymore.

Avoid any index type handling in the code and use directly the
value coming from the input parameter as full index name.

Change-Id: I5bbe2e42d6a42d304bc7c877c1671a537294ecfb
diff --git a/README.md b/README.md
index 65a5f74..a93bad8 100644
--- a/README.md
+++ b/README.md
@@ -56,7 +56,8 @@
     see: https://gerrit.googlesource.com/plugins/analytics/+/master/README.md
 - -u --url Gerrit server URL with the analytics plugins installed
 - -p --prefix (*optional*) Projects prefix. Limit the results to those projects that start with the specified prefix.
-- -e --elasticIndex Elastic Search index name. If not provided no ES export will be performed
+- -e --elasticIndex Elastic Search index name. If not provided no ES export will be performed. _Note: Elasticsearch 6.x
+requires this index format `name/type`, while from Elasticsearch 7.x just `name`_
 - -r --extract-branches Extract and process branches information (Optional) - Default: false
 - -o --out folder location for storing the output as JSON files
     if not provided data is saved to </tmp>/analytics-<NNNN> where </tmp> is
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
index 56cd33e..70550bd 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/job/Main.scala
@@ -80,7 +80,7 @@
           config.eventsTimeAggregation.get,
           TimeRange(config.since, config.until)
         )
-        .saveToEs(s"${config.elasticSearchIndex.get}/$DOCUMENT_TYPE")
+        .saveToEs(config.elasticSearchIndex.get)
 
     case None =>
       logger.error("Could not parse command line arguments")
diff --git a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
index 9b05ce9..8e73fbd 100644
--- a/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
+++ b/auditlog/src/main/scala/com/gerritforge/analytics/auditlog/model/ElasticSearchFields.scala
@@ -42,6 +42,4 @@
   val NUM_EVENTS_FIELD = "num_events"
 
   val ALL_DOCUMENT_FIELDS: List[String] = FACETING_FIELDS :+ NUM_EVENTS_FIELD
-
-  val DOCUMENT_TYPE = "auditlog"
 }
diff --git a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
index af21fff..b80e99d 100644
--- a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
+++ b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/job/Main.scala
@@ -109,8 +109,6 @@
   self: LazyLogging with FetchProjects =>
   implicit val codec = Codec.ISO8859
 
-  val indexType = "gitCommits"
-
   def buildProjectStats()(implicit config: GerritEndpointConfig, spark: SparkSession): DataFrame = {
     import com.gerritforge.analytics.gitcommits.engine.GerritAnalyticsTransformations._
 
@@ -137,8 +135,8 @@
     import org.elasticsearch.spark.sql._
     config.elasticIndex.foreach { esIndex =>
       logger.info(
-        s"ES content created, saving it to elastic search instance at '${config.elasticIndex}/$indexType'")
-      df.saveToEs(s"$esIndex/$indexType")
+        s"ES content created, saving it to elastic search instance at '${config.elasticIndex}'")
+      df.saveToEs(esIndex)
     }
 
   }
diff --git a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
index b024a14..49c7ede 100644
--- a/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
+++ b/gitcommits/src/main/scala/com/gerritforge/analytics/gitcommits/plugin/ProcessGitCommitsCommand.scala
@@ -107,10 +107,10 @@
       import org.elasticsearch.spark.sql._
       config.elasticIndex.foreach { esIndex =>
         stdout.println(
-          s"$numRows rows extracted. Posting Elasticsearch at '${config.elasticIndex}/$indexType'"
+          s"$numRows rows extracted. Posting Elasticsearch at '${config.elasticIndex}'"
         )
         stdout.flush()
-        projectStats.saveToEs(s"$esIndex/$indexType")
+        projectStats.saveToEs(esIndex)
       }
 
       val elaspsedTs = (System.currentTimeMillis - startTs) / 1000L