Spark + Elasticsearch configuration code examples (elasticsearch-hadoop connector)
Example 1: reading an Elasticsearch index into a Spark RDD (Scala)
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.elasticsearch.spark._  // brings the esRDD extension method onto SparkContext
...
// SparkConf setup elided in the original example; typically this is where
// es.nodes / es.port connector settings would be configured.
val conf = ...
val sc = new SparkContext(conf)
// Reads the "radio/artists" index/type as an RDD of (docId, document) pairs.
val RDD = sc.esRDD("radio/artists")
Example 2: reading from Elasticsearch in Spark with a query string (Scala)
...
import org.elasticsearch.spark._  // brings the esRDD extension method onto SparkContext
...
// SparkConf setup elided in the original example.
val conf = ...
val sc = new SparkContext(conf)
// Same as Example 1 but with a URI query: only documents matching "me*"
// are returned from the "radio/artists" index/type.
sc.esRDD("radio/artists", "?q=me*")
Example 3: writing documents to a dynamically named Elasticsearch index from Spark (Scala) — the index name is resolved per document from a field value
// Three sample documents. Each one carries a "media_type" field; the
// {media_type} placeholder in the save target below is resolved against
// that field per document, routing each doc to its own index.
val gameDoc = Map("media_type" -> "game", "title" -> "FF VI", "year" -> "1994")
val bookDoc = Map("media_type" -> "book", "title" -> "Harry Potter", "year" -> "2010")
val musicDoc = Map("media_type" -> "music", "title" -> "Surfing With The Alien")

// Index every document into my-collection-<media_type>/doc.
val documents = Seq(gameDoc, bookDoc, musicDoc)
sc.makeRDD(documents).saveToEs("my-collection-{media_type}/doc")
Example 4: indexing raw JSON strings into Elasticsearch from Spark (Java)
// Two documents kept as raw JSON strings; they are indexed verbatim
// (no serialization step) by saveJsonToEs.
String json1 = "{\"reason\" : \"business\",\"airport\" : \"SFO\"}";
String json2 = "{\"participants\" : 5,\"airport\" : \"OTP\"}";
// Spark context construction elided in the original example.
JavaSparkContext jsc = ...
JavaRDD<String> stringRDD = jsc.parallelize(ImmutableList.of(json1, json2));
// Writes both JSON documents into the "spark/json-trips" index/type.
JavaEsSpark.saveJsonToEs(stringRDD, "spark/json-trips");