
elasticsearch-spark update field using script returns an exception

Elasticsearch | Author: ggchangan | Published 2016-09-02 | Views: 7333

 
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark.rdd.EsSpark
import org.elasticsearch.spark._

/**
 * Created by magneto on 16-6-29.
 */
object Upsert extends App {
  val conf = new SparkConf().setAppName("read for elasticsearch").setMaster("local")
  conf.set("es.nodes", "172.24.63.14")
  // Use the scripted update write operation instead of a plain index.
  conf.set("es.write.operation", "update")
  // Append to the "name" field if it already exists, otherwise create it as a list.
  val up_script = "if (ctx._source.containsKey(\"name\")) {ctx._source.name += tag;} else {ctx._source.name = [tag]}"
  conf.set("es.update.script", up_script)
  // Script parameter passed in the name:<value> form.
  val up_params = "tag:<sxl>"
  //val up_params = "{\"tag\":\"sxl\"}";
  //val up_params = "tag:xx"
  //conf.set("es.update.script.params.json", up_params)
  conf.set("es.update.script.params", up_params)
  conf.set("es.mapping.id", "id")

  val sc = new SparkContext(conf)

  //val up_all = Map("es.update.script.params" -> up_params, "es.update.script" -> up_script, "es.mapping.id" -> "id")

  //val rdd = sc.makeRDD(Seq(up_all))
  //EsSpark.saveToEs(rdd, "test/docs")

  //val lines = sc.parallelize(List("""{"id":"67861","address":{"zipcode":"25381","id":"67861"}}""")).saveToEs("test/docs", up_all)
  sc.parallelize(Seq(Map("id" -> 1, "xx" -> "xx"))).saveToEs("test/docs")
}
Exception:
org.elasticsearch.hadoop.rest.EsHadoopInvalidRequest: Unexpected character ('n' (code 110)): was expecting comma to separate OBJECT entries
 at [Source: [B@7a0c3eeb; line: 1, column: 65]
    at org.elasticsearch.hadoop.rest.RestClient.checkResponse(RestClient.java:478)
    at org.elasticsearch.hadoop.rest.RestClient.execute(RestClient.java:436)
    at org.elasticsearch.hadoop.rest.RestClient.execute(RestClient.java:426)
    at org.elasticsearch.hadoop.rest.RestClient.bulk(RestClient.java:153)
    at org.elasticsearch.hadoop.rest.RestRepository.tryFlush(RestRepository.java:225)
    at org.elasticsearch.hadoop.rest.RestRepository.flush(RestRepository.java:248)
    at org.elasticsearch.hadoop.rest.RestRepository.close(RestRepository.java:267)
    at org.elasticsearch.hadoop.rest.RestService$PartitionWriter.close(RestService.java:130)
    at org.elasticsearch.spark.rdd.EsRDDWriter$$anonfun$write$1.apply$mcV$sp(EsRDDWriter.scala:42)
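
For reference, below is a minimal sketch of the variant already hinted at by the commented-out lines in the code above: passing the script parameter as a JSON object via es.update.script.params.json instead of the tag:<sxl> shorthand. The node address, index, script, and field names are taken from the question; the object name UpsertJsonParams is illustrative, and whether this form avoids the exception is an assumption, not a confirmed fix.

import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._

object UpsertJsonParams extends App {
  val conf = new SparkConf().setAppName("read for elasticsearch").setMaster("local")
  conf.set("es.nodes", "172.24.63.14")
  conf.set("es.write.operation", "update")
  // Same script as in the question: append to "name" if present, otherwise create it.
  conf.set("es.update.script",
    "if (ctx._source.containsKey(\"name\")) {ctx._source.name += tag;} else {ctx._source.name = [tag]}")
  // Parameter supplied as a JSON document (the commented-out alternative in the question).
  conf.set("es.update.script.params.json", "{\"tag\":\"sxl\"}")
  conf.set("es.mapping.id", "id")

  val sc = new SparkContext(conf)
  sc.parallelize(Seq(Map("id" -> 1, "xx" -> "xx"))).saveToEs("test/docs")
}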
