Hi, I have a Druid setup with HDFS deep storage (I replaced the druid-hdfs-storage extension's Hadoop dependencies with the 2.7 versions because of Cloudera CDH 5.11).
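For context, the deep-storage part of my common.runtime.properties follows the standard HDFS setup; roughly like this (the storage directory and the exact Hadoop client coordinates below are illustrative, not copied verbatim from my cluster):

druid.extensions.loadList=["druid-hdfs-storage"]

# HDFS deep storage (the directory below is an example path)
druid.storage.type=hdfs
druid.storage.storageDirectory=/druid/segments

# Hadoop client pulled in for indexing tasks; 2.7.3 here stands in for the
# CDH 5.11-compatible 2.7 client I actually swapped in
druid.indexer.task.defaultHadoopCoordinates=["org.apache.hadoop:hadoop-client:2.7.3"]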
I then tried to index some data via index tasks. The task fails with the exception below, and I cannot find where this "JOINER" field is defined. My ingest spec is:
[centos@ip-10-0-0-107 ~]$ cat index-druid-indexer.json
{
  "type" : "index",
  "spec" : {
    "dataSchema" : {
      "dataSource" : "test",
      "parser" : {
        "type" : "string",
        "parseSpec" : {
          "format" : "json",
          "timestampSpec" : {
            "column" : "ts_start",
            "format" : "auto"
          },
          "dimensionsSpec" : {
            "dimensions" : ["ip", "msisdn", "user", "status", "rerate"],
            "dimensionExclusions" : [],
            "spatialDimensions" : []
          }
        }
      },
      "metricsSpec" : [
        {
          "type" : "count",
          "name" : "count"
        },
        {
          "type" : "doubleSum",
          "name" : "totalDiscount",
          "fieldName" : "discount"
        },
        {
          "type" : "doubleSum",
          "name" : "totalCharge",
          "fieldName" : "charge"
        },
        {
          "type" : "doubleSum",
          "name" : "totalDuration",
          "fieldName" : "duration"
        }
      ],
      "granularitySpec" : {
        "type" : "uniform",
        "segmentGranularity" : "DAY",
        "queryGranularity" : "NONE",
        "intervals" : [ "2017-06-26/2017-07-01" ]
      }
    },
    "ioConfig" : {
      "type" : "index",
      "firehose" : {
        "type" : "local",
        "baseDir" : "/home/centos/faker/",
        "filter" : "*cdr.json"
      }
    },
    "tuningConfig" : {
      "type" : "index",
      "targetPartitionSize" : 5000000,
      "maxRowsInMemory" : 75000
    }
  }
}
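For reference, a line of the *cdr.json input that this parseSpec is meant to read looks roughly like this (the values are made up; only the field names match the spec above):

{"ts_start":"2017-06-26T10:15:00Z","ip":"10.0.0.42","msisdn":"491701234567","user":"alice","status":"OK","rerate":"false","discount":0.5,"charge":2.75,"duration":183}

The task builds and merges the segment fine, but publishing fails. The relevant part of the task log: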
2017-06-29T08:36:41,650 INFO [appenderator_merge_0] io.druid.segment.IndexMergerV9 - Completed index.drd in 1 millis.
2017-06-29T08:36:41,651 INFO [appenderator_merge_0] io.druid.java.util.common.io.smoosh.FileSmoosher - Created smoosh file [/home/centos/druid-0.10.0/var/druid/task/index_test_2017-06-29T08:36:34.189Z/work/persist/test_2017-06-26T00:00:00.000Z_2017-06-27T00:00:00.000Z_2017-06-29T08:36:34.353Z/merged/00000.smoosh] of size [95719] bytes.
2017-06-29T08:36:41,654 WARN [task-runner-0-priority-0] io.druid.segment.realtime.appenderator.FiniteAppenderatorDriver - Failed publishAll (try 2), retrying in 3,819ms.
java.util.concurrent.ExecutionException: java.lang.NoSuchFieldError: JOINER
at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:299) ~[guava-16.0.1.jar:?]
at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:286) ~[guava-16.0.1.jar:?]
at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116) ~[guava-16.0.1.jar:?]
at io.druid.segment.realtime.appenderator.FiniteAppenderatorDriver.publishAll(FiniteAppenderatorDriver.java:423) [druid-server-0.10.0.jar:0.10.0]
at io.druid.segment.realtime.appenderator.FiniteAppenderatorDriver.finish(FiniteAppenderatorDriver.java:262) [druid-server-0.10.0.jar:0.10.0]
at io.druid.indexing.common.task.IndexTask.generateAndPublishSegments(IndexTask.java:466) [druid-indexing-service-0.10.0.jar:0.10.0]
at io.druid.indexing.common.task.IndexTask.run(IndexTask.java:207) [druid-indexing-service-0.10.0.jar:0.10.0]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:436) [druid-indexing-service-0.10.0.jar:0.10.0]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:408) [druid-indexing-service-0.10.0.jar:0.10.0]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_131]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_131]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_131]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_131]
Caused by: java.lang.NoSuchFieldError: JOINER
Any ideas on how to get to the root cause of this error?
Thanks
Full task log attached: NoSuchFieldError.txt (144 KB)