Druid batch ingestion fails when using single-dimension partitioning

I'm trying to partition segments by a custom dimension, but the ingestion fails with "java.lang.RuntimeException: No buckets?? seems there is no data to index.":

2017-08-17T09:04:52,052 ERROR [task-runner-0-priority-0] io.druid.indexing.overlord.ThreadPoolTaskRunner - Exception while running task[HadoopIndexTask{id=index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z, type=index_hadoop, dataSource=zjy-cidata}]
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
	at com.google.common.base.Throwables.propagate(Throwables.java:160) ~[guava-16.0.1.jar:?]
	at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:211) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	at io.druid.indexing.common.task.HadoopIndexTask.run(HadoopIndexTask.java:223) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:436) [druid-indexing-service-0.10.0.jar:0.10.0]
	at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:408) [druid-indexing-service-0.10.0.jar:0.10.0]
	at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_131]
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_131]
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_131]
	at java.lang.Thread.run(Thread.java:748) [?:1.8.0_131]
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_131]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_131]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_131]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_131]
	at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:208) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	... 7 more
Caused by: java.lang.RuntimeException: java.lang.RuntimeException: No buckets?? seems there is no data to index.
	at io.druid.indexer.IndexGeneratorJob.run(IndexGeneratorJob.java:215) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexer.JobHelper.runJobs(JobHelper.java:349) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexer.HadoopDruidIndexerJob.run(HadoopDruidIndexerJob.java:95) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexing.common.task.HadoopIndexTask$HadoopIndexGeneratorInnerProcessing.runTask(HadoopIndexTask.java:276) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_131]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_131]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_131]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_131]
	at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:208) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	... 7 more
Caused by: java.lang.RuntimeException: No buckets?? seems there is no data to index.
	at io.druid.indexer.IndexGeneratorJob.run(IndexGeneratorJob.java:176) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexer.JobHelper.runJobs(JobHelper.java:349) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexer.HadoopDruidIndexerJob.run(HadoopDruidIndexerJob.java:95) ~[druid-indexing-hadoop-0.10.0.jar:0.10.0]
	at io.druid.indexing.common.task.HadoopIndexTask$HadoopIndexGeneratorInnerProcessing.runTask(HadoopIndexTask.java:276) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_131]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_131]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_131]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_131]
	at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:208) ~[druid-indexing-service-0.10.0.jar:0.10.0]
	... 7 more
2017-08-17T09:04:52,059 INFO [task-runner-0-priority-0] io.druid.indexing.overlord.TaskRunnerUtils - Task [index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z] status changed to [FAILED].
2017-08-17T09:04:52,061 INFO [task-runner-0-priority-0] io.druid.indexing.worker.executor.ExecutorLifecycle - Task completed with status: {
  "id" : "index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z",
  "status" : "FAILED",
  "duration" : 104019
}
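
From the trace, the exception is thrown in IndexGeneratorJob.run (druid-indexing-hadoop-0.10.0, line 176), which I read as suggesting that the determine-partitions phase that runs before index generation produced no buckets/shard specs for the interval, even though the input file does contain data.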

The failing task's payload is:

{"task":"index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z","payload":{"id":"index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z","spec":{"dataSchema":{"dataSource":"zjy-cidata","parser":{"type":"hadoopyString","parseSpec":{"format":"json","timestampSpec":{"column":"time","format":"auto"},"dimensionsSpec":{"dimensionsSpec":{"dimensions":["app_id","platform","app_version_name","app_version_code","sdk_version","country","region","city","county","isp","model","os_type","os_version","channel","type","user_type","event_id","event_arg1","event_arg2","event_arg3","event_arg4","event_arg5","event_arg6","event_arg7","event_arg8","event_arg9","event_arg10","user_dim1","user_dim2","user_dim3","user_dim4","user_dim5","user_dim6","user_dim7","user_dim8","user_dim9","user_dim10"],"dimensionExclusions":[],"spatialDimensions":[]}}}},"metricsSpec":[{"type":"count","name":"count"},{"type":"hyperUnique","name":"distinct_device_id","fieldName":"device_id","isInputHyperUnique":false},{"type":"thetaSketch","name":"distinct_device_id2","fieldName":"device_id","size":16384,"shouldFinalize":true,"isInputThetaSketch":false,"errorBoundsStdDev":null},{"type":"hyperUnique","name":"distinct_user_id","fieldName":"user_id","isInputHyperUnique":false},{"type":"thetaSketch","name":"distinct_user_id2","fieldName":"user_id","size":16384,"shouldFinalize":true,"isInputThetaSketch":false,"errorBoundsStdDev":null}],"granularitySpec":{"type":"uniform","segmentGranularity":"HOUR","queryGranularity":"HOUR","rollup":true,"intervals":["2017-08-12T00:00:00.000Z/2017-08-12T01:00:00.000Z"]}},"ioConfig":{"type":"hadoop","inputSpec":{"type":"static","paths":"/cidata/druid-source/20170812T000000+0000_20170812T010000+0000.txt"},"metadataUpdateSpec":null,"segmentOutputPath":null},"tuningConfig":{"type":"hadoop","workingPath":null,"version":"2017-08-17T09:03:02.551Z","partitionsSpec":{"type":"dimension","partitionDimension":"event_id","targetPartitionSize":1000000,"maxPartitionSize":1500000,"assumeGrouped":false,"numShards":-1,"partitionDimensions":[]},"shardSpecs":{},"indexSpec":{"bitmap":{"type":"concise"},"dimensionCompression":"lz4","metricCompression":"lz4","longEncoding":"longs"},"maxRowsInMemory":75000,"leaveIntermediate":false,"cleanupOnFailure":true,"overwriteFiles":false,"ignoreInvalidRows":true,"jobProperties":{"mapreduce.job.classloader":"true","mapreduce.job.classloader.system.classes":"-javax.validation.,java.,javax.,org.apache.commons.logging.,org.apache.log4j.,org.apache.hadoop."},"combineText":false,"useCombiner":false,"buildV9Directly":true,"numBackgroundPersistThreads":0,"forceExtendableShardSpecs":false,"useExplicitVersion":false},"uniqueId":"893cd09cfa43481fbae14d5ff38220dd"},"hadoopDependencyCoordinates":["org.apache.hadoop:hadoop-client:2.6.0-mr1-cdh5.10.0"],"classpathPrefix":null,"context":null,"groupId":"index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z","dataSource":"zjy-cidata","resource":{"availabilityGroup":"index_hadoop_zjy-cidata_2017-08-17T09:03:02.551Z","requiredCapacity":1}}}
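
For readability, here is the tuningConfig.partitionsSpec from this failing task, pretty-printed (copied verbatim from the payload above):

  "partitionsSpec": {
    "type": "dimension",
    "partitionDimension": "event_id",
    "targetPartitionSize": 1000000,
    "maxPartitionSize": 1500000,
    "assumeGrouped": false,
    "numShards": -1,
    "partitionDimensions": []
  }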

Without setting the partitionsSpec (so it falls back to the default hashed spec), the ingestion succeeds. That task's payload is:

{"task":"index_hadoop_zjy-cidata_2017-08-17T08:46:59.224Z","payload":{"id":"index_hadoop_zjy-cidata_2017-08-17T08:46:59.224Z","spec":{"dataSchema":{"dataSource":"zjy-cidata","parser":{"type":"hadoopyString","parseSpec":{"format":"json","timestampSpec":{"column":"time","format":"auto"},"dimensionsSpec":{"dimensionsSpec":{"dimensions":["app_id","platform","app_version_name","app_version_code","sdk_version","country","region","city","county","isp","model","os_type","os_version","channel","type","user_type","event_id","event_arg1","event_arg2","event_arg3","event_arg4","event_arg5","event_arg6","event_arg7","event_arg8","event_arg9","event_arg10","user_dim1","user_dim2","user_dim3","user_dim4","user_dim5","user_dim6","user_dim7","user_dim8","user_dim9","user_dim10"],"dimensionExclusions":[],"spatialDimensions":[]}}}},"metricsSpec":[{"type":"count","name":"count"},{"type":"hyperUnique","name":"distinct_device_id","fieldName":"device_id","isInputHyperUnique":false},{"type":"thetaSketch","name":"distinct_device_id2","fieldName":"device_id","size":16384,"shouldFinalize":true,"isInputThetaSketch":false,"errorBoundsStdDev":null},{"type":"hyperUnique","name":"distinct_user_id","fieldName":"user_id","isInputHyperUnique":false},{"type":"thetaSketch","name":"distinct_user_id2","fieldName":"user_id","size":16384,"shouldFinalize":true,"isInputThetaSketch":false,"errorBoundsStdDev":null}],"granularitySpec":{"type":"uniform","segmentGranularity":"HOUR","queryGranularity":"HOUR","rollup":true,"intervals":["2017-08-11T00:00:00.000Z/2017-08-11T01:00:00.000Z"]}},"ioConfig":{"type":"hadoop","inputSpec":{"type":"static","paths":"/cidata/druid-source/20170811T000000+0000_20170811T010000+0000.txt"},"metadataUpdateSpec":null,"segmentOutputPath":null},"tuningConfig":{"type":"hadoop","workingPath":null,"version":"2017-08-17T08:46:59.224Z","partitionsSpec":{"type":"hashed","targetPartitionSize":-1,"maxPartitionSize":-1,"assumeGrouped":false,"numShards":-1,"partitionDimensions":[]},"shardSpecs":{},"indexSpec":{"bitmap":{"type":"concise"},"dimensionCompression":"lz4","metricCompression":"lz4","longEncoding":"longs"},"maxRowsInMemory":75000,"leaveIntermediate":false,"cleanupOnFailure":true,"overwriteFiles":false,"ignoreInvalidRows":true,"jobProperties":{"mapreduce.job.classloader":"true","mapreduce.job.classloader.system.classes":"-javax.validation.,java.,javax.,org.apache.commons.logging.,org.apache.log4j.,org.apache.hadoop."},"combineText":false,"useCombiner":false,"buildV9Directly":true,"numBackgroundPersistThreads":0,"forceExtendableShardSpecs":false,"useExplicitVersion":false},"uniqueId":"8f599d7e1f174efb86cf330b459aa8a1"},"hadoopDependencyCoordinates":["org.apache.hadoop:hadoop-client:2.6.0-mr1-cdh5.10.0"],"classpathPrefix":null,"context":null,"groupId":"index_hadoop_zjy-cidata_2017-08-17T08:46:59.224Z","dataSource":"zjy-cidata","resource":{"availabilityGroup":"index_hadoop_zjy-cidata_2017-08-17T08:46:59.224Z","requiredCapacity":1}}}

Could someone tell me what is wrong?