Unable to run batch ingestion with hash partitioning

Hi,

I am trying to do a hash-based partition in my ingestion and it's consistently failing. However, dynamic partitioning works fine. Below are the tuning config I am running and the stack trace of the error. Any clue as to what's happening here would help a great deal.

Tuning config:

`

"tuningConfig": {
"type": "index_parallel",
"partitionsSpec": {
"type": "hashed",
"partitionDimension": "contextid",
"numShards": 150
},
"forceGuaranteedRollup": true,
"maxNumConcurrentSubTasks": 999
},

`

Granularity spec:

`

"granularitySpec": {
"type": "uniform",
"queryGranularity": "DAY",
"segmentGranularity": "WEEK",
"intervals": [
"2017-01-01T00:00:00.000Z/2021-01-01T00:00:00.000Z"
]
},

`

Stack trace:

`

2020-07-07T14:11:25,473 ERROR [task-runner-0-priority-0] org.apache.druid.indexing.overlord.SingleTaskBackgroundRunner - Exception while running task[AbstractTask{id='partial_index_generate_ffs_int_hash_ddcokfig_2020-07-07T14:11:14.391Z', groupId='index_parallel_ffs_int_hash_pbppoebh_2020-07-07T14:11:12.572Z', taskResource=TaskResource{availabilityGroup='partial_index_generate_ffs_int_hash_ddcokfig_2020-07-07T14:11:14.391Z', requiredCapacity=1}, dataSource='ffs_int_hash', context={forceTimeChunkLock=false}}]
org.apache.druid.java.util.common.ISE: Cannot find a version for interval[2017-10-16T00:00:00.000Z/2017-10-23T00:00:00.000Z]
at org.apache.druid.indexing.common.task.CachingLocalSegmentAllocator.lambda$findVersion$3(CachingLocalSegmentAllocator.java:124) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at java.util.Optional.orElseThrow(Optional.java:290) ~[?:1.8.0_252]
at org.apache.druid.indexing.common.task.CachingLocalSegmentAllocator.findVersion(CachingLocalSegmentAllocator.java:124) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.CachingLocalSegmentAllocator.lambda$new$0(CachingLocalSegmentAllocator.java:96) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.batch.partition.HashPartitionAnalysis.lambda$null$0(HashPartitionAnalysis.java:118) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at java.util.stream.IntPipeline$4$1.accept(IntPipeline.java:250) ~[?:1.8.0_252]
at java.util.stream.Streams$RangeIntSpliterator.forEachRemaining(Streams.java:110) ~[?:1.8.0_252]
at java.util.Spliterator$OfInt.forEachRemaining(Spliterator.java:693) ~[?:1.8.0_252]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:482) ~[?:1.8.0_252]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:472) ~[?:1.8.0_252]
at java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:708) ~[?:1.8.0_252]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:1.8.0_252]
at java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:566) ~[?:1.8.0_252]
at org.apache.druid.indexing.common.task.batch.partition.HashPartitionAnalysis.lambda$convertToIntervalToSegmentIds$1(HashPartitionAnalysis.java:122) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at java.util.HashMap.forEach(HashMap.java:1289) ~[?:1.8.0_252]
at org.apache.druid.indexing.common.task.batch.partition.HashPartitionAnalysis.forEach(HashPartitionAnalysis.java:91) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.batch.partition.HashPartitionAnalysis.convertToIntervalToSegmentIds(HashPartitionAnalysis.java:104) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.CachingLocalSegmentAllocator.(CachingLocalSegmentAllocator.java:98) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.SegmentAllocators.forNonLinearPartitioning(SegmentAllocators.java:84) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.batch.parallel.PartialHashSegmentGenerateTask.createSegmentAllocator(PartialHashSegmentGenerateTask.java:136) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.batch.parallel.PartialSegmentGenerateTask.generateSegments(PartialSegmentGenerateTask.java:174) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.batch.parallel.PartialSegmentGenerateTask.runTask(PartialSegmentGenerateTask.java:123) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.common.task.AbstractBatchIndexTask.run(AbstractBatchIndexTask.java:123) ~[druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.overlord.SingleTaskBackgroundRunner$SingleTaskBackgroundRunnerCallable.call(SingleTaskBackgroundRunner.java:421) [druid-indexing-service-0.18.1.jar:0.18.1]
at org.apache.druid.indexing.overlord.SingleTaskBackgroundRunner$SingleTaskBackgroundRunnerCallable.call(SingleTaskBackgroundRunner.java:393) [druid-indexing-service-0.18.1.jar:0.18.1]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_252]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_252]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_252]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_252]
2020-07-07T14:11:25,512 INFO [task-runner-0-priority-0] org.apache.druid.indexing.worker.executor.ExecutorLifecycle - Task completed with status: {
"id" : "partial_index_generate_ffs_int_hash_ddcokfig_2020-07-07T14:11:14.391Z",
"status" : "FAILED",
"duration" : 1137,
"errorMsg" : "org.apache.druid.java.util.common.ISE: Cannot find a version for interval[2017-10-16T00:00:00.000Z/2...",
"location" : {
"host" : null,
"port" : -1,
"tlsPort" : -1
}
}

`