Hadoop indexing fails if multiple tasks are run in parallel

Hi,
I have HDFS paths for multiple days. When I trigger the indexing job for one day, it works and successfully exits. But when I run the same job for two different days or so, both jobs fail. Also, I don’t get logs for these failed tasks. Is there anything basic I am missing?

payload from failed task:

{

“task”: “index_hadoop_XXX_summary_2017-03-15T19:22:10.486Z”,

“payload”: {

“id”: “index_hadoop_XXX_summary_2017-03-15T19:22:10.486Z”,

“spec”: {

“dataSchema”: {

“dataSource”: “XXX_summary”,

“parser”: {

“type”: “string”,

“parseSpec”: {

“format”: “tsv”,

“columns”: [

“ts”,

“metro_area”,

“country”,

“appid”,

“osversion”,

“locale”

],

“delimiter”: “\t”,

“dimensionsSpec”: {

“dimensions”: [

“metro_area”,

“country”,

“appid”,

“osversion”,

“locale”

]

},

“timestampSpec”: {

“format”: “auto”,

“column”: “ts”

}

}

},

“metricsSpec”: [

{

“type”: “count”,

“name”: “count”

}

],

“granularitySpec”: {

“type”: “uniform”,

“segmentGranularity”: “HOUR”,

“queryGranularity”: {

“type”: “duration”,

“duration”: 3600000,

“origin”: “1970-01-01T00:00:00.000Z”

},

“rollup”: true,

“intervals”: [

“2017-03-10T08:00:00.000Z/2017-03-11T08:00:00.000Z”

],

“timezone”: null

}

},

“ioConfig”: {

“type”: “hadoop”,

“inputSpec”: {

“type”: “static”,

“paths”: “/user/abc/druid/2017-03-10”,

“ingestionSpec”: {

“ignoreWhenNoSegments”: “yes”

}

},

“metadataUpdateSpec”: null,

“segmentOutputPath”: null

},

“tuningConfig”: {

“type”: “hadoop”,

“workingPath”: null,

“version”: “2017-03-15T19:22:10.485Z”,

“partitionsSpec”: {

“type”: “hashed”,

“targetPartitionSize”: 5000000,

“maxPartitionSize”: 7500000,

“assumeGrouped”: false,

“numShards”: -1,

“partitionDimensions”: []

},

“shardSpecs”: {},

“indexSpec”: {

“bitmap”: {

“type”: “concise”

},

“longEncoding”: “longs”

},

“maxRowsInMemory”: 750000,

“leaveIntermediate”: false,

“cleanupOnFailure”: false,

“overwriteFiles”: true,

“ignoreInvalidRows”: false,

“jobProperties”: {

“mapreduce.job.classloader”: “true”,

“mapreduce.job.user.classpath.first”: “true”,

“mapreduce.map.java.opts”: “-Djava.net.preferIPv4Stack=true -Xmx3865051136 -Duser.timezone=UTC -Dfile.encoding=UTF-8”,

“mapreduce.reduce.java.opts”: “-Djava.net.preferIPv4Stack=true -Xmx3865051136 -Duser.timezone=UTC -Dfile.encoding=UTF-8”,

“mapreduce.job.classloader.system.classes”: “-javax.validation.,java.,javax.,org.apache.commons.logging.,org.apache.log4j.,org.apache.hadoop.,org.w3c.,org.xml.”

},

“combineText”: false,

“useCombiner”: false,

“buildV9Directly”: true,

“numBackgroundPersistThreads”: 0,

“forceExtendableShardSpecs”: false,

“useExplicitVersion”: false

},

“uniqueId”: “”

},

“hadoopDependencyCoordinates”: null,

“classpathPrefix”: null,

“context”: null,

“groupId”: “index_hadoop_XXX_summary_2017-03-15T19:22:10.486Z”,

“dataSource”: “XXX_summary”,

“resource”: {

“availabilityGroup”: “index_hadoop_XXX_summary_2017-03-15T19:22:10.486Z”,

“requiredCapacity”: 1

}

}

}

Thanks,

-YK