Upload duplicate data to Druid

We want to upload data to the same Druid data source repeatedly (appending new batches to existing segments), but currently we cannot do it.

The JSON ingestion spec we are using is:

{
  "type" : "index_hadoop",
  "spec" : {
    "ioConfig" : {
      "type" : "hadoop",
      "appendToExisting" : true,
      "inputSpec" : {
        "type" : "static",
        "paths" : "quickstart/20170703.csv"
      }
    },
    "dataSchema" : {
      "dataSource" : "smsdata20170708",
      "granularitySpec" : {
        "type" : "uniform",
        "segmentGranularity" : "day",
        "queryGranularity" : "none",
        "intervals" : ["2017-02-21/2017-02-23"]
      },
      "parser" : {
        "type" : "hadoopyString",
        "parseSpec" : {
          "format" : "csv",
          "columns" : ["subscriber_id","trans_id_1","trans_id_2","date_time","subscriber_type","message_type","sub_id_2","account_type","master_sub_id","application_id","sub_type_id","unit_type_id","usage_amount","type_of_charge","identity_id","group_id","charge_code","content_type","fund_usage_type","msc_id","circle_id","sp_id"],
          "dimensionsSpec" : {
            "dimensions" : ["subscriber_id","trans_id_1","trans_id_2","date_time","subscriber_type","message_type","sub_id_2","account_type","master_sub_id","application_id","sub_type_id","unit_type_id","usage_amount","type_of_charge","identity_id","group_id","charge_code","content_type","fund_usage_type","msc_id","circle_id","sp_id"]
          },
          "timestampSpec" : {
            "format" : "auto",
            "column" : "date_time"
          }
        }
      },
      "metricsSpec" : [
        {
          "name" : "count",
          "type" : "count"
        }
      ]
    },
    "tuningConfig" : {
      "type" : "hadoop",
      "overwriteFiles" : false,
      "partitionsSpec" : {
        "type" : "hashed",
        "targetPartitionSize" : 5000000
      },
      "jobProperties" : {}
    }
  }
}
