Generate dataSource inputSpec for Hadoop indexer job

How can I generate the dataSource inputSpec of the ioConfig in Java for a Hadoop indexer job? Are there any Java API classes (io.druid.*) that can generate it given the dataSource and interval, similar to io.druid.segment.indexing.granularity.GranularitySpec, which generates the granularitySpec for the indexer config?

Example inputSpec:
```json
"inputSpec": {
  "type": "dataSource",
  "ingestionSpec": {
    "dataSource": "dcap.envoy.diskmounts.kafka",
    "intervals": [
      "2017-05-21T00:00:00.000Z/2017-05-22T00:00:00.000Z"
    ],
    "segments": [
      {
        "dataSource": "dcap.envoy.diskmounts.kafka",
        "interval": "2017-05-21T06:00:00.000Z/2017-05-21T07:00:00.000Z",
        "version": "2017-05-21T06:02:43.482Z",
        "loadSpec": {
          "type": "hdfs",
          "path": "/usr/druid/integration/deepstorage/dcap.envoy.diskmounts.kafka/20170521T060000.000Z_20170521T070000.000Z/2017-05-21T06_02_43.482Z/0/index.zip"
        },
        "dimensions": "HOSTNAME,DISK_DEVICE_NAME,MOUNTPOINT,FS_TYPE",
        "metrics": "count",
        "shardSpec": {
          "type": "numbered",
          "partitionNum": 0,
          "partitions": 0
        },
        "binaryVersion": 9,
        "size": 50941095,
        "identifier": "dcap.envoy.diskmounts.kafka_2017-05-21T06:00:00.000Z_2017-05-21T07:00:00.000Z_2017-05-21T06:02:43.482Z"
      },
      {
        "dataSource": "dcap.envoy.diskmounts.kafka",
        "interval": "2017-05-21T06:00:00.000Z/2017-05-21T07:00:00.000Z",
        "version": "2017-05-21T06:02:43.482Z",
        "loadSpec": {
          "type": "hdfs",
          "path": "/usr/druid/integration/deepstorage/dcap.envoy.diskmounts.kafka/20170521T060000.000Z_20170521T070000.000Z/2017-05-21T06_02_43.482Z/1/index.zip"
        },
        "dimensions": "HOSTNAME,DISK_DEVICE_NAME,MOUNTPOINT,FS_TYPE",
        "metrics": "count",
        "shardSpec": {
          "type": "numbered",
          "partitionNum": 1,
          "partitions": 0
        },
        "binaryVersion": 9,
        "size": 50901141,
        "identifier": "dcap.envoy.diskmounts.kafka_2017-05-21T06:00:00.000Z_2017-05-21T07:00:00.000Z_2017-05-21T06:02:43.482Z_1"
      },
      {
        "dataSource": "dcap.envoy.diskmounts.kafka",
        "interval": "2017-05-21T06:00:00.000Z/2017-05-21T07:00:00.000Z",
        "version": "2017-05-21T06:02:43.482Z",
        "loadSpec": {
          "type": "hdfs",
          "path": "/usr/druid/integration/deepstorage/dcap.envoy.diskmounts.kafka/20170521T060000.000Z_20170521T070000.000Z/2017-05-21T06_02_43.482Z/3/index.zip"
        },
        "dimensions": "HOSTNAME,DISK_DEVICE_NAME,MOUNTPOINT,FS_TYPE",
        "metrics": "count",
        "shardSpec": {
          "type": "numbered",
          "partitionNum": 3,
          "partitions": 0
        },
        "binaryVersion": 9,
        "size": 50913465,
        "identifier": "dcap.envoy.diskmounts.kafka_2017-05-21T06:00:00.000Z_2017-05-21T07:00:00.000Z_2017-05-21T06:02:43.482Z_3"
      },
      {
        "dataSource": "dcap.envoy.diskmounts.kafka",
        "interval": "2017-05-21T06:00:00.000Z/2017-05-21T07:00:00.000Z",
        "version": "2017-05-21T06:02:43.482Z",
        "loadSpec": {
          "type": "hdfs",
          "path": "/usr/druid/integration/deepstorage/dcap.envoy.diskmounts.kafka/20170521T060000.000Z_20170521T070000.000Z/2017-05-21T06_02_43.482Z/2/index.zip"
        },
        "dimensions": "HOSTNAME,DISK_DEVICE_NAME,MOUNTPOINT,FS_TYPE",
        "metrics": "count",
        "shardSpec": {
          "type": "numbered",
          "partitionNum": 2,
          "partitions": 0
        },
        "binaryVersion": 9,
        "size": 50885886,
        "identifier": "dcap.envoy.diskmounts.kafka_2017-05-21T06:00:00.000Z_2017-05-21T07:00:00.000Z_2017-05-21T06:02:43.482Z_2"
      }
    ]
  }
}
```
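
In the meantime, I am generating this fragment with plain Jackson maps rather than Druid classes. Below is a minimal sketch; the class and method names are my own, and I deliberately omit `segments`, on the assumption that Druid resolves the segment list at runtime from the metadata store when only `dataSource` and `intervals` are given:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class DatasourceInputSpecBuilder {

    // Builds the "inputSpec" fragment from a dataSource name and an
    // ISO-8601 interval string, using plain Jackson and no io.druid.* classes.
    static String buildInputSpec(String dataSource, String interval) throws Exception {
        Map<String, Object> ingestionSpec = new LinkedHashMap<>();
        ingestionSpec.put("dataSource", dataSource);
        ingestionSpec.put("intervals", Collections.singletonList(interval));
        // "segments" is omitted, assuming Druid fills it in at runtime
        // when only dataSource/intervals are supplied.

        Map<String, Object> inputSpec = new LinkedHashMap<>();
        inputSpec.put("type", "dataSource");
        inputSpec.put("ingestionSpec", ingestionSpec);

        return new ObjectMapper()
                .writerWithDefaultPrettyPrinter()
                .writeValueAsString(inputSpec);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(buildInputSpec(
                "dcap.envoy.diskmounts.kafka",
                "2017-05-21T00:00:00.000Z/2017-05-22T00:00:00.000Z"));
    }
}
```

This works, but I would prefer a typed class from the Druid API itself. The `ingestionSpec` block looks like it maps to `io.druid.indexer.hadoop.DatasourceIngestionSpec` in the indexing-hadoop module; is that class intended to be constructed directly from client code?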
