Broker "Faulty channel in resource pool" exception

hi all,

We have set up druid cluster for about 4 months, now we have a issue that some queries not all queries have the below exception. Anyone have the same problems? What should I do? thank you so much. the exception is below:

io.druid.java.util.common.RE: Failure getting results for query[8f299393-8893-4748-8c12-0a6dbe61a40a] url[http://druid-historical02:8100/druid/v2/] because of [org.jboss.netty.channel.ChannelException: Faulty channel in resource pool]

at io.druid.client.DirectDruidClient$JsonParserIterator.init(DirectDruidClient.java:628) ~[druid-server-0.12.0.jar:0.12.0]

at io.druid.client.DirectDruidClient$JsonParserIterator.hasNext(DirectDruidClient.java:560) ~[druid-server-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.BaseSequence.makeYielder(BaseSequence.java:88) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.BaseSequence.toYielder(BaseSequence.java:68) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MappedSequence.toYielder(MappedSequence.java:49) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MergeSequence.lambda$toYielder$0(MergeSequence.java:66) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MappingAccumulator.accumulate(MappingAccumulator.java:40) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.BaseSequence.accumulate(BaseSequence.java:46) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MappedSequence.accumulate(MappedSequence.java:43) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MergeSequence.toYielder(MergeSequence.java:63) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.LazySequence.toYielder(LazySequence.java:46) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.query.RetryQueryRunner$1.toYielder(RetryQueryRunner.java:102) ~[druid-processing-0.12.0.jar:0.12.0]

at io.druid.common.guava.CombiningSequence.toYielder(CombiningSequence.java:80) ~[druid-common-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MappedSequence.toYielder(MappedSequence.java:49) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.MappedSequence.toYielder(MappedSequence.java:49) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence$2.get(WrappingSequence.java:87) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence$2.get(WrappingSequence.java:83) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.query.CPUTimeMetricQueryRunner$1.wrap(CPUTimeMetricQueryRunner.java:74) ~[druid-processing-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence.toYielder(WrappingSequence.java:82) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence$2.get(WrappingSequence.java:87) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence$2.get(WrappingSequence.java:83) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.SequenceWrapper.wrap(SequenceWrapper.java:55) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.WrappingSequence.toYielder(WrappingSequence.java:82) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.java.util.common.guava.Yielders.each(Yielders.java:32) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.sql.calcite.schema.DruidSchema.refreshSegmentsForDataSource(DruidSchema.java:415) ~[druid-sql-0.12.0.jar:0.12.0]

at io.druid.sql.calcite.schema.DruidSchema.refreshSegments(DruidSchema.java:381) ~[druid-sql-0.12.0.jar:0.12.0]

at io.druid.sql.calcite.schema.DruidSchema.access$1000(DruidSchema.java:81) ~[druid-sql-0.12.0.jar:0.12.0]

at io.druid.sql.calcite.schema.DruidSchema$2.run(DruidSchema.java:219) [druid-sql-0.12.0.jar:0.12.0]

at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [?:1.8.0_121]

at java.util.concurrent.FutureTask.run(FutureTask.java:266) [?:1.8.0_121]

at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) [?:1.8.0_121]

at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) [?:1.8.0_121]

at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_121]

at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_121]

at java.lang.Thread.run(Thread.java:745) [?:1.8.0_121]

Caused by: java.util.concurrent.ExecutionException: org.jboss.netty.channel.ChannelException: Faulty channel in resource pool

at com.google.common.util.concurrent.Futures$ImmediateFailedFuture.get(Futures.java:186) ~[guava-16.0.1.jar:?]

at io.druid.client.DirectDruidClient$JsonParserIterator.init(DirectDruidClient.java:598) ~[druid-server-0.12.0.jar:0.12.0]

… 34 more

Caused by: org.jboss.netty.channel.ChannelException: Faulty channel in resource pool

at io.druid.java.util.http.client.NettyHttpClient.go(NettyHttpClient.java:147) ~[java-util-0.12.0.jar:0.12.0]

at io.druid.client.DirectDruidClient.run(DirectDruidClient.java:440) ~[druid-server-0.12.0.jar:0.12.0]

at io.druid.client.CachingClusteredClient$SpecificQueryRunnable.getSimpleServerResults(CachingClusteredClient.java:585) ~[druid-server-0.12.0.jar:0.12.0]

at io.druid.client.CachingClusteredClient$SpecificQueryRunnable.lambda$addSequencesFromServer$6(CachingClusteredClient.java:554) ~[druid-server-0.12.0.jar:0.12.0]

at java.util.TreeMap.forEach(TreeMap.java:1005) ~[?:1.8.0_121]

at io.druid.client.CachingClusteredClient$SpecificQueryRunnable.addSequencesFromServer(CachingClusteredClient.java:540) ~[druid-server-0.12.0.jar:0.12.0]

at io.druid.client.CachingClusteredClient$SpecificQueryRunnable.lambda$run$1(CachingClusteredClient.java:282) ~[druid-server-0.12.0.jar:0.12.0]

… 25 more

Caused by: java.net.ConnectException: Connection refused: druid-historical02/10.65.206.168:8100

at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) ~[?:1.8.0_121]

at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717) ~[?:1.8.0_121]

at org.jboss.netty.channel.socket.nio.NioClientBoss.connect(NioClientBoss.java:152) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.channel.socket.nio.NioClientBoss.processSelectedKeys(NioClientBoss.java:105) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.channel.socket.nio.NioClientBoss.process(NioClientBoss.java:79) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.channel.socket.nio.NioClientBoss.run(NioClientBoss.java:42) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108) ~[netty-3.10.6.Final.jar:?]

at org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42) ~[netty-3.10.6.Final.jar:?]

… 3 more

Can you share the query JSON?

There is a dashboard that includes about 10 queries, all sent at the same time; I can share one of them as an example.
When I send each query separately, the response is OK — the failure only happens when they are sent concurrently.

{
  "dataSource": "shopee_id_s0__campaign_microsite",
  "postAggregations": [
    {
      "name": "CTR",
      "fn": "/",
      "fields": [
        {
          "name": "click",
          "fieldName": "sum__click",
          "type": "fieldAccess"
        },
        {
          "name": "impression",
          "fieldName": "sum__impression",
          "type": "fieldAccess"
        }
      ],
      "type": "arithmetic"
    }
  ],
  "queryType": "timeseries",
  "aggregations": [
    {
      "name": "sum__click",
      "type": "longSum",
      "fieldName": "click"
    },
    {
      "name": "sum__impression",
      "type": "longSum",
      "fieldName": "impression"
    }
  ],
  "intervals": "2018-09-09T17:00:00+00:00/2018-09-11T00:47:05+00:00",
  "filter": {
    "fields": [
      {
        "field": {
          "fields": [
            {
              "dimension": "component_type",
              "value": "CONTAINER",
              "type": "selector"
            },
            {
              "dimension": "component_type",
              "value": "PRODUCT_CARD",
              "type": "selector"
            },
            {
              "dimension": "component_type",
              "value": "",
              "type": "selector"
            }
          ],
          "type": "or"
        },
        "type": "not"
      },
      {
        "dimension": "current_page_type",
        "value": "microsite",
        "type": "selector"
      }
    ],
    "type": "and"
  },
  "granularity": "all"
}