Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # Name the Components on this agent
- spoolAgent1.sources = spooldir-source
- spoolAgent1.channels = mem-channel
- spoolAgent1.sinks = kafka-sink-1
- # Describe Configure/Source
- spoolAgent1.sources.spooldir-source.type = spooldir
- spoolAgent1.sources.spooldir-source.spoolDir =
- spoolAgent1.sources.spooldir-source.fileHeader = false
- spoolAgent1.sources.spooldir-source.fileSuffix = .FULLFILE
- spoolAgent1.sources.spooldir-source.deletePolicy = never
- #Describe the sink
- spoolAgent1.sinks.kafka-sink-1.channel = mem-channel
- spoolAgent1.sinks.kafka-sink-1.type = org.apache.flume.sink.kafka.KafkaSink
- spoolAgent1.sinks.kafka-sink-1.batchSize = 20
- spoolAgent1.sinks.kafka-sink-1.brokerList =
- spoolAgent1.sinks.kafka-sink-1.topic = SAMPLELOGMINER
- # NOTE(review): zookeeperConnect is a Kafka-side property, not a spooldir-source
- # property — scoped to the source it is ignored; confirm whether it is needed at all
- spoolAgent1.sources.spooldir-source.zookeeperConnect =
- #Bind the source and sink to the channel
- spoolAgent1.sources.spooldir-source.channels = mem-channel
- #Use a channel which buffers events in memory
- spoolAgent1.channels.mem-channel.type = MEMORY
- spoolAgent1.channels.mem-channel.capacity = 10000
- spoolAgent1.channels.mem-channel.transactionCapacity = 10000
- # SSL properties (comment originally said "Thrift source s1", but these target the
- # spooldir source — the spooling-directory source reads local files and has no SSL
- # support, so these four properties are likely ignored; verify they are needed)
- spoolAgent1.sources.spooldir-source.ssl=true
- spoolAgent1.sources.spooldir-source.keystore=
- spoolAgent1.sources.spooldir-source.keystore-password=
- spoolAgent1.sources.spooldir-source.keystore-type=JKS
- # SSL properties (comment originally said "Thrift sink k1", but these target the Kafka
- # sink — NOTE(review): the legacy Kafka producer API used by this KafkaSink does not
- # appear to honor ssl/truststore properties; a plaintext producer hitting an SSL
- # listener (e.g. port 9093) fails with EOFException, matching the trace below)
- spoolAgent1.sinks.kafka-sink-1.ssl=true
- spoolAgent1.sinks.kafka-sink-1.truststore=
- spoolAgent1.sinks.kafka-sink-1.truststore-password=
- kafka.common.KafkaException: fetching topic metadata for topics [Set(SAMPLETOPIC)] from broker [ArrayBuffer(BrokerEndPoint(0,sample.node,9093))] failed
- at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:72)
- at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:83)
- at kafka.producer.async.DefaultEventHandler$$anonfun$handle$2.apply$mcV$sp(DefaultEventHandler.scala:81)
- at kafka.utils.CoreUtils$.swallow(CoreUtils.scala:79)
- at kafka.utils.Logging$class.swallowError(Logging.scala:106)
- at kafka.utils.CoreUtils$.swallowError(CoreUtils.scala:51)
- at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:81)
- at kafka.producer.Producer.send(Producer.scala:77)
- at kafka.javaapi.producer.Producer.send(Producer.scala:42)
- at org.apache.flume.sink.kafka.KafkaSink.process(KafkaSink.java:135)
- at org.apache.flume.sink.DefaultSinkProcessor.process(DefaultSinkProcessor.java:68)
- at org.apache.flume.SinkRunner$PollingRunner.run(SinkRunner.java:147)
- at java.lang.Thread.run(Thread.java:745)
- Caused by: java.io.EOFException
- at org.apache.kafka.common.network.NetworkReceive.readFromReadableChannel(NetworkReceive.java:99)
- at kafka.network.BlockingChannel.readCompletely(BlockingChannel.scala:129)
- at kafka.network.BlockingChannel.receive(BlockingChannel.scala:120)
- at kafka.producer.SyncProducer.liftedTree1$1(SyncProducer.scala:78)
- at kafka.producer.SyncProducer.kafka$producer$SyncProducer$$doSend(SyncProducer.scala:75)
- at kafka.producer.SyncProducer.send(SyncProducer.scala:120)
- at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:58)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement