logstash as service

a guest
Feb 28th, 2015
:message=>"Reading config file", :file=>"logstash/agent.rb", :level=>:debug, :line=>"301"}
:message=>"Compiled pipeline code:\n@inputs = []\n@filters = []\n@outputs = []\n@input_file_1 = plugin(\"input\", \"file\", LogStash::Util.hash_merge_many({ \"path\" => (\"/var/log/apache2/access.log\".force_encoding(\"UTF-8\")) }))\n\n@inputs << @input_file_1\n@filter_grok_2 = plugin(\"filter\", \"grok\", LogStash::Util.hash_merge_many({ \"match\" => {(\"message\".force_encoding(\"UTF-8\")) => (\"%{COMBINEDAPACHELOG}\".force_encoding(\"UTF-8\"))} }))\n\n@filters << @filter_grok_2\n@filter_date_3 = plugin(\"filter\", \"date\", LogStash::Util.hash_merge_many({ \"match\" => [(\"timestamp\".force_encoding(\"UTF-8\")), (\"dd/MMM/yyyy:HH:mm:ss Z\".force_encoding(\"UTF-8\"))] }))\n\n@filters << @filter_date_3\n@output_elasticsearch_4 = plugin(\"output\", \"elasticsearch\", LogStash::Util.hash_merge_many({ \"host\" => (\"localhost\".force_encoding(\"UTF-8\")) }))\n\n@outputs << @output_elasticsearch_4\n@output_stdout_5 = plugin(\"output\", \"stdout\", LogStash::Util.hash_merge_many({ \"codec\" => (\"rubydebug\".force_encoding(\"UTF-8\")) }))\n\n@outputs << @output_stdout_5\n @filter_func = lambda do |event, &block|\n extra_events = []\n @logger.debug? && @logger.debug(\"filter received\", :event => event.to_hash)\n newevents = []\n extra_events.each do |event|\n @filter_grok_2.filter(event) do |newevent|\n newevents << newevent\n end\n end\n extra_events += newevents\n @filter_grok_2.filter(event) do |newevent|\n extra_events << newevent\n end\n if event.cancelled?\n extra_events.each(&block)\n return\n end\n newevents = []\n extra_events.each do |event|\n @filter_date_3.filter(event) do |newevent|\n newevents << newevent\n end\n end\n extra_events += newevents\n @filter_date_3.filter(event) do |newevent|\n extra_events << newevent\n end\n if event.cancelled?\n extra_events.each(&block)\n return\n end\n \n extra_events.each(&block)\n end\n @output_func = lambda do |event, &block|\n @logger.debug? && @logger.debug(\"output received\", :event => event.to_hash)\n @output_elasticsearch_4.handle(event)\n @output_stdout_5.handle(event)\n \n end", :level=>:debug, :file=>"logstash/pipeline.rb", :line=>"26"}
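
The compiled pipeline code above maps one-to-one onto a Logstash config file. Reconstructed from the plugin calls in that dump (the original file on disk may differ in layout, but not in behavior), it reads:

    # reconstructed from the compiled pipeline; paths and patterns as logged
    input {
      file {
        path => "/var/log/apache2/access.log"
      }
    }
    filter {
      grok {
        match => { "message" => "%{COMBINEDAPACHELOG}" }
      }
      date {
        # parse the Apache timestamp that grok extracts into "timestamp"
        match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
      }
    }
    output {
      elasticsearch {
        host => "localhost"
      }
      stdout {
        codec => rubydebug
      }
    }
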
:message=>"Using milestone 2 input plugin 'file'. This plugin should be stable, but if you see strange behavior, please let us know! For more information on plugin milestones, see http://logstash.net/docs/1.4.2-modified/plugin-milestones", :level=>:warn, :file=>"logstash/config/mixin.rb", :line=>"209"}
:message=>"config LogStash::Codecs::Plain/@charset = \"UTF-8\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@path = [\"/var/log/apache2/access.log\"]", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@debug = false", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@codec = <LogStash::Codecs::Plain charset=>\"UTF-8\">", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@add_field = {}", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@stat_interval = 1", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@discover_interval = 15", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@sincedb_write_interval = 15", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Inputs::File/@start_position = \"end\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@match = {\"message\"=>\"%{COMBINEDAPACHELOG}\"}", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@type = \"\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@exclude_tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@add_tag = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@remove_tag = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@add_field = {}", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@remove_field = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@patterns_dir = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@drop_if_match = false", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@break_on_match = true", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@named_captures_only = true", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@keep_empty_captures = false", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@singles = true", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@tag_on_failure = [\"_grokparsefailure\"]", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Grok/@overwrite = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@match = [\"timestamp\", \"dd/MMM/yyyy:HH:mm:ss Z\"]", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@type = \"\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@exclude_tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@add_tag = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@remove_tag = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@add_field = {}", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@remove_field = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Filters::Date/@target = \"@timestamp\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Codecs::Plain/@charset = \"UTF-8\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@host = \"localhost\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@type = \"\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@exclude_tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@codec = <LogStash::Codecs::Plain charset=>\"UTF-8\">", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@workers = 1", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@index = \"logstash-%{+YYYY.MM.dd}\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@manage_template = true", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@template_name = \"logstash\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@template_overwrite = false", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@document_id = nil", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@embedded = false", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@embedded_http_port = \"9200-9300\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@max_inflight_requests = 50", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@flush_size = 5000", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@idle_flush_time = 1", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::ElasticSearch/@action = \"index\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::Stdout/@codec = <LogStash::Codecs::RubyDebug >", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::Stdout/@type = \"\"", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::Stdout/@tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::Stdout/@exclude_tags = []", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
:message=>"config LogStash::Outputs::Stdout/@workers = 1", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"105"}
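
One default worth noticing in the dump above is @start_position = "end": the file input only picks up lines appended after Logstash starts, so a quiet access.log produces no events at all. If existing log lines should be indexed, a minimal tweak (start_position only applies to files the sincedb has not seen yet) would be:

    input {
      file {
        path => "/var/log/apache2/access.log"
        # read from the top on first contact with the file;
        # ignored for files already tracked in the sincedb
        start_position => "beginning"
      }
    }
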
:message=>"Registering file input", :path=>["/var/log/apache2/access.log"], :level=>:info, :file=>"logstash/inputs/file.rb", :line=>"74"}
:message=>"No sincedb_path set, generating one based on the file path", :sincedb_path=>"/var/lib/logstash/.sincedb_8636a19711465cc96926000984eb4005", :path=>["/var/log/apache2/access.log"], :level=>:info, :file=>"logstash/inputs/file.rb", :line=>"115"}
:message=>"_sincedb_open: reading from /var/lib/logstash/.sincedb_8636a19711465cc96926000984eb4005", :level=>:debug, :file=>"filewatch/tail.rb", :line=>"199"}
:message=>"_discover_file_glob: /var/log/apache2/access.log: glob is: []", :level=>:debug, :file=>"filewatch/watch.rb", :line=>"117"}
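
Two details here matter when running Logstash as a service. The sincedb file under /var/lib/logstash records how far into access.log Logstash has read, so restarts resume rather than re-read. More importantly, "glob is: []" means the path pattern matched no files visible to the process: when the same config works from an interactive shell but not as a service, the usual cause is that the service user (typically logstash) lacks read access to /var/log/apache2/access.log or traverse permission on its parent directories. Pinning the sincedb location explicitly also makes the state easier to inspect; the path below is only an example:

    input {
      file {
        path => "/var/log/apache2/access.log"
        # example location; any file writable by the service user works
        sincedb_path => "/var/lib/logstash/sincedb-apache-access"
      }
    }
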
:message=>"Grok patterns path", :patterns_dir=>["/opt/logstash/patterns/*"], :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"240"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/mongodb", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/java", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/linux-syslog", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/redis", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/postgresql", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/nagios", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/mcollective-patterns", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/ruby", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
org.elasticsearch.node: [logstash-linux01-6158-4008] version[1.1.1], pid[6158], build[f1585f0/2014-04-16T14:27:12Z]
org.elasticsearch.node: [logstash-linux01-6158-4008] initializing ...
org.elasticsearch.node: [logstash-linux01-6158-4008] using home [/var/lib/logstash], config [/var/lib/logstash/config], data [[/var/lib/logstash/data]], logs [/var/lib/logstash/logs], work [/var/lib/logstash/work], plugins [/var/lib/logstash/plugins]
org.elasticsearch.plugins: [logstash-linux01-6158-4008] [/var/lib/logstash/plugins] directory does not exist.
org.elasticsearch.plugins: [logstash-linux01-6158-4008] loaded [], sites []
org.elasticsearch.common.compress.lzf: using [UnsafeChunkDecoder] decoder
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [generic], type [cached], keep_alive [30s]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [index], type [fixed], size [1], queue_size [200]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [bulk], type [fixed], size [1], queue_size [50]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [get], type [fixed], size [1], queue_size [1k]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [search], type [fixed], size [3], queue_size [1k]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [suggest], type [fixed], size [1], queue_size [1k]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [percolate], type [fixed], size [1], queue_size [1k]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [management], type [scaling], min [1], size [5], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [flush], type [scaling], min [1], size [1], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [merge], type [scaling], min [1], size [1], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [refresh], type [scaling], min [1], size [1], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [warmer], type [scaling], min [1], size [1], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [snapshot], type [scaling], min [1], size [1], keep_alive [5m]
org.elasticsearch.threadpool: [logstash-linux01-6158-4008] creating thread_pool [optimize], type [fixed], size [1], queue_size [null]
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] using worker_count[2], port[9300-9400], bind_host[null], publish_host[null], compress[false], connect_timeout[30s], connections_per_node[2/3/6/1/1], receive_predictor[512kb->512kb]
org.elasticsearch.discovery.zen.ping.unicast: [logstash-linux01-6158-4008] using initial hosts [localhost:9300, localhost:9301, localhost:9302, localhost:9303, localhost:9304, localhost:9305], with concurrent_connects [10]
org.elasticsearch.discovery.zen: [logstash-linux01-6158-4008] using ping.timeout [3s], master_election.filter_client [true], master_election.filter_data [false]
org.elasticsearch.discovery.zen.elect: [logstash-linux01-6158-4008] using minimum_master_nodes [-1]
org.elasticsearch.discovery.zen.fd: [logstash-linux01-6158-4008] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
org.elasticsearch.discovery.zen.fd: [logstash-linux01-6158-4008] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
org.elasticsearch.monitor.jvm: [logstash-linux01-6158-4008] enabled [true], last_gc_enabled [false], interval [1s], gc_threshold [{old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}}]
org.elasticsearch.monitor.os: [logstash-linux01-6158-4008] Using probe [org.elasticsearch.monitor.os.JmxOsProbe@6edad23a] with refresh_interval [1s]
org.elasticsearch.monitor.process: [logstash-linux01-6158-4008] Using probe [org.elasticsearch.monitor.process.JmxProcessProbe@72e4c181] with refresh_interval [1s]
org.elasticsearch.monitor.jvm: [logstash-linux01-6158-4008] Using refresh_interval [1s]
org.elasticsearch.monitor.network: [logstash-linux01-6158-4008] Using probe [org.elasticsearch.monitor.network.JmxNetworkProbe@1dfc8b68] with refresh_interval [5s]
org.elasticsearch.monitor.network: [logstash-linux01-6158-4008] net_info
host [linux01]
eth0 display_name [eth0]
address [/fe80:0:0:0:20c:29ff:fed0:5f66%2] [/192.168.8.19]
mtu [1500] multicast [true] ptp [false] loopback [false] up [true] virtual [false]
lo display_name [lo]
address [/0:0:0:0:0:0:0:1%1] [/127.0.0.1]
mtu [65536] multicast [false] ptp [false] loopback [true] up [true] virtual [false]

org.elasticsearch.monitor.fs: [logstash-linux01-6158-4008] Using probe [org.elasticsearch.monitor.fs.JmxFsProbe@6ce629e8] with refresh_interval [1s]
org.elasticsearch.indices.store: [logstash-linux01-6158-4008] using indices.store.throttle.type [MERGE], with index.store.throttle.max_bytes_per_sec [20mb]
org.elasticsearch.script: [logstash-linux01-6158-4008] using script cache with max_size [500], expire [null]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using node_concurrent_recoveries [2], node_initial_primaries_recoveries [4]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster_concurrent_rebalance] with [2]
org.elasticsearch.gateway.local: [logstash-linux01-6158-4008] using initial_shards [quorum], list_timeout [30s]
org.elasticsearch.indices.recovery: [logstash-linux01-6158-4008] using max_bytes_per_sec[20mb], concurrent_streams [3], file_chunk_size [512kb], translog_size [512kb], translog_ops [1000], and compress [true]
org.elasticsearch.indices.memory: [logstash-linux01-6158-4008] using index_buffer_size [49.1mb], with min_shard_index_buffer_size [4mb], max_shard_index_buffer_size [512mb], shard_inactive_time [30m]
org.elasticsearch.indices.cache.filter: [logstash-linux01-6158-4008] using [node] weighted filter cache with size [20%], actual_size [98.3mb], expire [null], clean_interval [1m]
org.elasticsearch.indices.fielddata.cache: [logstash-linux01-6158-4008] using size [-1] [-1b], expire [null]
org.elasticsearch.gateway.local.state.meta: [logstash-linux01-6158-4008] using gateway.local.auto_import_dangled [YES], with gateway.local.dangling_timeout [2h]
org.elasticsearch.bulk.udp: [logstash-linux01-6158-4008] using enabled [false], host [null], port [9700-9800], bulk_actions [1000], bulk_size [5mb], flush_interval [5s], concurrent_requests [4]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using node_concurrent_recoveries [2], node_initial_primaries_recoveries [4]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster_concurrent_rebalance] with [2]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using node_concurrent_recoveries [2], node_initial_primaries_recoveries [4]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
org.elasticsearch.cluster.routing.allocation.decider: [logstash-linux01-6158-4008] using [cluster_concurrent_rebalance] with [2]
org.elasticsearch.node: [logstash-linux01-6158-4008] initialized
org.elasticsearch.node: [logstash-linux01-6158-4008] starting ...
org.elasticsearch.netty.channel.socket.nio.SelectorUtil: Using select timeout of 500
org.elasticsearch.netty.channel.socket.nio.SelectorUtil: Epoll-bug workaround enabled = false
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] Bound to address [/0:0:0:0:0:0:0:0:9301]
org.elasticsearch.transport: [logstash-linux01-6158-4008] bound_address {inet[/0:0:0:0:0:0:0:0:9301]}, publish_address {inet[/192.168.8.19:9301]}
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] connected to node [[#zen_unicast_1#][linux01][inet[localhost/127.0.0.1:9300]]]
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] connected to node [[#zen_unicast_2#][linux01][inet[localhost/127.0.0.1:9301]]]
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] disconnected from [[#zen_unicast_1#][linux01][inet[localhost/127.0.0.1:9300]]]
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] disconnected from [[#zen_unicast_2#][linux01][inet[localhost/127.0.0.1:9301]]]
org.elasticsearch.discovery.zen: [logstash-linux01-6158-4008] filtered ping responses: (filter_client[true], filter_data[false])
--> target [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]], master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]]
org.elasticsearch.transport.netty: [logstash-linux01-6158-4008] connected to node [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]]
org.elasticsearch.discovery.zen.publish: [logstash-linux01-6158-4008] received cluster state version 173
org.elasticsearch.discovery.zen: [logstash-linux01-6158-4008] received cluster state from [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]] which is also master but with cluster name [Cluster [elasticsearch]]
org.elasticsearch.discovery.zen: [logstash-linux01-6158-4008] got a new state from master node, though we are already trying to rejoin the cluster
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] processing [zen-disco-receive(from master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]])]: execute
org.elasticsearch.discovery.zen.fd: [logstash-linux01-6158-4008] [master] restarting fault detection against master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]], reason [new cluster state received and we are monitoring the wrong master [null]]
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] got first state from fresh master [g_MjDNTAQW-uuTX3UnPf2g]
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] cluster state updated, version [173], source [zen-disco-receive(from master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]])]
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] detected_master [Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]], added {[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]],}, reason: zen-disco-receive(from master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]])
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] set local cluster state to version 173
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] processing [zen-disco-receive(from master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]])]: done applying updated cluster_state (version: 173)
org.elasticsearch.discovery: [logstash-linux01-6158-4008] elasticsearch/drgo1vl-RmCYphYJ8hoatg
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] processing [updating local node id]: execute
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] cluster state updated, version [173], source [updating local node id]
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] set local cluster state to version 173
org.elasticsearch.cluster.service: [logstash-linux01-6158-4008] processing [updating local node id]: done applying updated cluster_state (version: 173)
org.elasticsearch.node: [logstash-linux01-6158-4008] started
org.elasticsearch.discovery.zen.fd: [logstash-linux01-6158-4008] [master] starting fault detection against master [[Graydon Creed][g_MjDNTAQW-uuTX3UnPf2g][linux01][inet[/192.168.8.19:9300]]], reason [initial_join]
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/grok-patterns", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/haproxy", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/junos", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/mcollective", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Grok loading patterns from file", :path=>"/opt/logstash/patterns/firewalls", :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"247"}
:message=>"Match data", :match=>{"message"=>"%{COMBINEDAPACHELOG}"}, :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"254"}
:message=>"Grok compile", :field=>"message", :patterns=>["%{COMBINEDAPACHELOG}"], :level=>:info, :file=>"logstash/filters/grok.rb", :line=>"265"}
:message=>"regexp: /message", :pattern=>"%{COMBINEDAPACHELOG}", :level=>:debug, :file=>"logstash/filters/grok.rb", :line=>"267"}
:message=>"Adding type with date config", :type=>"", :field=>"timestamp", :format=>"dd/MMM/yyyy:HH:mm:ss Z", :level=>:debug, :file=>"logstash/filters/date.rb", :line=>"165"}
:message=>"Pipeline started", :level=>:info, :file=>"logstash/pipeline.rb", :line=>"78"}
:message=>"log4j java properties setup", :log4j_level=>"DEBUG", :level=>:debug, :file=>"logstash/logging.rb", :line=>"87"}
:message=>"New Elasticsearch output", :cluster=>nil, :host=>"localhost", :port=>"9300-9305", :embedded=>false, :protocol=>"node", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"252"}
:message=>"Automatic template management enabled", :manage_template=>"true", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"258"}
:message=>"Using mapping template", :template=>"{ \"template\" : \"logstash-*\", \"settings\" : { \"index.refresh_interval\" : \"5s\" }, \"mappings\" : { \"_default_\" : { \"_all\" : {\"enabled\" : true}, \"dynamic_templates\" : [ { \"string_fields\" : { \"match\" : \"*\", \"match_mapping_type\" : \"string\", \"mapping\" : { \"type\" : \"string\", \"index\" : \"analyzed\", \"omit_norms\" : true, \"fields\" : { \"raw\" : {\"type\": \"string\", \"index\" : \"not_analyzed\", \"ignore_above\" : 256} } } } } ], \"properties\" : { \"@version\": { \"type\": \"string\", \"index\": \"not_analyzed\" }, \"geoip\" : { \"type\" : \"object\", \"dynamic\": true, \"path\": \"full\", \"properties\" : { \"location\" : { \"type\" : \"geo_point\" } } } } } }}", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"278"}
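
The wall of org.elasticsearch.* messages earlier is a consequence of protocol=>"node": the elasticsearch output starts an embedded Elasticsearch client node (logstash-linux01-6158-4008 here) that discovers and joins the cluster over the 9300 transport range. If that join handshake is unwanted, or the cluster-name mismatch hinted at in the "Cluster [elasticsearch]" message above causes trouble, Logstash 1.4 can instead talk plain HTTP; a minimal sketch, assuming Elasticsearch listens on its default HTTP port 9200:

    output {
      elasticsearch {
        host => "localhost"
        # bypass the embedded client node and use the HTTP API on :9200
        protocol => "http"
      }
    }
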
:message=>"_discover_file_glob: /var/log/apache2/access.log: glob is: []", :level=>:debug, :file=>"filewatch/watch.rb", :line=>"117"}
:message=>"_discover_file_glob: /var/log/apache2/access.log: glob is: []", :level=>:debug, :file=>"filewatch/watch.rb", :line=>"117"}