logfile logstash (darkside77, Feb 3rd, 2019)
  1. Sending Logstash logs to /usr/share/logstash/logs which is now configured via log4j2.properties
  2. [2019-02-03T12:57:33,978][DEBUG][logstash.modules.scaffold] Found module {:module_name=>"netflow", :directory=>"/usr/share/logstash/modules/netflow/configuration"}
  3. [2019-02-03T12:57:33,995][DEBUG][logstash.plugins.registry] Adding plugin to the registry {:name=>"netflow", :type=>:modules, :class=>#<LogStash::Modules::Scaffold:0x1b2b2a58 @directory="/usr/share/logstash/modules/netflow/configuration", @module_name="netflow", @kibana_version_parts=["6", "0", "0"]>}
  4. [2019-02-03T12:57:34,001][DEBUG][logstash.modules.scaffold] Found module {:module_name=>"fb_apache", :directory=>"/usr/share/logstash/modules/fb_apache/configuration"}
  5. [2019-02-03T12:57:34,004][DEBUG][logstash.plugins.registry] Adding plugin to the registry {:name=>"fb_apache", :type=>:modules, :class=>#<LogStash::Modules::Scaffold:0x59523cb5 @directory="/usr/share/logstash/modules/fb_apache/configuration", @module_name="fb_apache", @kibana_version_parts=["6", "0", "0"]>}
  6. [2019-02-03T12:57:34,124][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.queue", :path=>"/usr/share/logstash/data/queue"}
  7. [2019-02-03T12:57:34,139][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.dead_letter_queue", :path=>"/usr/share/logstash/data/dead_letter_queue"}
  8. [2019-02-03T12:57:35,052][DEBUG][logstash.runner ] -------- Logstash Settings (* means modified) ---------
  9. [2019-02-03T12:57:35,054][DEBUG][logstash.runner ] node.name: "logstash"
  10. [2019-02-03T12:57:35,055][DEBUG][logstash.runner ] *path.config: "/config-dir/01-test.conf"
  11. [2019-02-03T12:57:35,057][DEBUG][logstash.runner ] path.data: "/usr/share/logstash/data"
  12. [2019-02-03T12:57:35,058][DEBUG][logstash.runner ] modules.cli: []
  13. [2019-02-03T12:57:35,059][DEBUG][logstash.runner ] modules: []
  14. [2019-02-03T12:57:35,060][DEBUG][logstash.runner ] modules_list: []
  15. [2019-02-03T12:57:35,060][DEBUG][logstash.runner ] modules_variable_list: []
  16. [2019-02-03T12:57:35,061][DEBUG][logstash.runner ] modules_setup: false
  17. [2019-02-03T12:57:35,063][DEBUG][logstash.runner ] config.test_and_exit: false
  18. [2019-02-03T12:57:35,063][DEBUG][logstash.runner ] config.reload.automatic: false
  19. [2019-02-03T12:57:35,065][DEBUG][logstash.runner ] config.reload.interval: 3000000000
  20. [2019-02-03T12:57:35,066][DEBUG][logstash.runner ] config.support_escapes: false
  21. [2019-02-03T12:57:35,067][DEBUG][logstash.runner ] config.field_reference.parser: "COMPAT"
  22. [2019-02-03T12:57:35,068][DEBUG][logstash.runner ] metric.collect: true
  23. [2019-02-03T12:57:35,069][DEBUG][logstash.runner ] pipeline.id: "main"
  24. [2019-02-03T12:57:35,070][DEBUG][logstash.runner ] pipeline.system: false
  25. [2019-02-03T12:57:35,071][DEBUG][logstash.runner ] pipeline.workers: 1
  26. [2019-02-03T12:57:35,071][DEBUG][logstash.runner ] pipeline.output.workers: 1
  27. [2019-02-03T12:57:35,074][DEBUG][logstash.runner ] pipeline.batch.size: 125
  28. [2019-02-03T12:57:35,075][DEBUG][logstash.runner ] pipeline.batch.delay: 50
  29. [2019-02-03T12:57:35,076][DEBUG][logstash.runner ] pipeline.unsafe_shutdown: false
  30. [2019-02-03T12:57:35,077][DEBUG][logstash.runner ] pipeline.java_execution: false
  31. [2019-02-03T12:57:35,078][DEBUG][logstash.runner ] pipeline.reloadable: true
  32. [2019-02-03T12:57:35,078][DEBUG][logstash.runner ] path.plugins: []
  33. [2019-02-03T12:57:35,079][DEBUG][logstash.runner ] config.debug: false
  34. [2019-02-03T12:57:35,080][DEBUG][logstash.runner ] *log.level: "debug" (default: "info")
  35. [2019-02-03T12:57:35,081][DEBUG][logstash.runner ] version: false
  36. [2019-02-03T12:57:35,083][DEBUG][logstash.runner ] help: false
  37. [2019-02-03T12:57:35,084][DEBUG][logstash.runner ] log.format: "plain"
  38. [2019-02-03T12:57:35,093][DEBUG][logstash.runner ] *http.host: "0.0.0.0" (default: "127.0.0.1")
  39. [2019-02-03T12:57:35,094][DEBUG][logstash.runner ] http.port: 9600..9700
  40. [2019-02-03T12:57:35,094][DEBUG][logstash.runner ] http.environment: "production"
  41. [2019-02-03T12:57:35,095][DEBUG][logstash.runner ] queue.type: "memory"
  42. [2019-02-03T12:57:35,096][DEBUG][logstash.runner ] queue.drain: false
  43. [2019-02-03T12:57:35,096][DEBUG][logstash.runner ] queue.page_capacity: 67108864
  44. [2019-02-03T12:57:35,097][DEBUG][logstash.runner ] queue.max_bytes: 1073741824
  45. [2019-02-03T12:57:35,097][DEBUG][logstash.runner ] queue.max_events: 0
  46. [2019-02-03T12:57:35,098][DEBUG][logstash.runner ] queue.checkpoint.acks: 1024
  47. [2019-02-03T12:57:35,098][DEBUG][logstash.runner ] queue.checkpoint.writes: 1024
  48. [2019-02-03T12:57:35,099][DEBUG][logstash.runner ] queue.checkpoint.interval: 1000
  49. [2019-02-03T12:57:35,099][DEBUG][logstash.runner ] dead_letter_queue.enable: false
  50. [2019-02-03T12:57:35,099][DEBUG][logstash.runner ] dead_letter_queue.max_bytes: 1073741824
  51. [2019-02-03T12:57:35,100][DEBUG][logstash.runner ] slowlog.threshold.warn: -1
  52. [2019-02-03T12:57:35,100][DEBUG][logstash.runner ] slowlog.threshold.info: -1
  53. [2019-02-03T12:57:35,101][DEBUG][logstash.runner ] slowlog.threshold.debug: -1
  54. [2019-02-03T12:57:35,101][DEBUG][logstash.runner ] slowlog.threshold.trace: -1
  55. [2019-02-03T12:57:35,103][DEBUG][logstash.runner ] keystore.classname: "org.logstash.secret.store.backend.JavaKeyStore"
  56. [2019-02-03T12:57:35,105][DEBUG][logstash.runner ] keystore.file: "/usr/share/logstash/config/logstash.keystore"
  57. [2019-02-03T12:57:35,105][DEBUG][logstash.runner ] path.queue: "/usr/share/logstash/data/queue"
  58. [2019-02-03T12:57:35,106][DEBUG][logstash.runner ] path.dead_letter_queue: "/usr/share/logstash/data/dead_letter_queue"
  59. [2019-02-03T12:57:35,107][DEBUG][logstash.runner ] path.settings: "/usr/share/logstash/config"
  60. [2019-02-03T12:57:35,107][DEBUG][logstash.runner ] path.logs: "/usr/share/logstash/logs"
  61. [2019-02-03T12:57:35,108][DEBUG][logstash.runner ] xpack.management.enabled: false
  62. [2019-02-03T12:57:35,109][DEBUG][logstash.runner ] xpack.management.logstash.poll_interval: 5000000000
  63. [2019-02-03T12:57:35,110][DEBUG][logstash.runner ] xpack.management.pipeline.id: ["main"]
  64. [2019-02-03T12:57:35,110][DEBUG][logstash.runner ] xpack.management.elasticsearch.username: "logstash_system"
  65. [2019-02-03T12:57:35,111][DEBUG][logstash.runner ] xpack.management.elasticsearch.url: ["https://localhost:9200"]
  66. [2019-02-03T12:57:35,113][DEBUG][logstash.runner ] xpack.management.elasticsearch.ssl.verification_mode: "certificate"
  67. [2019-02-03T12:57:35,113][DEBUG][logstash.runner ] xpack.management.elasticsearch.sniffing: false
  68. [2019-02-03T12:57:35,114][DEBUG][logstash.runner ] xpack.monitoring.enabled: false
  69. [2019-02-03T12:57:35,115][DEBUG][logstash.runner ] *xpack.monitoring.elasticsearch.url: ["http://elasticsearch:9200"] (default: ["http://localhost:9200"])
  70. [2019-02-03T12:57:35,116][DEBUG][logstash.runner ] xpack.monitoring.collection.interval: 10000000000
  71. [2019-02-03T12:57:35,117][DEBUG][logstash.runner ] xpack.monitoring.collection.timeout_interval: 600000000000
  72. [2019-02-03T12:57:35,117][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.username: "logstash_system"
  73. [2019-02-03T12:57:35,118][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.ssl.verification_mode: "certificate"
  74. [2019-02-03T12:57:35,124][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.sniffing: false
  75. [2019-02-03T12:57:35,125][DEBUG][logstash.runner ] xpack.monitoring.collection.pipeline.details.enabled: true
  76. [2019-02-03T12:57:35,126][DEBUG][logstash.runner ] xpack.monitoring.collection.config.enabled: true
  77. [2019-02-03T12:57:35,127][DEBUG][logstash.runner ] node.uuid: ""
  78. [2019-02-03T12:57:35,127][DEBUG][logstash.runner ] --------------- Logstash Settings -------------------
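Only four settings in the dump above deviate from their defaults (the entries flagged with *): path.config, log.level, http.host and xpack.monitoring.elasticsearch.url. As a sketch, assuming they were set in logstash.yml rather than passed as command-line flags or environment variables, the equivalent would be:

    # logstash.yml (sketch; path.config is more commonly given as "-f /config-dir/01-test.conf" on the command line)
    path.config: "/config-dir/01-test.conf"
    log.level: "debug"
    http.host: "0.0.0.0"
    xpack.monitoring.elasticsearch.url: ["http://elasticsearch:9200"]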
  79. [2019-02-03T12:57:35,244][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
  80. [2019-02-03T12:57:35,275][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.5.4"}
  81. [2019-02-03T12:57:35,348][INFO ][logstash.agent ] No persistent UUID file found. Generating new UUID {:uuid=>"9e10fc0f-1d62-453b-9938-94ba049694e0", :path=>"/usr/share/logstash/data/uuid"}
  82. [2019-02-03T12:57:35,374][DEBUG][logstash.agent ] Setting global FieldReference parsing mode: COMPAT
  83. [2019-02-03T12:57:35,437][DEBUG][logstash.agent ] Setting up metric collection
  84. [2019-02-03T12:57:35,622][DEBUG][logstash.instrument.periodicpoller.os] Starting {:polling_interval=>5, :polling_timeout=>120}
  85. [2019-02-03T12:57:36,366][DEBUG][logstash.instrument.periodicpoller.jvm] Starting {:polling_interval=>5, :polling_timeout=>120}
  86. [2019-02-03T12:57:36,612][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  87. [2019-02-03T12:57:36,639][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  88. [2019-02-03T12:57:36,693][DEBUG][logstash.instrument.periodicpoller.persistentqueue] Starting {:polling_interval=>5, :polling_timeout=>120}
  89. [2019-02-03T12:57:36,727][DEBUG][logstash.instrument.periodicpoller.deadletterqueue] Starting {:polling_interval=>5, :polling_timeout=>120}
  90. [2019-02-03T12:57:36,796][WARN ][logstash.monitoringextension.pipelineregisterhook] xpack.monitoring.enabled has not been defined, but found elasticsearch configuration. Please explicitly set `xpack.monitoring.enabled: true` in logstash.yml
  91. [2019-02-03T12:57:37,141][DEBUG][logstash.monitoringextension.pipelineregisterhook] compiled metrics pipeline config: {:config=>"# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one\n# or more contributor license agreements. Licensed under the Elastic License;\n# you may not use this file except in compliance with the Elastic License.\n\ninput {\n metrics {\n collection_interval => 10\n collection_timeout_interval => 600\n extended_performance_collection => true\n config_collection => true\n }\n}\noutput {\n elasticsearch {\n hosts => [\"http://elasticsearch:9200\"]\n bulk_path => \"/_xpack/monitoring/_bulk?system_id=logstash&system_api_version=2&interval=1s\"\n manage_template => false\n document_type => \"%{[@metadata][document_type]}\"\n index => \"\"\n sniffing => false\n \n \n # In the case where the user does not want SSL we don't set ssl => false\n # the reason being that the user can still turn ssl on by using https in their URL\n # This causes the ES output to throw an error due to conflicting messages\n \n }\n}\n"}
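Unescaped for readability (Elastic license header omitted, indentation restored), the compiled monitoring pipeline embedded in the line above reads:

    input {
      metrics {
        collection_interval => 10
        collection_timeout_interval => 600
        extended_performance_collection => true
        config_collection => true
      }
    }
    output {
      elasticsearch {
        hosts => ["http://elasticsearch:9200"]
        bulk_path => "/_xpack/monitoring/_bulk?system_id=logstash&system_api_version=2&interval=1s"
        manage_template => false
        document_type => "%{[@metadata][document_type]}"
        index => ""
        sniffing => false
        # In the case where the user does not want SSL we don't set ssl => false
        # the reason being that the user can still turn ssl on by using https in their URL
        # This causes the ES output to throw an error due to conflicting messages
      }
    }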
  92. [2019-02-03T12:57:37,892][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"plain", :type=>"codec", :class=>LogStash::Codecs::Plain}
  93. [2019-02-03T12:57:37,965][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_737171bb-a275-4c1a-b07a-b81ffd465618"
  94. [2019-02-03T12:57:37,971][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
  95. [2019-02-03T12:57:37,973][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
  96. [2019-02-03T12:57:38,081][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@hosts = [http://elasticsearch:9200]
  97. [2019-02-03T12:57:38,083][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@user = "logstash_system"
  98. [2019-02-03T12:57:38,089][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@password = <password>
  99. [2019-02-03T12:57:38,093][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing = false
  100. [2019-02-03T12:57:38,094][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_certificate_verification = true
  101. [2019-02-03T12:57:38,098][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@resurrect_delay = 30
  102. [2019-02-03T12:57:38,101][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@id = "elasticsearch_e7980a3a-7f6c-4762-a784-b1360735439a"
  103. [2019-02-03T12:57:38,102][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@enable_metric = true
  104. [2019-02-03T12:57:38,127][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@codec = <LogStash::Codecs::Plain id=>"plain_737171bb-a275-4c1a-b07a-b81ffd465618", enable_metric=>true, charset=>"UTF-8">
  105. [2019-02-03T12:57:38,134][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@workers = 1
  106. [2019-02-03T12:57:38,135][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@index = "logstash-%{+YYYY.MM.dd}"
  107. [2019-02-03T12:57:38,136][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@manage_template = true
  108. [2019-02-03T12:57:38,137][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_name = "logstash"
  109. [2019-02-03T12:57:38,138][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_overwrite = false
  110. [2019-02-03T12:57:38,140][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@parent = nil
  111. [2019-02-03T12:57:38,141][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@join_field = nil
  112. [2019-02-03T12:57:38,143][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@upsert = ""
  113. [2019-02-03T12:57:38,144][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@doc_as_upsert = false
  114. [2019-02-03T12:57:38,149][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script = ""
  115. [2019-02-03T12:57:38,150][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_type = "inline"
  116. [2019-02-03T12:57:38,152][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_lang = "painless"
  117. [2019-02-03T12:57:38,153][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_var_name = "event"
  118. [2019-02-03T12:57:38,154][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@scripted_upsert = false
  119. [2019-02-03T12:57:38,155][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_initial_interval = 2
  120. [2019-02-03T12:57:38,156][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_max_interval = 64
  121. [2019-02-03T12:57:38,158][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_on_conflict = 1
  122. [2019-02-03T12:57:38,159][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pipeline = nil
  123. [2019-02-03T12:57:38,160][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@action = "index"
  124. [2019-02-03T12:57:38,161][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing_delay = 5
  125. [2019-02-03T12:57:38,162][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@timeout = 60
  126. [2019-02-03T12:57:38,162][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@failure_type_logging_whitelist = []
  127. [2019-02-03T12:57:38,166][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max = 1000
  128. [2019-02-03T12:57:38,167][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max_per_route = 100
  129. [2019-02-03T12:57:38,168][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@validate_after_inactivity = 10000
  130. [2019-02-03T12:57:38,171][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@http_compression = false
  131. [2019-02-03T12:57:38,171][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@custom_headers = {}
  132. [2019-02-03T12:57:38,251][DEBUG][logstash.licensechecker.licensereader] Normalizing http path {:path=>nil, :normalized=>nil}
  133. [2019-02-03T12:57:39,270][INFO ][logstash.licensechecker.licensereader] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
  134. [2019-02-03T12:57:39,313][DEBUG][logstash.licensechecker.licensereader] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elasticsearch:9200/, :path=>"/"}
  135. [2019-02-03T12:57:39,918][WARN ][logstash.licensechecker.licensereader] Restored connection to ES instance {:url=>"http://elasticsearch:9200/"}
  136. [2019-02-03T12:57:40,034][INFO ][logstash.licensechecker.licensereader] ES Output version determined {:es_version=>6}
  137. [2019-02-03T12:57:40,051][WARN ][logstash.licensechecker.licensereader] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
  138. [2019-02-03T12:57:40,233][DEBUG][logstash.licensechecker.licensemanager] updating observers of xpack info change
  139. [2019-02-03T12:57:40,312][DEBUG][logstash.monitoring.internalpipelinesource] updating licensing state installed:true,
  140. license:{"uid"=>"e925ddd1-618c-4061-992f-a2c0e2c345c7", "type"=>"basic", "mode"=>"basic", "status"=>"active"},
  141. features:{"graph"=>{"description"=>"Graph Data Exploration for the Elastic Stack", "available"=>false, "enabled"=>true}, "logstash"=>{"description"=>"Logstash management component for X-Pack", "available"=>false, "enabled"=>true}, "ml"=>{"description"=>"Machine Learning for the Elastic Stack", "available"=>false, "enabled"=>true, "native_code_info"=>{"version"=>"6.5.4", "build_hash"=>"b616085ef32393"}}, "monitoring"=>{"description"=>"Monitoring for the Elastic Stack", "available"=>true, "enabled"=>true}, "rollup"=>{"description"=>"Time series pre-aggregation and rollup", "available"=>true, "enabled"=>true}, "security"=>{"description"=>"Security for the Elastic Stack", "available"=>false, "enabled"=>true}, "watcher"=>{"description"=>"Alerting, Notification and Automation for the Elastic Stack", "available"=>false, "enabled"=>true}},
  142. last_updated:}
  143. [2019-02-03T12:57:40,381][INFO ][logstash.monitoring.internalpipelinesource] Monitoring License OK
  144. [2019-02-03T12:57:40,387][INFO ][logstash.monitoring.internalpipelinesource] Validated license for monitoring. Enabling monitoring pipeline.
  145. [2019-02-03T12:57:40,528][DEBUG][logstash.config.source.local.configpathloader] Skipping the following files while reading config since they don't match the specified glob pattern {:files=>["/config-dir/files", "/config-dir/logstash.conf"]}
  146. [2019-02-03T12:57:40,540][DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"/config-dir/01-test.conf"}
  147. [2019-02-03T12:57:40,639][DEBUG][logstash.agent ] Converging pipelines state {:actions_count=>1}
  148. [2019-02-03T12:57:40,678][DEBUG][logstash.agent ] Executing action {:action=>LogStash::PipelineAction::Create/pipeline_id:main}
  149. [2019-02-03T12:57:41,771][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  150. [2019-02-03T12:57:41,775][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  151. [2019-02-03T12:57:43,969][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"file", :type=>"input", :class=>LogStash::Inputs::File}
  152. [2019-02-03T12:57:44,158][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_c4f4d29c-57e1-4d66-b3b0-38708c3c516f"
  153. [2019-02-03T12:57:44,162][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
  154. [2019-02-03T12:57:44,164][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
  155. [2019-02-03T12:57:44,238][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@start_position = "beginning"
  156. [2019-02-03T12:57:44,239][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@path = ["/root/ELK/logstash/files/nginx_logs"]
  157. [2019-02-03T12:57:44,240][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@id = "7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb"
  158. [2019-02-03T12:57:44,241][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_path = "/dev/null"
  159. [2019-02-03T12:57:44,242][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@enable_metric = true
  160. [2019-02-03T12:57:44,252][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@codec = <LogStash::Codecs::Plain id=>"plain_c4f4d29c-57e1-4d66-b3b0-38708c3c516f", enable_metric=>true, charset=>"UTF-8">
  161. [2019-02-03T12:57:44,254][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@add_field = {}
  162. [2019-02-03T12:57:44,257][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@stat_interval = 1.0
  163. [2019-02-03T12:57:44,260][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@discover_interval = 15
  164. [2019-02-03T12:57:44,261][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_write_interval = 15.0
  165. [2019-02-03T12:57:44,262][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@delimiter = "\n"
  166. [2019-02-03T12:57:44,263][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@close_older = 3600.0
  167. [2019-02-03T12:57:44,266][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@mode = "tail"
  168. [2019-02-03T12:57:44,269][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_completed_action = "delete"
  169. [2019-02-03T12:57:44,270][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_clean_after = 1209600.0
  170. [2019-02-03T12:57:44,271][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_chunk_size = 32768
  171. [2019-02-03T12:57:44,277][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_chunk_count = 140737488355327
  172. [2019-02-03T12:57:44,278][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_sort_by = "last_modified"
  173. [2019-02-03T12:57:44,278][DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_sort_direction = "asc"
  174. [2019-02-03T12:57:44,339][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"stdout", :type=>"output", :class=>LogStash::Outputs::Stdout}
  175. [2019-02-03T12:57:44,391][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"rubydebug", :type=>"codec", :class=>LogStash::Codecs::RubyDebug}
  176. [2019-02-03T12:57:44,414][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@id = "rubydebug_fdc79cac-6835-4417-9b2c-ba5f101c6aa4"
  177. [2019-02-03T12:57:44,416][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@enable_metric = true
  178. [2019-02-03T12:57:44,418][DEBUG][logstash.codecs.rubydebug] config LogStash::Codecs::RubyDebug/@metadata = false
  179. [2019-02-03T12:57:46,855][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  180. [2019-02-03T12:57:46,872][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  181. [2019-02-03T12:57:47,863][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@codec = <LogStash::Codecs::RubyDebug id=>"rubydebug_fdc79cac-6835-4417-9b2c-ba5f101c6aa4", enable_metric=>true, metadata=>false>
  182. [2019-02-03T12:57:47,864][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@id = "85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a"
  183. [2019-02-03T12:57:47,867][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@enable_metric = true
  184. [2019-02-03T12:57:47,868][DEBUG][logstash.outputs.stdout ] config LogStash::Outputs::Stdout/@workers = 1
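From the file-input and stdout-output settings dumped above, /config-dir/01-test.conf is presumably close to the following sketch (path, start_position, sincedb_path and the rubydebug codec are the only non-default options shown; no filter plugins were loaded on demand, so no filter block is assumed):

    input {
      file {
        path => "/root/ELK/logstash/files/nginx_logs"
        start_position => "beginning"
        sincedb_path => "/dev/null"
      }
    }

    output {
      stdout {
        codec => rubydebug
      }
    }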
  185. [2019-02-03T12:57:48,007][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>1, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
  186. [2019-02-03T12:57:48,619][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x1e5b743f run>"}
  187. [2019-02-03T12:57:48,807][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
  188. [2019-02-03T12:57:48,842][INFO ][filewatch.observingtail ] START, creating Discoverer, Watch with file and sincedb collections
  189. [2019-02-03T12:57:48,852][DEBUG][logstash.config.sourceloader] Adding source {:source=>"#<LogStash::Monitoring::InternalPipelineSource:0x63f7e31e>"}
  190. [2019-02-03T12:57:49,073][DEBUG][logstash.agent ] Starting agent
  191. [2019-02-03T12:57:49,185][DEBUG][logstash.config.source.local.configpathloader] Skipping the following files while reading config since they don't match the specified glob pattern {:files=>["/config-dir/files", "/config-dir/logstash.conf"]}
  192. [2019-02-03T12:57:49,187][DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"/config-dir/01-test.conf"}
  193. [2019-02-03T12:57:49,275][DEBUG][logstash.agent ] Converging pipelines state {:actions_count=>1}
  194. [2019-02-03T12:57:49,301][DEBUG][logstash.agent ] Executing action {:action=>LogStash::PipelineAction::Create/pipeline_id:.monitoring-logstash}
  195. [2019-02-03T12:57:50,936][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_f30fadb4-1df7-4c21-9574-18813985ccb7"
  196. [2019-02-03T12:57:50,937][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
  197. [2019-02-03T12:57:50,939][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
  198. [2019-02-03T12:57:50,952][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@collection_interval = 10
  199. [2019-02-03T12:57:50,957][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@config_collection = "true"
  200. [2019-02-03T12:57:50,961][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@collection_timeout_interval = 600
  201. [2019-02-03T12:57:50,963][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@id = "c7683b7b24c013d58c1f5cb31c66f49401c9a9f6ef1d1db0cc3bd2a432ced74a"
  202. [2019-02-03T12:57:50,967][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@extended_performance_collection = "true"
  203. [2019-02-03T12:57:50,968][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@enable_metric = true
  204. [2019-02-03T12:57:50,972][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@codec = <LogStash::Codecs::Plain id=>"plain_f30fadb4-1df7-4c21-9574-18813985ccb7", enable_metric=>true, charset=>"UTF-8">
  205. [2019-02-03T12:57:50,974][DEBUG][logstash.inputs.metrics ] config LogStash::Inputs::Metrics/@add_field = {}
  206. [2019-02-03T12:57:50,988][DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"elasticsearch", :type=>"output", :class=>LogStash::Outputs::ElasticSearch}
  207. [2019-02-03T12:57:51,046][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@id = "plain_b2391fbc-1f01-4fd4-9240-ee733adc49c8"
  208. [2019-02-03T12:57:51,048][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@enable_metric = true
  209. [2019-02-03T12:57:51,051][DEBUG][logstash.codecs.plain ] config LogStash::Codecs::Plain/@charset = "UTF-8"
  210. [2019-02-03T12:57:51,105][WARN ][logstash.outputs.elasticsearch] You are using a deprecated config setting "document_type" set in elasticsearch. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. Document types are being deprecated in Elasticsearch 6.0, and removed entirely in 7.0. You should avoid this feature If you have any questions about this, please visit the #logstash channel on freenode irc. {:name=>"document_type", :plugin=><LogStash::Outputs::ElasticSearch bulk_path=>"/_xpack/monitoring/_bulk?system_id=logstash&system_api_version=2&interval=1s", hosts=>[http://elasticsearch:9200], sniffing=>false, manage_template=>false, id=>"51f66c9ec66feb8fd59be1157c335c1ddc5fb856d2b248254ae6add289cea7b7", document_type=>"%{[@metadata][document_type]}", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_b2391fbc-1f01-4fd4-9240-ee733adc49c8", enable_metric=>true, charset=>"UTF-8">, workers=>1, template_name=>"logstash", template_overwrite=>false, doc_as_upsert=>false, script_type=>"inline", script_lang=>"painless", script_var_name=>"event", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, action=>"index", ssl_certificate_verification=>true, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, resurrect_delay=>5, validate_after_inactivity=>10000, http_compression=>false>}
  211. [2019-02-03T12:57:51,113][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@bulk_path = "/_xpack/monitoring/_bulk?system_id=logstash&system_api_version=2&interval=1s"
  212. [2019-02-03T12:57:51,115][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@hosts = [http://elasticsearch:9200]
  213. [2019-02-03T12:57:51,116][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@index = ""
  214. [2019-02-03T12:57:51,118][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing = false
  215. [2019-02-03T12:57:51,120][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@manage_template = false
  216. [2019-02-03T12:57:51,120][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@id = "51f66c9ec66feb8fd59be1157c335c1ddc5fb856d2b248254ae6add289cea7b7"
  217. [2019-02-03T12:57:51,121][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@document_type = "%{[@metadata][document_type]}"
  218. [2019-02-03T12:57:51,122][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@enable_metric = true
  219. [2019-02-03T12:57:51,122][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@codec = <LogStash::Codecs::Plain id=>"plain_b2391fbc-1f01-4fd4-9240-ee733adc49c8", enable_metric=>true, charset=>"UTF-8">
  220. [2019-02-03T12:57:51,124][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@workers = 1
  221. [2019-02-03T12:57:51,126][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_name = "logstash"
  222. [2019-02-03T12:57:51,127][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_overwrite = false
  223. [2019-02-03T12:57:51,130][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@parent = nil
  224. [2019-02-03T12:57:51,131][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@join_field = nil
  225. [2019-02-03T12:57:51,132][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@upsert = ""
  226. [2019-02-03T12:57:51,133][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@doc_as_upsert = false
  227. [2019-02-03T12:57:51,134][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script = ""
  228. [2019-02-03T12:57:51,134][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_type = "inline"
  229. [2019-02-03T12:57:51,135][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_lang = "painless"
  230. [2019-02-03T12:57:51,135][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_var_name = "event"
  231. [2019-02-03T12:57:51,136][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@scripted_upsert = false
  232. [2019-02-03T12:57:51,137][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_initial_interval = 2
  233. [2019-02-03T12:57:51,138][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_max_interval = 64
  234. [2019-02-03T12:57:51,144][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_on_conflict = 1
  235. [2019-02-03T12:57:51,145][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pipeline = nil
  236. [2019-02-03T12:57:51,146][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@action = "index"
  237. [2019-02-03T12:57:51,148][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_certificate_verification = true
  238. [2019-02-03T12:57:51,149][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing_delay = 5
  239. [2019-02-03T12:57:51,150][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@timeout = 60
  240. [2019-02-03T12:57:51,151][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@failure_type_logging_whitelist = []
  241. [2019-02-03T12:57:51,151][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max = 1000
  242. [2019-02-03T12:57:51,152][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max_per_route = 100
  243. [2019-02-03T12:57:51,152][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@resurrect_delay = 5
  244. [2019-02-03T12:57:51,153][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@validate_after_inactivity = 10000
  245. [2019-02-03T12:57:51,154][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@http_compression = false
  246. [2019-02-03T12:57:51,155][DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@custom_headers = {}
  247. [2019-02-03T12:57:51,172][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>".monitoring-logstash", "pipeline.workers"=>1, "pipeline.batch.size"=>2, "pipeline.batch.delay"=>50}
  248. [2019-02-03T12:57:51,228][DEBUG][logstash.outputs.elasticsearch] Normalizing http path {:path=>nil, :normalized=>nil}
  249. [2019-02-03T12:57:51,327][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
  250. [2019-02-03T12:57:51,334][DEBUG][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elasticsearch:9200/, :path=>"/"}
  251. [2019-02-03T12:57:51,396][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elasticsearch:9200/"}
  252. [2019-02-03T12:57:51,432][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
  253. [2019-02-03T12:57:51,434][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
  254. [2019-02-03T12:57:51,495][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://elasticsearch:9200"]}
  255. [2019-02-03T12:57:51,617][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>".monitoring-logstash", :thread=>"#<Thread:0x2737bc41 sleep>"}
  256. [2019-02-03T12:57:51,626][INFO ][logstash.agent ] Pipelines running {:count=>2, :running_pipelines=>[:main, :".monitoring-logstash"], :non_running_pipelines=>[]}
  257. [2019-02-03T12:57:51,633][DEBUG][logstash.inputs.metrics ] Metric: input started
  258. [2019-02-03T12:57:51,715][DEBUG][logstash.agent ] Starting puma
  259. [2019-02-03T12:57:51,740][DEBUG][logstash.agent ] Trying to start WebServer {:port=>9600}
  260. [2019-02-03T12:57:51,869][DEBUG][logstash.api.service ] [api-service] start
  261. [2019-02-03T12:57:51,956][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  262. [2019-02-03T12:57:51,963][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  263. [2019-02-03T12:57:52,463][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
  264. [2019-02-03T12:57:53,680][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>"main", :thread=>"#<Thread:0x1e5b743f sleep>"}
  265. [2019-02-03T12:57:56,617][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>".monitoring-logstash", :thread=>"#<Thread:0x2737bc41 sleep>"}
  266. [2019-02-03T12:57:57,005][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  267. [2019-02-03T12:57:57,007][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  268. [2019-02-03T12:57:58,690][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>"main", :thread=>"#<Thread:0x1e5b743f sleep>"}
  269. [2019-02-03T12:58:01,619][DEBUG][logstash.pipeline ] Pushing flush onto pipeline {:pipeline_id=>".monitoring-logstash", :thread=>"#<Thread:0x2737bc41 sleep>"}
  270. [2019-02-03T12:58:01,722][DEBUG][logstash.inputs.metrics ] Metrics input: received a new snapshot {:created_at=>2019-02-03 12:58:01 UTC, :snapshot=>#<LogStash::Instrument::Snapshot:0x55a44694>}
  271. [2019-02-03T12:58:02,021][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
  272. [2019-02-03T12:58:02,026][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
  273. [2019-02-03T12:58:02,278][DEBUG][logstash.pipeline ] filter received {"event"=>{"reloads"=>{"successes"=>0, "failures"=>0}, "logstash"=>{"status"=>"green", "host"=>"logstash", "name"=>"logstash", "uuid"=>"9e10fc0f-1d62-453b-9938-94ba049694e0", "http_address"=>"0.0.0.0:9600", "pipeline"=>{"workers"=>1, "batch_size"=>125}, "version"=>"6.5.4", "snapshot"=>false, "ephemeral_id"=>"c299cbd5-66e7-478d-8a0e-40032f8e6f73"}, "events"=>{"in"=>0, "duration_in_millis"=>0, "filtered"=>0, "out"=>0}, "pipelines"=>[{"reloads"=>{"successes"=>0, "failures"=>0}, "id"=>"main", "queue"=>{"queue_size_in_bytes"=>0, "max_queue_size_in_bytes"=>0, "events_count"=>0, "type"=>"memory"}, "vertices"=>[{"pipeline_ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051", "id"=>:"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "queue_push_duration_in_millis"=>0, "events_out"=>0}, {"events_in"=>0, "events_out"=>0, "pipeline_ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051", "id"=>:"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "duration_in_millis"=>0}], "events"=>{"in"=>0, "out"=>0, "filtered"=>0, "queue_push_duration_in_millis"=>0, "duration_in_millis"=>0}, "hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051"}], "os"=>{"cgroup"=>{"cpuacct"=>{"control_group"=>"/", "usage_nanos"=>69338691377}, "cpu"=>{"stat"=>{"time_throttled_nanos"=>0, "number_of_times_throttled"=>0, "number_of_elapsed_periods"=>0}, "control_group"=>"/"}}, "cpu"=>{"load_average"=>{"5m"=>0.93, "15m"=>0.82, "1m"=>1.16}}}, "process"=>{"cpu"=>{"percent"=>23}, "max_file_descriptors"=>1048576, "open_file_descriptors"=>88}, "jvm"=>{"gc"=>{"collectors"=>{"old"=>{"collection_count"=>2, "collection_time_in_millis"=>398}, "young"=>{"collection_count"=>42, "collection_time_in_millis"=>4226}}}, "uptime_in_millis"=>79273, "mem"=>{"heap_max_in_bytes"=>1065025536, "heap_used_in_bytes"=>172447704, "heap_used_percent"=>16}}, "queue"=>{"events_count"=>0}, "timestamp"=>2019-02-03T12:58:01.699Z}}
  274. [2019-02-03T12:58:02,330][DEBUG][logstash.pipeline ] output received {"event"=>{"reloads"=>{"successes"=>0, "failures"=>0}, "logstash"=>{"status"=>"green", "host"=>"logstash", "name"=>"logstash", "uuid"=>"9e10fc0f-1d62-453b-9938-94ba049694e0", "http_address"=>"0.0.0.0:9600", "pipeline"=>{"workers"=>1, "batch_size"=>125}, "version"=>"6.5.4", "snapshot"=>false, "ephemeral_id"=>"c299cbd5-66e7-478d-8a0e-40032f8e6f73"}, "events"=>{"in"=>0, "duration_in_millis"=>0, "filtered"=>0, "out"=>0}, "pipelines"=>[{"reloads"=>{"successes"=>0, "failures"=>0}, "id"=>"main", "queue"=>{"queue_size_in_bytes"=>0, "max_queue_size_in_bytes"=>0, "events_count"=>0, "type"=>"memory"}, "vertices"=>[{"pipeline_ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051", "id"=>:"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "queue_push_duration_in_millis"=>0, "events_out"=>0}, {"events_in"=>0, "events_out"=>0, "pipeline_ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051", "id"=>:"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "duration_in_millis"=>0}], "events"=>{"in"=>0, "out"=>0, "filtered"=>0, "queue_push_duration_in_millis"=>0, "duration_in_millis"=>0}, "hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051"}], "os"=>{"cgroup"=>{"cpuacct"=>{"control_group"=>"/", "usage_nanos"=>69338691377}, "cpu"=>{"stat"=>{"time_throttled_nanos"=>0, "number_of_times_throttled"=>0, "number_of_elapsed_periods"=>0}, "control_group"=>"/"}}, "cpu"=>{"load_average"=>{"5m"=>0.93, "15m"=>0.82, "1m"=>1.16}}}, "process"=>{"cpu"=>{"percent"=>23}, "max_file_descriptors"=>1048576, "open_file_descriptors"=>88}, "jvm"=>{"gc"=>{"collectors"=>{"old"=>{"collection_count"=>2, "collection_time_in_millis"=>398}, "young"=>{"collection_count"=>42, "collection_time_in_millis"=>4226}}}, "uptime_in_millis"=>79273, "mem"=>{"heap_max_in_bytes"=>1065025536, "heap_used_in_bytes"=>172447704, "heap_used_percent"=>16}}, "queue"=>{"events_count"=>0}, "timestamp"=>2019-02-03T12:58:01.699Z}}
  275. [2019-02-03T12:58:03,089][DEBUG][logstash.pipeline ] filter received {"event"=>{"pipeline"=>{"id"=>"main", "workers"=>1, "batch_size"=>125, "hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "representation"=>{"hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "version"=>"0.0.0", "plugins"=>[{"version"=>"5.0.6", "name"=>"logstash-codec-cef"}, {"version"=>"3.0.8", "name"=>"logstash-codec-collectd"}, {"version"=>"3.0.6", "name"=>"logstash-codec-dots"}, {"version"=>"3.0.6", "name"=>"logstash-codec-edn"}, {"version"=>"3.0.8", "name"=>"logstash-codec-line"}, {"version"=>"3.0.6", "name"=>"logstash-codec-edn_lines"}, {"version"=>"3.0.6", "name"=>"logstash-codec-es_bulk"}, {"version"=>"3.2.0", "name"=>"logstash-codec-fluent"}, {"version"=>"3.0.5", "name"=>"logstash-codec-graphite"}, {"version"=>"3.0.5", "name"=>"logstash-codec-json"}, {"version"=>"3.0.6", "name"=>"logstash-codec-json_lines"}, {"version"=>"3.0.7", "name"=>"logstash-codec-msgpack"}, {"version"=>"4.1.2", "name"=>"logstash-patterns-core"}, {"version"=>"3.0.10", "name"=>"logstash-codec-multiline"}, {"version"=>"3.14.1", "name"=>"logstash-codec-netflow"}, {"version"=>"3.0.6", "name"=>"logstash-codec-plain"}, {"version"=>"3.0.6", "name"=>"logstash-codec-rubydebug"}, {"version"=>"2.8.0", "name"=>"logstash-filter-aggregate"}, {"version"=>"3.0.6", "name"=>"logstash-filter-anonymize"}, {"version"=>"3.1.2", "name"=>"logstash-filter-cidr"}, {"version"=>"3.0.6", "name"=>"logstash-filter-clone"}, {"version"=>"3.0.8", "name"=>"logstash-filter-csv"}, {"version"=>"3.1.9", "name"=>"logstash-filter-date"}, {"version"=>"1.0.3", "name"=>"logstash-filter-de_dot"}, {"version"=>"1.2.0", "name"=>"logstash-filter-dissect"}, {"version"=>"3.0.11", "name"=>"logstash-filter-dns"}, {"version"=>"3.0.5", "name"=>"logstash-filter-drop"}, {"version"=>"3.4.0", "name"=>"logstash-filter-elasticsearch"}, {"version"=>"3.2.1", "name"=>"logstash-filter-fingerprint"}, {"version"=>"5.0.3", "name"=>"logstash-filter-geoip"}, {"version"=>"4.0.4", "name"=>"logstash-filter-grok"}, {"version"=>"1.0.6", "name"=>"logstash-filter-jdbc_static"}, {"version"=>"1.0.4", "name"=>"logstash-filter-jdbc_streaming"}, {"version"=>"3.0.5", "name"=>"logstash-filter-json"}, {"version"=>"4.2.1", "name"=>"logstash-filter-kv"}, {"version"=>"4.0.5", "name"=>"logstash-filter-metrics"}, {"version"=>"3.3.4", "name"=>"logstash-filter-mutate"}, {"version"=>"3.1.5", "name"=>"logstash-filter-ruby"}, {"version"=>"3.0.6", "name"=>"logstash-filter-sleep"}, {"version"=>"3.1.6", "name"=>"logstash-filter-split"}, {"version"=>"3.0.5", "name"=>"logstash-filter-syslog_pri"}, {"version"=>"4.0.4", "name"=>"logstash-filter-throttle"}, {"version"=>"3.2.3", "name"=>"logstash-filter-translate"}, {"version"=>"1.0.4", "name"=>"logstash-filter-truncate"}, {"version"=>"3.0.6", "name"=>"logstash-filter-urldecode"}, {"version"=>"3.2.3", "name"=>"logstash-filter-useragent"}, {"version"=>"4.0.6", "name"=>"logstash-filter-xml"}, {"version"=>"1.0.4", "name"=>"logstash-input-azure_event_hubs"}, {"version"=>"5.1.6", "name"=>"logstash-input-beats"}, {"version"=>"1.1.4", "name"=>"logstash-input-dead_letter_queue"}, {"version"=>"4.2.1", "name"=>"logstash-input-elasticsearch"}, {"version"=>"3.3.2", "name"=>"logstash-input-exec"}, {"version"=>"4.1.8", "name"=>"logstash-input-file"}, {"version"=>"3.1.4", "name"=>"logstash-input-ganglia"}, {"version"=>"3.1.1", "name"=>"logstash-input-gelf"}, {"version"=>"3.0.6", "name"=>"logstash-input-generator"}, {"version"=>"5.2.0", 
"name"=>"logstash-input-tcp"}, {"version"=>"3.0.6", "name"=>"logstash-input-graphite"}, {"version"=>"3.0.7", "name"=>"logstash-input-heartbeat"}, {"version"=>"3.2.2", "name"=>"logstash-input-http"}, {"version"=>"4.0.5", "name"=>"logstash-input-http_poller"}, {"version"=>"3.0.6", "name"=>"logstash-input-imap"}, {"version"=>"4.3.13", "name"=>"logstash-input-jdbc"}, {"version"=>"8.2.1", "name"=>"logstash-input-kafka"}, {"version"=>"3.0.7", "name"=>"logstash-input-pipe"}, {"version"=>"6.0.3", "name"=>"logstash-input-rabbitmq"}, {"version"=>"3.4.0", "name"=>"logstash-input-redis"}, {"version"=>"3.4.1", "name"=>"logstash-input-s3"}, {"version"=>"1.0.1", "name"=>"logstash-input-snmp"}, {"version"=>"3.0.6", "name"=>"logstash-input-snmptrap"}, {"version"=>"3.1.2", "name"=>"logstash-input-sqs"}, {"version"=>"3.2.6", "name"=>"logstash-input-stdin"}, {"version"=>"3.4.1", "name"=>"logstash-input-syslog"}, {"version"=>"3.0.8", "name"=>"logstash-input-twitter"}, {"version"=>"3.3.4", "name"=>"logstash-input-udp"}, {"version"=>"3.0.7", "name"=>"logstash-input-unix"}, {"version"=>"3.0.8", "name"=>"logstash-output-cloudwatch"}, {"version"=>"4.2.5", "name"=>"logstash-output-file"}, {"version"=>"3.0.7", "name"=>"logstash-output-csv"}, {"version"=>"1.0.0.beta1", "name"=>"logstash-output-elastic_app_search"}, {"version"=>"9.2.4", "name"=>"logstash-output-elasticsearch"}, {"version"=>"4.1.1", "name"=>"logstash-output-email"}, {"version"=>"3.1.6", "name"=>"logstash-output-graphite"}, {"version"=>"5.2.3", "name"=>"logstash-output-http"}, {"version"=>"7.2.1", "name"=>"logstash-output-kafka"}, {"version"=>"3.1.7", "name"=>"logstash-output-lumberjack"}, {"version"=>"3.0.6", "name"=>"logstash-output-nagios"}, {"version"=>"3.0.5", "name"=>"logstash-output-null"}, {"version"=>"3.0.7", "name"=>"logstash-output-pagerduty"}, {"version"=>"3.0.6", "name"=>"logstash-output-pipe"}, {"version"=>"5.1.1", "name"=>"logstash-output-rabbitmq"}, {"version"=>"4.0.4", "name"=>"logstash-output-redis"}, {"version"=>"4.1.7", "name"=>"logstash-output-s3"}, {"version"=>"4.0.7", "name"=>"logstash-output-sns"}, {"version"=>"5.1.2", "name"=>"logstash-output-sqs"}, {"version"=>"3.1.4", "name"=>"logstash-output-stdout"}, {"version"=>"5.0.3", "name"=>"logstash-output-tcp"}, {"version"=>"3.0.6", "name"=>"logstash-output-udp"}, {"version"=>"3.0.6", "name"=>"logstash-output-webhdfs"}], "graph"=>{"edges"=>[{"to"=>"__QUEUE__", "id"=>"fbf30c45ad12455e38902e2eec76385a2983042791450fb92afe6df896c6fbe6", "from"=>"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "type"=>"plain"}, {"to"=>"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "id"=>"a5e5637a74fdd2c2250234c3ab9a39a211cb01dc4dba8098a68f9c197c1825d5", "from"=>"__QUEUE__", "type"=>"plain"}], "vertices"=>[{"id"=>"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "config_name"=>"file", "meta"=>{"source"=>{"column"=>2, "protocol"=>"str", "id"=>"pipeline", "line"=>2}}, "explicit_id"=>false, "plugin_type"=>"input", "type"=>"plugin"}, {"id"=>"__QUEUE__", "meta"=>nil, "explicit_id"=>false, "type"=>"queue"}, {"id"=>"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "config_name"=>"stdout", "meta"=>{"source"=>{"column"=>2, "protocol"=>"str", "id"=>"pipeline", "line"=>10}}, "explicit_id"=>false, "plugin_type"=>"output", "type"=>"plugin"}]}, "type"=>"lir"}, "ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051"}}}
  276. [2019-02-03T12:58:03,099][DEBUG][logstash.pipeline ] output received {"event"=>{"pipeline"=>{"id"=>"main", "workers"=>1, "batch_size"=>125, "hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "representation"=>{"hash"=>"146beeea8be3e571ae18cafeba2dbf41a356780519a15a5de9a003ac7844019d", "version"=>"0.0.0", "plugins"=>[{"version"=>"5.0.6", "name"=>"logstash-codec-cef"}, {"version"=>"3.0.8", "name"=>"logstash-codec-collectd"}, {"version"=>"3.0.6", "name"=>"logstash-codec-dots"}, {"version"=>"3.0.6", "name"=>"logstash-codec-edn"}, {"version"=>"3.0.8", "name"=>"logstash-codec-line"}, {"version"=>"3.0.6", "name"=>"logstash-codec-edn_lines"}, {"version"=>"3.0.6", "name"=>"logstash-codec-es_bulk"}, {"version"=>"3.2.0", "name"=>"logstash-codec-fluent"}, {"version"=>"3.0.5", "name"=>"logstash-codec-graphite"}, {"version"=>"3.0.5", "name"=>"logstash-codec-json"}, {"version"=>"3.0.6", "name"=>"logstash-codec-json_lines"}, {"version"=>"3.0.7", "name"=>"logstash-codec-msgpack"}, {"version"=>"4.1.2", "name"=>"logstash-patterns-core"}, {"version"=>"3.0.10", "name"=>"logstash-codec-multiline"}, {"version"=>"3.14.1", "name"=>"logstash-codec-netflow"}, {"version"=>"3.0.6", "name"=>"logstash-codec-plain"}, {"version"=>"3.0.6", "name"=>"logstash-codec-rubydebug"}, {"version"=>"2.8.0", "name"=>"logstash-filter-aggregate"}, {"version"=>"3.0.6", "name"=>"logstash-filter-anonymize"}, {"version"=>"3.1.2", "name"=>"logstash-filter-cidr"}, {"version"=>"3.0.6", "name"=>"logstash-filter-clone"}, {"version"=>"3.0.8", "name"=>"logstash-filter-csv"}, {"version"=>"3.1.9", "name"=>"logstash-filter-date"}, {"version"=>"1.0.3", "name"=>"logstash-filter-de_dot"}, {"version"=>"1.2.0", "name"=>"logstash-filter-dissect"}, {"version"=>"3.0.11", "name"=>"logstash-filter-dns"}, {"version"=>"3.0.5", "name"=>"logstash-filter-drop"}, {"version"=>"3.4.0", "name"=>"logstash-filter-elasticsearch"}, {"version"=>"3.2.1", "name"=>"logstash-filter-fingerprint"}, {"version"=>"5.0.3", "name"=>"logstash-filter-geoip"}, {"version"=>"4.0.4", "name"=>"logstash-filter-grok"}, {"version"=>"1.0.6", "name"=>"logstash-filter-jdbc_static"}, {"version"=>"1.0.4", "name"=>"logstash-filter-jdbc_streaming"}, {"version"=>"3.0.5", "name"=>"logstash-filter-json"}, {"version"=>"4.2.1", "name"=>"logstash-filter-kv"}, {"version"=>"4.0.5", "name"=>"logstash-filter-metrics"}, {"version"=>"3.3.4", "name"=>"logstash-filter-mutate"}, {"version"=>"3.1.5", "name"=>"logstash-filter-ruby"}, {"version"=>"3.0.6", "name"=>"logstash-filter-sleep"}, {"version"=>"3.1.6", "name"=>"logstash-filter-split"}, {"version"=>"3.0.5", "name"=>"logstash-filter-syslog_pri"}, {"version"=>"4.0.4", "name"=>"logstash-filter-throttle"}, {"version"=>"3.2.3", "name"=>"logstash-filter-translate"}, {"version"=>"1.0.4", "name"=>"logstash-filter-truncate"}, {"version"=>"3.0.6", "name"=>"logstash-filter-urldecode"}, {"version"=>"3.2.3", "name"=>"logstash-filter-useragent"}, {"version"=>"4.0.6", "name"=>"logstash-filter-xml"}, {"version"=>"1.0.4", "name"=>"logstash-input-azure_event_hubs"}, {"version"=>"5.1.6", "name"=>"logstash-input-beats"}, {"version"=>"1.1.4", "name"=>"logstash-input-dead_letter_queue"}, {"version"=>"4.2.1", "name"=>"logstash-input-elasticsearch"}, {"version"=>"3.3.2", "name"=>"logstash-input-exec"}, {"version"=>"4.1.8", "name"=>"logstash-input-file"}, {"version"=>"3.1.4", "name"=>"logstash-input-ganglia"}, {"version"=>"3.1.1", "name"=>"logstash-input-gelf"}, {"version"=>"3.0.6", "name"=>"logstash-input-generator"}, {"version"=>"5.2.0", 
"name"=>"logstash-input-tcp"}, {"version"=>"3.0.6", "name"=>"logstash-input-graphite"}, {"version"=>"3.0.7", "name"=>"logstash-input-heartbeat"}, {"version"=>"3.2.2", "name"=>"logstash-input-http"}, {"version"=>"4.0.5", "name"=>"logstash-input-http_poller"}, {"version"=>"3.0.6", "name"=>"logstash-input-imap"}, {"version"=>"4.3.13", "name"=>"logstash-input-jdbc"}, {"version"=>"8.2.1", "name"=>"logstash-input-kafka"}, {"version"=>"3.0.7", "name"=>"logstash-input-pipe"}, {"version"=>"6.0.3", "name"=>"logstash-input-rabbitmq"}, {"version"=>"3.4.0", "name"=>"logstash-input-redis"}, {"version"=>"3.4.1", "name"=>"logstash-input-s3"}, {"version"=>"1.0.1", "name"=>"logstash-input-snmp"}, {"version"=>"3.0.6", "name"=>"logstash-input-snmptrap"}, {"version"=>"3.1.2", "name"=>"logstash-input-sqs"}, {"version"=>"3.2.6", "name"=>"logstash-input-stdin"}, {"version"=>"3.4.1", "name"=>"logstash-input-syslog"}, {"version"=>"3.0.8", "name"=>"logstash-input-twitter"}, {"version"=>"3.3.4", "name"=>"logstash-input-udp"}, {"version"=>"3.0.7", "name"=>"logstash-input-unix"}, {"version"=>"3.0.8", "name"=>"logstash-output-cloudwatch"}, {"version"=>"4.2.5", "name"=>"logstash-output-file"}, {"version"=>"3.0.7", "name"=>"logstash-output-csv"}, {"version"=>"1.0.0.beta1", "name"=>"logstash-output-elastic_app_search"}, {"version"=>"9.2.4", "name"=>"logstash-output-elasticsearch"}, {"version"=>"4.1.1", "name"=>"logstash-output-email"}, {"version"=>"3.1.6", "name"=>"logstash-output-graphite"}, {"version"=>"5.2.3", "name"=>"logstash-output-http"}, {"version"=>"7.2.1", "name"=>"logstash-output-kafka"}, {"version"=>"3.1.7", "name"=>"logstash-output-lumberjack"}, {"version"=>"3.0.6", "name"=>"logstash-output-nagios"}, {"version"=>"3.0.5", "name"=>"logstash-output-null"}, {"version"=>"3.0.7", "name"=>"logstash-output-pagerduty"}, {"version"=>"3.0.6", "name"=>"logstash-output-pipe"}, {"version"=>"5.1.1", "name"=>"logstash-output-rabbitmq"}, {"version"=>"4.0.4", "name"=>"logstash-output-redis"}, {"version"=>"4.1.7", "name"=>"logstash-output-s3"}, {"version"=>"4.0.7", "name"=>"logstash-output-sns"}, {"version"=>"5.1.2", "name"=>"logstash-output-sqs"}, {"version"=>"3.1.4", "name"=>"logstash-output-stdout"}, {"version"=>"5.0.3", "name"=>"logstash-output-tcp"}, {"version"=>"3.0.6", "name"=>"logstash-output-udp"}, {"version"=>"3.0.6", "name"=>"logstash-output-webhdfs"}], "graph"=>{"edges"=>[{"to"=>"__QUEUE__", "id"=>"fbf30c45ad12455e38902e2eec76385a2983042791450fb92afe6df896c6fbe6", "from"=>"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "type"=>"plain"}, {"to"=>"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "id"=>"a5e5637a74fdd2c2250234c3ab9a39a211cb01dc4dba8098a68f9c197c1825d5", "from"=>"__QUEUE__", "type"=>"plain"}], "vertices"=>[{"id"=>"7c4d0cac26b0aaf9b4af7d3ff738fd8d1ef9295c82a5ae1c5ba31f30b31ef7bb", "config_name"=>"file", "meta"=>{"source"=>{"column"=>2, "protocol"=>"str", "id"=>"pipeline", "line"=>2}}, "explicit_id"=>false, "plugin_type"=>"input", "type"=>"plugin"}, {"id"=>"__QUEUE__", "meta"=>nil, "explicit_id"=>false, "type"=>"queue"}, {"id"=>"85c04da132ea8b64527dcbf216f5fa58f13291047ec8e2b5796f43db0697e08a", "config_name"=>"stdout", "meta"=>{"source"=>{"column"=>2, "protocol"=>"str", "id"=>"pipeline", "line"=>10}}, "explicit_id"=>false, "plugin_type"=>"output", "type"=>"plugin"}]}, "type"=>"lir"}, "ephemeral_id"=>"01d4f528-aa09-4baa-a1ea-41f36775d051"}}}