#=========================== Filebeat prospectors =============================

filebeat.prospectors:

# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations.

- type: log

  # Change to true to enable this prospector configuration.
  enabled: true

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /TEST/*.log
    #- c:\programdata\elasticsearch\logs\*

  # Exclude lines. A list of regular expressions to match. It drops the lines that are
  # matching any regular expression from the list.
  #exclude_lines: ['^DBG']

  # Include lines. A list of regular expressions to match. It exports the lines that are
  # matching any regular expression from the list.
  #include_lines: ['^ERR', '^WARN']

  # Exclude files. A list of regular expressions to match. Filebeat drops the files that
  # are matching any regular expression from the list. By default, no files are dropped.
  #exclude_files: ['.gz$']

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering
  #fields:
  #  level: debug
  #  review: 1
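
  # Note (assumption, not part of the original paste): custom fields like the
  # ones above are grouped under a top-level `fields` key in each event; to put
  # them at the event root instead, the usual companion option is:
  #fields_under_root: true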

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java Stack Traces or C-Line Continuation

  # The regexp pattern that has to be matched. The example pattern matches all lines starting with [
  #multiline.pattern: ^\[

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  #multiline.negate: false

  # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
  # that was (not) matched before or after, or as long as a pattern is not matched based on negate.
  # Note: After is the equivalent to previous and before is the equivalent to next in Logstash
  #multiline.match: after
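
  # A minimal sketch (not in the original paste) of how these three options are
  # commonly combined for Java stack traces: any line that does not start with
  # "[" is appended to the line before it, so a whole trace becomes one event.
  #multiline.pattern: '^\['
  #multiline.negate: true
  #multiline.match: after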


#============================= Filebeat modules ===============================

filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml

  # Set to true to enable config reloading
  reload.enabled: false

  # Period on which files under path should be checked for changes
  #reload.period: 10s
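
  # Sketch (assumption): to actually pick up edits to modules.d/*.yml while
  # Filebeat is running, reloading would be turned on like this:
  #reload.enabled: true
  #reload.period: 10s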

#==================== Elasticsearch template setting ==========================

setup.template.settings:
  index.number_of_shards: 3
  #index.codec: best_compression
  #_source.enabled: false

#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging


#============================== Dashboards =====================================
# These settings control loading the sample dashboards to the Kibana index. Loading
# the dashboards is disabled by default and can be enabled either by setting the
# options here, or by using the `-setup` CLI flag or the `setup` command.
#setup.dashboards.enabled: true

# The URL from where to download the dashboards archive. By default this URL
# has a value which is computed based on the Beat name and version. For released
# versions, this URL points to the dashboard archive on the artifacts.elastic.co
# website.
#setup.dashboards.url:

#============================== Kibana =====================================

# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API.
# This requires a Kibana endpoint configuration.
setup.kibana:

  # Kibana Host
  # Scheme and port can be left out and will be set to the default (http and 5601)
  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
  host: "172.19.32.154:5601"
  username: "kibana"
  password: "kibana"

#============================= Elastic Cloud ==================================

# These settings simplify using filebeat with the Elastic Cloud (https://cloud.elastic.co/).

# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
# `setup.kibana.host` options.
# You can find the `cloud.id` in the Elastic Cloud web UI.
#cloud.id:

# The cloud.auth setting overwrites the `output.elasticsearch.username` and
# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
#cloud.auth:

#================================ Outputs =====================================

# Configure what output to use when sending the data collected by the beat.

#-------------------------- Elasticsearch output ------------------------------
#output.elasticsearch:
  # Array of hosts to connect to.
  #hosts: ["localhost:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "elastic"

#----------------------------- Logstash output --------------------------------
output.logstash:
  # The Logstash hosts
  hosts: ["127.0.0.1:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"
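
  # Sketch (assumption; host names are placeholders, not from this config): the
  # Logstash output above could also balance events across several endpoints
  # instead of always sending to the first reachable host:
  #hosts: ["logstash1.example.com:5044", "logstash2.example.com:5044"]
  #loadbalance: true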

#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]

#============================== Xpack Monitoring ===============================
# filebeat can export internal metrics to a central Elasticsearch monitoring
# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The
# reporting is disabled by default.

# Set to true to enable the monitoring reporter.
#xpack.monitoring.enabled: false

# Uncomment to send the metrics to Elasticsearch. Most settings from the
# Elasticsearch output are accepted here as well. Any setting that is not set is
# automatically inherited from the Elasticsearch output configuration, so if you
# have the Elasticsearch output configured, you can simply uncomment the
# following line.
#xpack.monitoring.elasticsearch:
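
# Sketch (assumption; host and credentials are placeholders): because this file
# ships events to Logstash and the Elasticsearch output is commented out,
# monitoring would need an explicit target, roughly like this:
#xpack.monitoring.elasticsearch:
#  hosts: ["elasticsearch.example.com:9200"]
#  username: "beats_system"
#  password: "changeme"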