###################### Filebeat Configuration Example #########################

# This file is an example configuration file highlighting only the most common
# options. The filebeat.full.yml file from the same directory contains all the
# supported options with more comments. You can use it as a reference.
#
# You can find the full configuration reference here:
# https://www.elastic.co/guide/en/beats/filebeat/index.html

#=========================== Filebeat prospectors =============================

filebeat.prospectors:
# Each - is a prospector. Most options can be set at the prospector level, so
# you can use different prospectors for various configurations.
# Below are the prospector specific configurations; a commented example of an
# additional prospector follows the first one.

- input_type: log

  # Paths that should be crawled and fetched. Glob based paths.
  paths:
    - /var/log/*.log
    - C:\sac4\elasticsearch\logs\*.log

  # Exclude lines. A list of regular expressions to match. It drops the lines
  # that match any regular expression from the list.
  #exclude_lines: ["^DBG"]

  # Include lines. A list of regular expressions to match. It exports the lines
  # that match any regular expression from the list.
  #include_lines: ["^ERR", "^WARN"]

  # Exclude files. A list of regular expressions to match. Filebeat drops the files
  # that match any regular expression from the list. By default, no files are dropped.
  #exclude_files: [".gz$"]

  # Optional additional fields. These fields can be freely picked
  # to add additional information to the crawled log files for filtering.
  #fields:
  #  level: debug
  #  review: 1

  ### Multiline options

  # Multiline can be used for log messages spanning multiple lines. This is common
  # for Java stack traces or C-line continuation.

  # The regexp pattern that has to be matched. The example pattern matches all lines starting with [
  #multiline.pattern: ^\[

  # Defines if the pattern set under pattern should be negated or not. Default is false.
  #multiline.negate: false

  # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
  # that was (not) matched before or after, or as long as a pattern is not matched based on negate.
  # Note: "after" is the equivalent to "previous" and "before" is the equivalent to "next" in Logstash.
  #multiline.match: after
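
  # Illustrative sketch (commented out, not part of the original file): a
  # multiline setup for Java stack traces, assuming continuation lines start
  # with whitespace followed by "at" or "..." or with "Caused by:". Adjust the
  # pattern to your own log format before enabling it.
  #multiline.pattern: '^[[:space:]]+(at|\.{3})[[:space:]]+\b|^Caused by:'
  #multiline.negate: false
  #multiline.match: after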

  #================================ JSON =====================================

  ### JSON configuration
  # Decode JSON options. Enable this if your logs are structured in JSON.
  # JSON key on which to apply the line filtering and multiline settings. This key
  # must be top level and its value must be string, otherwise it is ignored. If
  # no text key is defined, the line filtering and multiline features cannot be used.
  #json.message_key:
  json.message_key: message

  # By default, the decoded JSON is placed under a "json" key in the output document.
  # If you enable this setting, the keys are copied top level in the output document.
  #json.keys_under_root: false
  json.keys_under_root: true

  # If keys_under_root and this setting are enabled, then the values from the decoded
  # JSON object overwrite the fields that Filebeat normally adds (type, source, offset, etc.)
  # in case of conflicts.
  json.overwrite_keys: true

  # If this setting is enabled, Filebeat adds a "json_error" key in case of JSON
  # unmarshaling errors or when a text key is defined in the configuration but cannot
  # be used.
  #json.add_error_key: false
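
  # Illustration (the log line below is an assumption, not taken from this
  # setup): with the settings above, a line such as
  #   {"message": "user logged in", "level": "info", "user": "alice"}
  # is decoded so that "message", "level" and "user" become top-level fields of
  # the event (keys_under_root), conflicting Filebeat fields are overwritten
  # (overwrite_keys), and line filtering / multiline grouping operate on the
  # "message" value (json.message_key).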
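
# Sketch of an additional prospector (commented out; the path and field are
# assumptions for illustration, not part of the original file). Each
# "- input_type" entry is its own prospector with its own paths, filtering,
# multiline and JSON options.
#- input_type: log
#  paths:
#    - /var/log/nginx/access.log
#  fields:
#    service: nginx
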
#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name:

# The tags of the shipper are included in their own field with each
# transaction published.
#tags: ["service-X", "web-tier"]

# Optional fields that you can specify to add additional information to the
# output.
#fields:
#  env: staging

#================================ Outputs =====================================

# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.

#-------------------------- Elasticsearch output ------------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ["localhost:9200"]
  # Maximum number of events to bundle into a single bulk request.
  bulk_max_size: 8192
  # Number of workers publishing events to Elasticsearch in parallel.
  workers: 4

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

  #-------------------------- Templates ----------------------------------
  # A template is used to set the mapping in Elasticsearch.
  # By default template loading is enabled and the template is loaded.
  # These settings can be adjusted to load your own template or overwrite existing ones.

  # Set to false to disable template loading.
  template.enabled: true

  # Template name. By default the template name is filebeat.
  template.name: "prueba"

  # Path to template file.
  template.path: "prueba.template.json"

  # Overwrite existing template.
  #template.overwrite: true

  # If set to true, filebeat checks the Elasticsearch version at connect time, and if it
  # is 2.x, it loads the file specified by the template.versions.2x.path setting. The
  # default is true.
  template.versions.2x.enabled: false

  # Path to the Elasticsearch 2.x version of the template file.
  #template.versions.2x.path: "${path.config}/filebeat.template-es2x.json"

  # If set to true, filebeat checks the Elasticsearch version at connect time, and if it
  # is 6.x, it loads the file specified by the template.versions.6x.path setting. The
  # default is true.
  template.versions.6x.enabled: false

  # Path to the Elasticsearch 6.x version of the template file.
  #template.versions.6x.path: "${path.config}/filebeat.template-es6x.json"


#------------------------------- Kibana ---------------------------------------
# Endpoint of the Kibana instance. Note that setup.kibana is a top-level
# setting, not part of the Elasticsearch output.
setup.kibana:
  host: "localhost:5601"

#----------------------------- Logstash output --------------------------------
#output.logstash:
  # The Logstash hosts
  #hosts: ["localhost:5044"]

  # Optional SSL. By default it is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]

  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"

  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"


#================================ Logging =====================================

# Sets log level. The default log level is info.
# Available log levels are: critical, error, warning, info, debug
#logging.level: debug

# At debug level, you can selectively enable logging only for some components.
# To enable all selectors use ["*"]. Examples of other selectors are "beat",
# "publish", "service".
#logging.selectors: ["*"]
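
# Illustrative sketch (commented out, not part of the original file): enable
# debug output only for the publisher component. Both settings are needed,
# since selectors only take effect at the debug level.
#logging.level: debug
#logging.selectors: ["publish"]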