Advertisement
Guest User

Untitled

a guest
Apr 12th, 2016
276
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 8.92 KB | None | 0 0
  1. [Main]
  2.  
  3. # The host of the Datadog intake server to send Agent data to
  4. dd_url: https://app.datadoghq.com
  5.  
  6. # If you need a proxy to connect to the Internet, provide the settings here
  7. # proxy_host: my-proxy.com
  8. # proxy_port: 3128
  9. # proxy_user: user
  10. # proxy_password: password
  11. # To be used with some proxies that return a 302, which makes curl switch from POST to GET
  12. # See http://stackoverflow.com/questions/8156073/curl-violate-rfc-2616-10-3-2-and-switch-from-post-to-get
  13. # proxy_forbid_method_switch: no
  14.  
  15. # If you run the agent behind haproxy, you might want to set this to yes
  16. # skip_ssl_validation: no
  17.  
  18. # The Datadog api key to associate your Agent's data with your organization.
  19. # Can be found here:
  20. # https://app.datadoghq.com/account/settings
  21. api_key: <redacted>
  22.  
  23. # Force the hostname to whatever you want.
  24. #hostname: mymachine.mydomain
  25.  
  26. # Set the host's tags
  27. tags: env:k8s-exp
  28.  
  29. # Add one "dd_check:checkname" tag per running check. It makes it possible to slice
  30. # and dice per monitored app (= running Agent Check) on Datadog's backend.
  31. # create_dd_check_tags: no
  32.  
  33. # Collect AWS EC2 custom tags as agent tags (requires an IAM role associated with the instance)
  34. # collect_ec2_tags: no
  35.  
  36. # Incorporate security-groups into tags collected from AWS EC2
  37. # collect_security_groups: no
  38.  
  39. # Enable Agent Developer Mode
  40. # Agent Developer Mode collects and sends more fine-grained metrics about agent and check performance
  41. # developer_mode: no
  42. # In developer mode, the number of runs to be included in a single collector profile
  43. # collector_profile_interval: 20
  44.  
  45. # Collect instance metadata
  46. # The Agent will try to collect instance metadata for EC2 and GCE instances by
  47. # trying to connect to the local endpoint: http://169.254.169.254
  48. # See http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AESDG-chapter-instancedata.html
  49. # and https://developers.google.com/compute/docs/metadata
  50. # for more information
  51. # collect_instance_metadata: yes
  52.  
  53. # use unique hostname for GCE hosts, see http://dtdg.co/1eAynZk
  54. gce_updated_hostname: yes
  55.  
  56. # Set the threshold for accepting points to allow anything
  57. # with recent_point_threshold seconds
  58. # Defaults to 30 seconds if no value is provided
  59. # recent_point_threshold: 30
  60.  
  61. # Use mount points instead of volumes to track disk and fs metrics
  62. # DEPRECATED: use conf.d/disk.yaml instead to configure it
  63. use_mount: no
  64.  
  65. # Change port the Agent is listening to
  66. # listen_port: 17123
  67.  
  68. # Start a graphite listener on this port
  69. # graphite_listen_port: 17124
  70.  
  71. # Additional directory to look for Datadog checks
  72. # additional_checksd: /etc/dd-agent/checks.d/
  73.  
  74. # Allow non-local traffic to this Agent
  75. # This is required when using this Agent as a proxy for other Agents
  76. # that might not have an internet connection
  77. # For more information, please see
  78. # https://github.com/DataDog/dd-agent/wiki/Network-Traffic-and-Proxy-Configuration
  79. non_local_traffic: yes
  80.  
  81. # Select the Tornado HTTP Client in the forwarder
  82. # Default to the simple http client
  83. # use_curl_http_client: False
  84.  
  85. # The loopback address the Forwarder and Dogstatsd will bind.
  86. # Optional, it is mainly used when running the agent on Openshift
  87. # bind_host: localhost
  88.  
  89. # If enabled the collector will capture a metric for check run times.
  90. # check_timings: no
  91.  
  92. # If you want to remove the 'ww' flag from ps catching the arguments of processes
  93. # for instance for security reasons
  94. # exclude_process_args: no
  95.  
  96. # histogram_aggregates: max, median, avg, count
  97. # histogram_percentiles: 0.95
  98.  
  99. # ========================================================================== #
  100. # DogStatsd configuration #
  101. # ========================================================================== #
  102.  
  103. # If you don't want to enable the DogStatsd server, set this option to no
  104. # use_dogstatsd: yes
  105.  
  106. # DogStatsd is a small server that aggregates your custom app metrics. For
  107. # usage information, check out http://docs.datadoghq.com/guides/dogstatsd/
  108.  
  109. # Make sure your client is sending to the same port.
  110. # dogstatsd_port : 8125
  111.  
  112. # By default dogstatsd will post aggregate metrics to the Agent (which handles
  113. # errors/timeouts/retries/etc). To send directly to the datadog api, set this
  114. # to https://app.datadoghq.com.
  115. # dogstatsd_target : http://localhost:17123
  116.  
  117. # If you want to forward every packet received by the dogstatsd server
  118. # to another statsd server, uncomment these lines.
  119. # WARNING: Make sure that forwarded packets are regular statsd packets and not "dogstatsd" packets,
  120. # as your other statsd server might not be able to handle them.
  121. # statsd_forward_host: address_of_own_statsd_server
  122. # statsd_forward_port: 8125
  123.  
  124. # you may want all statsd metrics coming from this host to be namespaced
  125. # in some way; if so, configure your namespace here. a metric that looks
  126. # like `metric.name` will instead become `namespace.metric.name`
  127. # statsd_metric_namespace:
  128.  
  129. # By default, dogstatsd supports only plain ASCII packets. However, most
  130. # (dog)statsd clients support UTF-8 by encoding packets before sending them
  131. # this option enables UTF8 decoding in case you need it.
  132. # However, it comes with a performance overhead of ~10% in the dogstatsd
  133. # server. This will be taken care of properly in the new gen agent core.
  134. # utf8_decoding: false
  135.  
  136. # ========================================================================== #
  137. # Service-specific configuration #
  138. # ========================================================================== #
  139.  
  140. # -------------------------------------------------------------------------- #
  141. # Disk #
  142. # -------------------------------------------------------------------------- #
  143.  
  144. # Some infrastructures have many constantly changing virtual devices (e.g. folks
  145. # running constantly churning linux containers) whose metrics aren't
  146. # interesting for datadog. To filter out a particular pattern of devices
  147. # from collection, configure a regex here:
  148. # DEPRECATED: use conf.d/disk.yaml instead to configure it
  149. # device_blacklist_re: .*\/dev\/mapper\/lxc-box.*
  150.  
  151. # -------------------------------------------------------------------------- #
  152. # Ganglia #
  153. # -------------------------------------------------------------------------- #
  154.  
  155. # Ganglia host where gmetad is running
  156. #ganglia_host: localhost
  157.  
  158. # Ganglia port where gmetad is running
  159. #ganglia_port: 8651
  160.  
  161. # -------------------------------------------------------------------------- #
  162. # Dogstream (log file parser)
  163. # -------------------------------------------------------------------------- #
  164.  
  165. # Comma-separated list of logs to parse and optionally custom parsers to use.
  166. # The form should look like this:
  167. #
  168. # dogstreams: /path/to/log1:parsers_module:custom_parser, /path/to/log2, /path/to/log3, ...
  169. #
  170. # Or this:
  171. #
  172. # dogstreams: /path/to/log1:/path/to/my/parsers_module.py:custom_parser, /path/to/log2, /path/to/log3, ...
  173. #
  174. # Each entry is a path to a log file and optionally a Python module/function pair
  175. # separated by colons.
  176. #
  177. # Custom parsers should take 2 parameters, a logger object and
  178. # a string parameter of the current line to parse. It should return a tuple of
  179. # the form:
  180. # (metric (str), timestamp (unix timestamp), value (float), attributes (dict))
  181. # where attributes should at least contain the key 'metric_type', specifying
  182. # whether the given metric is a 'counter' or 'gauge'.
  183. #
  184. # Unless parsers are specified with an absolute path, the modules must exist in
  185. # the Agent's PYTHONPATH. You can set this as an environment variable when
  186. # starting the Agent. If the name of the custom parser function is not passed,
  187. # 'parser' is assumed.
  188. #
  189. # If this value isn't specified, the default parser assumes this log format:
  190. # metric timestamp value key0=val0 key1=val1 ...
  191. #
  192.  
  193. # ========================================================================== #
  194. # Custom Emitters #
  195. # ========================================================================== #
  196.  
  197. # Comma-separated list of emitters to be used in addition to the standard one
  198. #
  199. # Expected to be passed as a comma-separated list of colon-delimited
  200. # name/object pairs.
  201. #
  202. # custom_emitters: /usr/local/my-code/emitters/rabbitmq.py:RabbitMQEmitter
  203. #
  204. # If the name of the emitter function is not specified, 'emitter' is assumed.
  205.  
  206.  
  207. # ========================================================================== #
  208. # Logging
  209. # ========================================================================== #
  210.  
  211. log_level: DEBUG
  212.  
  213. # collector_log_file: /var/log/datadog/collector.log
  214. # forwarder_log_file: /var/log/datadog/forwarder.log
  215. # dogstatsd_log_file: /var/log/datadog/dogstatsd.log
  216.  
  217. # if syslog is enabled but a host and port are not set, a local domain socket
  218. # connection will be attempted
  219. #
  220. log_to_syslog: no
  221. # syslog_host:
  222. # syslog_port:
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement