- # Version 6.5.1
- # DO NOT EDIT THIS FILE!
- # Changes to default files will be lost on update and are difficult to
- # manage and support.
- #
- # Please make any changes to system defaults by overriding them in
- # apps or $SPLUNK_HOME/etc/system/local
- # (See "Configuration file precedence" in the web documentation).
- #
- # To override a specific setting, copy the name of the stanza and
- # setting to the file where you wish to override it.
- #
- # This file contains possible attribute/value pairs for configuring
- # Splunk's processing properties.
- #
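- # For example, to raise the TRUNCATE limit for the syslog sourcetype, you
- # would create (or edit) $SPLUNK_HOME/etc/system/local/props.conf and add
- # only the stanza and setting you want to change, rather than editing this
- # file. A minimal sketch (the value shown is illustrative only):
- #
- # [syslog]
- # TRUNCATE = 20000
- #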
- [default]
- CHARSET = UTF-8
- LINE_BREAKER_LOOKBEHIND = 100
- TRUNCATE = 10000
- DATETIME_CONFIG = /etc/datetime.xml
- ANNOTATE_PUNCT = True
- HEADER_MODE =
- MAX_DAYS_HENCE=2
- MAX_DAYS_AGO=2000
- MAX_DIFF_SECS_AGO=3600
- MAX_DIFF_SECS_HENCE=604800
- MAX_TIMESTAMP_LOOKAHEAD = 128
- SHOULD_LINEMERGE = True
- BREAK_ONLY_BEFORE =
- BREAK_ONLY_BEFORE_DATE = True
- MAX_EVENTS = 256
- MUST_BREAK_AFTER =
- MUST_NOT_BREAK_AFTER =
- MUST_NOT_BREAK_BEFORE =
- TRANSFORMS =
- SEGMENTATION = indexing
- SEGMENTATION-all = full
- SEGMENTATION-inner = inner
- SEGMENTATION-outer = outer
- SEGMENTATION-raw = none
- SEGMENTATION-standard = standard
- LEARN_SOURCETYPE = true
- LEARN_MODEL = true
- maxDist = 100
- AUTO_KV_JSON = true
- detect_trailing_nulls = false
- sourcetype =
- priority =
- ########## APPLICATION SERVERS ##########
- [log4j]
- BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
- pulldown_type = true
- maxDist = 75
- category = Application
- description = Output produced by any Java 2 Enterprise Edition (J2EE) application server using log4j
- [log4php]
- pulldown_type = true
- BREAK_ONLY_BEFORE = ^\w{3} \w{3}
- category = Application
- description = Output produced by a machine that runs the log4php logging utility
- [weblogic_stdout]
- pulldown_type = true
- maxDist = 60
- MAX_TIMESTAMP_LOOKAHEAD = 32
- MAX_EVENTS = 2048
- REPORT-st = weblogic-code
- category = Application
- description = Output produced by the Oracle WebLogic Java EE application server
- [websphere_activity]
- pulldown_type = true
- BREAK_ONLY_BEFORE = ^-----
- MAX_TIMESTAMP_LOOKAHEAD = 500
- REPORT-st = colon-line
- category = Application
- description = Activity logs produced by the IBM WebSphere application server
- [websphere_core]
- pulldown_type = true
- maxDist = 70
- BREAK_ONLY_BEFORE = ^NULL\s
- category = Application
- description = Output produced by the IBM WebSphere application server
- [websphere_trlog]
- pulldown_type = true
- REPORT-st = was-trlog-code
- category = Application
- description = Trace output produced by the IBM WebSphere application server
- [log4net_xml]
- maxDist = 75
- NO_BINARY_CHECK = 1
- SHOULD_LINEMERGE = true
- BREAK_ONLY_BEFORE = <log4net:event
- TIME_PREFIX = timestamp="
- MAX_EVENTS = 1000
- pulldown_type = 1
- category = Application
- description = XML-formatted output produced by Apache log4net, the port of the log4j framework to the Microsoft .NET runtime
- [catalina]
- BREAK_ONLY_BEFORE_DATE = true
- SHOULD_LINEMERGE=true
- MAX_TIMESTAMP_LOOKAHEAD=30
- TIME_PREFIX = ^
- pulldown_type = 1
- category = Application
- description = Output produced by Apache Tomcat Catalina (System.out and System.err)
- [ruby_on_rails]
- TIME_PREFIX = (for [\d\.]+ at\s)
- TIME_FORMAT = %Y-%m-%d %H:%M:%S %Z
- BREAK_ONLY_BEFORE = Processing
- pulldown_type = 1
- category = Application
- description = Output produced by the Ruby on Rails web application framework
- ########## ARCHIVES ##########
- [preprocess-bzip]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- [preprocess-Z]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- [preprocess-gzip]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- [preprocess-tar]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- [preprocess-zip]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- [preprocess-targz]
- invalid_cause = archive
- is_valid = False
- LEARN_MODEL = false
- ########## DATABASES ##########
- [db2_diag]
- pulldown_type = 1
- maxDist = 90
- REPORT-st = db2
- category = Database
- description = Diagnostic output produced by the IBM DB2 database server
- [mysqld]
- pulldown_type = 1
- maxDist = 20
- BREAK_ONLY_BEFORE = ^\d{6}\s
- TIME_FORMAT = %y%m%d %k:%M:%S
- category = Database
- description = Output produced by the MySQL database server
- [mysqld_error]
- pulldown_type = 1
- maxDist = 50
- MAX_EVENTS = 1024
- BREAK_ONLY_BEFORE = ^\d{6}\s
- category = Database
- description = Errors produced by the MySQL database server
- [mysqld_bin]
- pulldown_type = 1
- maxDist = 20
- BREAK_ONLY_BEFORE = ^#\d{6}
- category = Database
- description = Binary log output produced by the MySQL database server
- [mysql_slow]
- SHOULD_LINEMERGE = true
- TIME_FORMAT = Time: %y%m%d %k:%M:%S %Z
- BREAK_ONLY_BEFORE = #\sTime:\s\d{6}\s[\s\d]\d:\d\d:\d\d
- MAX_EVENTS = 512
- pulldown_type = 1
- category = Database
- description = Slow query log output produced by the MySQL database server
- ########## EMAIL ##########
- [exim_main]
- SHOULD_LINEMERGE = False
- [exim_reject]
- SHOULD_LINEMERGE = False
- [postfix_syslog]
- pulldown_type = 1
- MAX_TIMESTAMP_LOOKAHEAD = 32
- TIME_FORMAT = %b %d %H:%M:%S
- TRANSFORMS-host = syslog-host
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- category = Email
- description = Output produced by the Postfix email server
- [sendmail_syslog]
- pulldown_type = 1
- MAX_TIMESTAMP_LOOKAHEAD = 32
- SHOULD_LINEMERGE = False
- TIME_FORMAT = %b %d %H:%M:%S
- TRANSFORMS = syslog-host
- REPORT-syslog = sendmail-extractions
- category = Email
- description = Output produced by the Sendmail email server
- [procmail]
- pulldown_type = 1
- BREAK_ONLY_BEFORE = procmail: \[\d+\]
- MAX_TIMESTAMP_LOOKAHEAD = 64
- category = Email
- description = Output produced by the Procmail mail delivery agent
- ########## OSs ##########
- [linux_messages_syslog]
- pulldown_type = 1
- MAX_TIMESTAMP_LOOKAHEAD = 32
- TIME_FORMAT = %b %d %H:%M:%S
- TRANSFORMS = syslog-host
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- category = Operating System
- description = Format found within the Linux log file /var/log/messages
- [linux_secure]
- pulldown_type = 1
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- category = Operating System
- description = Format for the /var/log/secure file containing all security related messages on a Linux machine
- [linux_audit]
- pulldown_type = 1
- BREAK_ONLY_BEFORE_DATE = False
- category = Operating System
- description = Output produced by the auditd system daemon used to track changes on a Linux machine
- [linux_bootlog]
- BREAK_ONLY_BEFORE_DATE = False
- [anaconda]
- BREAK_ONLY_BEFORE = ^\*
- [anaconda_syslog]
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- TIME_FORMAT = %b %d %H:%M:%S
- [osx_asl]
- BREAK_ONLY_BEFORE_DATE = False
- REPORT-asl = bracket-space
- [osx_crashreporter]
- BREAK_ONLY_BEFORE_DATE = False
- [osx_crash_log]
- BREAK_ONLY_BEFORE = gooblygook
- MAX_EVENTS = 200000
- [osx_install]
- BREAK_ONLY_BEFORE_DATE = False
- [osx_secure]
- BREAK_ONLY_BEFORE_DATE = False
- [osx_daily]
- BREAK_ONLY_BEFORE = ^(Sun|Mon|Tue|Wed|Thu|Fri|Sat)
- [osx_weekly]
- BREAK_ONLY_BEFORE = ^(Sun|Mon|Tue|Wed|Thu|Fri|Sat)
- [osx_monthly]
- BREAK_ONLY_BEFORE = ^(Sun|Mon|Tue|Wed|Thu|Fri|Sat)
- [osx_window_server]
- SHOULD_LINEMERGE = False
- [windows_snare_syslog]
- pulldown_type = 1
- MAX_TIMESTAMP_LOOKAHEAD = 32
- TRANSFORMS = syslog-host
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- TIME_FORMAT = %b %d %H:%M:%S
- category = Operating System
- description = Output produced by the Snare syslog server on Windows
- [dmesg]
- pulldown_type = 1
- BREAK_ONLY_BEFORE = ^\S
- DATETIME_CONFIG = NONE
- category = Operating System
- description = Output produced by the "dmesg" *nix command, printing the *nix kernel ring buffer
- [ftp]
- pulldown_type = 0
- BREAK_ONLY_BEFORE_DATE = False
- [ssl_error]
- pulldown_type = 0
- BREAK_ONLY_BEFORE_DATE = False
- [syslog]
- pulldown_type = true
- maxDist = 3
- TIME_FORMAT = %b %d %H:%M:%S
- MAX_TIMESTAMP_LOOKAHEAD = 32
- TRANSFORMS = syslog-host
- REPORT-syslog = syslog-extractions
- SHOULD_LINEMERGE = False
- category = Operating System
- description = Output produced by many syslog daemons, as described in RFC3164 by the IETF
- [sar]
- ; break on blank lines, clock resets, or common header attributes (/s, %, or alpha-)
- BREAK_ONLY_BEFORE = (?:^\s*$)|00:00:0|/s|%|[a-z]-
- MAX_EVENTS = 1000
- [rpmpkgs]
- BREAK_ONLY_BEFORE_DATE = False
- LEARN_MODEL = false
- ########## NETWORK ##########
- [novell_groupwise]
- SHOULD_LINEMERGE = False
- MAX_TIMESTAMP_LOOKAHEAD = 9
- TRANSFORMS-nov = novell-groupwise-arrival,novell-groupwise-queue,novell-groupwise-transfer
- [tcp]
- BREAK_ONLY_BEFORE = (=\+)+
- KV_MODE = none
- REPORT-tcp = tcpdump-endpoints, colon-kv
- ########## PRINTERS ##########
- [cups_access]
- BREAK_ONLY_BEFORE_DATE = False
- [cups_error]
- BREAK_ONLY_BEFORE_DATE = False
- [spooler]
- BREAK_ONLY_BEFORE_DATE = False
- ########## ROUTERS AND FIREWALLS ##########
- [cisco_cdr]
- maxDist = 1
- SHOULD_LINEMERGE = False
- [cisco_syslog]
- pulldown_type = 0
- MAX_TIMESTAMP_LOOKAHEAD = 32
- SHOULD_LINEMERGE = False
- TIME_FORMAT = %b %d %H:%M:%S
- TRANSFORMS = syslog-host
- REPORT-syslog = syslog-extractions
- [cisco:asa]
- SHOULD_LINEMERGE = false
- pulldown_type = 1
- category = Network & Security
- description = Output produced by the Cisco Adaptive Security Appliance (ASA) Firewall
- [clavister]
- SHOULD_LINEMERGE = False
- ########## VoIP ##########
- [asterisk_cdr]
- MAX_TIMESTAMP_LOOKAHEAD = 256
- SHOULD_LINEMERGE = False
- [asterisk_event]
- maxDist = 3
- SHOULD_LINEMERGE = False
- [asterisk_messages]
- SHOULD_LINEMERGE = False
- [asterisk_queue]
- SHOULD_LINEMERGE = False
- ########## WEBSERVERS ##########
- [access_combined]
- pulldown_type = true
- maxDist = 28
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- category = Web
- description = National Center for Supercomputing Applications (NCSA) combined format HTTP web server logs (can be generated by apache or other web servers)
- [access_combined_wcookie]
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- [access_common]
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- [apache_error]
- pulldown_type = true
- maxDist = 50
- MAX_TIMESTAMP_LOOKAHEAD = 128
- BREAK_ONLY_BEFORE = ^\[
- TIME_FORMAT = [%A %B %d %T %Y]
- category = Web
- description = Error log format produced by the Apache web server (typically error_log on *nix systems)
- [iis]
- pulldown_type = true
- MAX_TIMESTAMP_LOOKAHEAD = 32
- SHOULD_LINEMERGE = False
- INDEXED_EXTRACTIONS = w3c
- detect_trailing_nulls = auto
- category = Web
- description = W3C Extended log format produced by the Microsoft Internet Information Services (IIS) web server
- ########## MISC ##########
- [snort]
- pulldown_type = true
- BREAK_ONLY_BEFORE = (=\+)+
- KV_MODE = none
- REPORT-tcp = tcpdump-endpoints, colon-kv
- category = Network & Security
- description = Output produced by the Snort network intrusion detection/prevention application
- ########## SPLUNK ##########
- [splunk_com_php_error]
- maxDist = 70
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunkd]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- # splunkd TIME_FORMAT should be kept in synch with
- # - etc/log.cfg
- # - src/framework/SplunkdTimestamp.cpp
- # This format won't, of course, match all older forwarders, but regex fallback
- # will handle those cases
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- # logs from old forwarders (4.1 and prior) will not match the TIME_FORMAT, due to a lack of timezone.
- # This will cause some events to be merged.
- # Disable multiline support to get these cases right.
- # Splunkd data prior to 5.0 can have multiline events, but it is quite rare
- # (debug output and bugs mostly), and 5.0+ explicitly disallows generating such output.
- SHOULD_LINEMERGE = false
- [splunkd_crash_log]
- SHOULD_LINEMERGE = True
- MUST_BREAK_AFTER = ^(?i)terminating\.\.\.
- MAX_TIMESTAMP_LOOKAHEAD = 1
- DATETIME_CONFIG = NONE
- MAX_EVENTS = 2048
- [splunkd_misc]
- SHOULD_LINEMERGE = False
- MAX_TIMESTAMP_LOOKAHEAD = 1
- [splunkd_stderr]
- TIME_FORMAT = %m-%d-%Y %T.%Q %z
- SHOULD_LINEMERGE = False
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunk-blocksignature]
- SEGMENTATION = whitespace-only
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunk_directory_monitor]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunk_directory_monitor_misc]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunk_search_history]
- BREAK_ONLY_BEFORE = ^\d
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [splunkd_remote_searches]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- SHOULD_LINEMERGE = false
- [splunkd_access]
- maxDist = 28
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions, extract_spent
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- [splunkd_ui_access]
- maxDist = 28
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions, extract_spent
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- [splunk_web_access]
- maxDist = 28
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- EXTRACT-extract_spent = \s(?<spent>\d+(\.\d+)?)ms$
- [splunk_web_service]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- REPORT-fields = splunk-service-extractions
- [splunkd_conf]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- [django_access]
- maxDist = 28
- MAX_TIMESTAMP_LOOKAHEAD = 128
- REPORT-access = access-extractions
- SHOULD_LINEMERGE = False
- TIME_PREFIX = \[
- EXTRACT-extract_spent = \s(?<spent>\d+(\.\d+)?)ms$
- [django_service]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- TRUNCATE=100000
- [django_error]
- MAX_TIMESTAMP_LOOKAHEAD = 40
- TRUNCATE=100000
- [splunk_help]
- BREAK_ONLY_BEFORE = gooblygook
- MAX_EVENTS = 200000
- TRANSFORMS-help = splunk_help
- [mongod]
- TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3N%Z
- SHOULD_LINEMERGE = False
- MAX_TIMESTAMP_LOOKAHEAD = 40
- [source::.../var/log/splunk/searchhistory.log(.\d+)?]
- TRANSFORMS = splunk_index_history
- sourcetype = splunk_search_history
- [source::.../var/log/splunk/(web|report)_access(-\d+)?.log(.\d+)?]
- sourcetype = splunk_web_access
- [source::.../var/log/splunk/(web|report)_service(-\d+)?.log(.\d+)?]
- sourcetype = splunk_web_service
- [source::.../var/log/splunk/metrics.log(.\d+)?]
- sourcetype = splunkd
- [source::.../var/log/splunk/license_usage(|_summary).log(.\d+)?]
- sourcetype = splunkd
- [source::.../var/log/splunk/django_access.log(.\d+)?]
- sourcetype = django_access
- [source::.../var/log/splunk/django_service.log(.\d+)?]
- sourcetype = django_service
- [source::.../var/log/splunk/django_error.log(.\d+)?]
- sourcetype = django_error
- [source::.../splunkd.log(.\d+)?]
- sourcetype = splunkd
- [source::.../var/log/splunk/splunkd-utility.log(.\d+)?]
- sourcetype = splunkd
- [source::.../var/log/splunk/scheduler.log(.\d+)?]
- sourcetype = scheduler
- [source::.../var/log/splunk/audit.log(.\d+)?]
- TRANSFORMS = send_to_nullqueue
- sourcetype = splunk_audit
- [source::.../var/log/splunk/btool.log(.\d+)?]
- sourcetype = splunk_btool
- [source::.../var/log/splunk/intentions.log(.\d+)?]
- sourcetype = splunk_intentions
- [source::.../var/log/splunk/python.log(.\d+)?]
- sourcetype = splunk_python
- [source::.../var/log/splunk/searches.log]
- sourcetype = searches
- [source::.../var/log/splunk/splunk_stdout.log]
- sourcetype = splunkd_stdout
- [source::.../var/log/splunk/splunkd_stderr.log]
- sourcetype = splunkd_stderr
- [source::.../var/log/splunk/*crash-*.log]
- sourcetype = splunkd_crash_log
- [source::.../var/log/splunk/migration.log.*]
- sourcetype = splunk_migration
- [source::.../var/log/splunk/remote_searches.log(.\d+)?]
- sourcetype = splunkd_remote_searches
- [source::.../splunkd_access.log(.\d+)?]
- sourcetype = splunkd_access
- [source::.../splunkd_ui_access.log(.\d+)?]
- sourcetype = splunkd_ui_access
- [source::.../var/log/splunk/conf.log(.\d+)?]
- sourcetype = splunkd_conf
- [source::.../var/log/splunk/mongod.log(.\d+)?]
- sourcetype = mongod
- ########## SPECIAL ##########
- [__singleline]
- SHOULD_LINEMERGE = False
- [too_small]
- maxDist = 9999
- BREAK_ONLY_BEFORE_DATE = True
- PREFIX_SOURCETYPE = True
- ; same as too_small but for larger text that has special characters
- [breakable_text]
- BREAK_ONLY_BEFORE = (^(?:---|===|\*\*\*|___|=+=))|^\s*$
- LEARN_MODEL = false
- [lastlog]
- invalid_cause = binary
- LEARN_MODEL = false
- [wtmp]
- invalid_cause = binary
- LEARN_MODEL = false
- [known_binary]
- is_valid = False
- invalid_cause = binary
- LEARN_MODEL = false
- [ignored_type]
- is_valid = False
- invalid_cause = ignored_type
- LEARN_MODEL = false
- [stash]
- TRUNCATE = 0
- # only look for ***SPLUNK*** on the first line
- HEADER_MODE = firstline
- # we can summary index past data, but rarely future data
- MAX_DAYS_HENCE = 2
- MAX_DAYS_AGO = 10000
- # 5 years difference between two events
- MAX_DIFF_SECS_AGO = 155520000
- MAX_DIFF_SECS_HENCE = 155520000
- MAX_TIMESTAMP_LOOKAHEAD = 64
- LEARN_MODEL = false
- # search time extractions
- KV_MODE = none
- REPORT-1 = stash_extract
- [stash_new]
- TRUNCATE = 0
- # only look for ***SPLUNK*** on the first line
- HEADER_MODE = firstline
- # we can summary index past data, but rarely future data
- MAX_DAYS_HENCE = 2
- MAX_DAYS_AGO = 10000
- # 5 years difference between two events
- MAX_DIFF_SECS_AGO = 155520000
- MAX_DIFF_SECS_HENCE = 155520000
- MAX_TIMESTAMP_LOOKAHEAD = 64
- LEARN_MODEL = false
- # break .stash_new custom format into events
- SHOULD_LINEMERGE = false
- BREAK_ONLY_BEFORE_DATE = false
- LINE_BREAKER = (\r?\n==##~~##~~ 1E8N3D4E6V5E7N2T9 ~~##~~##==\r?\n)
- # change sourcetype to stash before indexing/forwarding this data (these events
- # are fed to the stashparsing pipeline)
- TRANSFORMS-sourcetype = set_sourcetype_to_stash
- ########## NON-LOG FILES ##########
- # settings copied from zip
- [source_archive]
- invalid_cause = needs_preprocess
- is_valid = False
- LEARN_MODEL = false
- [web]
- BREAK_ONLY_BEFORE=goblygook
- MAX_EVENTS=200000
- DATETIME_CONFIG = NONE
- CHECK_METHOD = modtime
- LEARN_MODEL = false
- [backup_file]
- BREAK_ONLY_BEFORE=goblygook
- MAX_EVENTS=10000
- LEARN_MODEL = false
- [manpage]
- BREAK_ONLY_BEFORE = gooblygook
- MAX_EVENTS = 200000
- DATETIME_CONFIG = NONE
- CHECK_METHOD = modtime
- LEARN_MODEL = false
- [misc_text]
- BREAK_ONLY_BEFORE=goblygook
- MAX_EVENTS=200000
- DATETIME_CONFIG = NONE
- CHECK_METHOD = modtime
- pulldown_type = false
- LEARN_MODEL = false
- [csv]
- SHOULD_LINEMERGE = False
- pulldown_type = true
- INDEXED_EXTRACTIONS = csv
- KV_MODE = none
- category = Structured
- description = Comma-separated value format. Set header and other settings in "Delimited Settings"
- [psv]
- SHOULD_LINEMERGE = False
- pulldown_type = true
- INDEXED_EXTRACTIONS = psv
- FIELD_DELIMITER=|
- HEADER_FIELD_DELIMITER=|
- KV_MODE = none
- category = Structured
- description = Pipe-separated value format. Set header and other settings in "Delimited Settings"
- [tsv]
- SHOULD_LINEMERGE = False
- pulldown_type = true
- INDEXED_EXTRACTIONS = tsv
- FIELD_DELIMITER=tab
- HEADER_FIELD_DELIMITER=tab
- KV_MODE = none
- category = Structured
- description = Tab-separated value format. Set header and other settings in "Delimited Settings"
- [_json]
- pulldown_type = true
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- category = Structured
- description = JavaScript Object Notation format. For more information, visit http://json.org/
- [json_no_timestamp]
- BREAK_ONLY_BEFORE = ^{
- DATETIME_CONFIG = CURRENT
- MAX_TIMESTAMP_LOOKAHEAD = 800
- pulldown_type = 1
- category = Structured
- description = A variant of the JSON source type for events without timestamps (the current time is assigned at index time)
- [fs_notification]
- SHOULD_LINEMERGE=false
- [exchange]
- INDEXED_EXTRACTIONS = w3c
- KV_MODE = none
- [generic_single_line]
- TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3N %Z
- SHOULD_LINEMERGE = false
- pulldown_type = 1
- category = Miscellaneous
- description = A common log format with a predefined timestamp. Customize timestamp in "Timestamp" options
- ########## RULE BASED CONDITIONS ##########
- [rule::snort]
- sourcetype = snort
- # IF MORE THAN 1% OF LINES MATCH THE FIRST REGEX, AND MORE THAN 10% MATCH THE
- # SECOND, MUST BE THIS TYPE (see the commented rule example after the delayed rules below)
- MORE_THAN_1 = (=\+)+
- MORE_THAN_10 = (?:[0-9A-F]{2} ){16}
- [rule::exim_main]
- sourcetype = exim_main
- # MORE THAN 2% HAVE <=, =>, 'queue'
- MORE_THANA_2 = <=
- MORE_THANB_2 = =>
- MORE_THANC_2 = queue
- [rule::postfix_syslog]
- sourcetype = postfix_syslog
- # IF 75% OF LINES MATCH REGEX, MUST BE THIS TYPE
- MORE_THAN_75 = ^\w{3} +\d+ \d\d:\d\d:\d\d .* postfix(/\w+)?\[\d+\]:
- [rule::sendmail_syslog]
- sourcetype = sendmail_syslog
- # IF 75% OF LINES MATCH REGEX, MUST BE THIS TYPE
- MORE_THAN_75 = ^\w{3} +\d+ \d\d:\d\d:\d\d .* (sendmail|imapd|ipop3d)\[\d+\]:
- [rule::access_common]
- sourcetype = access_common
- MORE_THAN_75 = ^\S+ \S+ \S+ \[[^\]]+\] "[^"]+" \S+ \S+$
- [rule::access_combined]
- sourcetype = access_combined
- MORE_THAN_75 = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+ \S+ "[^"]*"$
- [rule::access_combined_wcookie]
- sourcetype = access_combined_wcookie
- # more restrictive version = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+ \S+ "[^"]*" "[^"]*"$
- MORE_THAN_75 = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+(?: \S+)? "[^"]*" "[^"]*"
- ### DELAYED RULE BASED CONDITIONS. RUN AS LAST DITCH EFFORT BEFORE MAKING A NEW SOURCETYPE ###
- # break text on ASCII art and blank lines if more than 10% of lines
- # have ASCII art or blank lines, and less than 10% have timestamps
- [delayedrule::breakable_text]
- MORE_THAN_10 = (^(?:---|===|\*\*\*|___|=+=))|^\s*$
- LESS_THAN_10 = [: ][012]?[0-9]:[0-5][0-9]
- sourcetype = breakable_text
- [delayedrule::syslog]
- sourcetype = syslog
- # IF MORE THAN 80% OF LINES MATCH REGEX, MUST BE THIS TYPE
- MORE_THAN_80 = ^\w{3} +\d+ \d\d:\d\d:\d\d (?!AM|PM)[\w\-.]+ [\w\-/.]+(\[\d+\])?:
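- # A minimal sketch of a custom classification rule, commented out; the
- # sourcetype name and regexes are hypothetical and only illustrate the
- # MORE_THAN_<percent>/LESS_THAN_<percent> attributes used by the rule:: and
- # delayedrule:: stanzas in this section:
- #
- # [rule::my_app_log]
- # sourcetype = my_app_log
- # # more than 20% of sampled lines must start with "APP:"
- # MORE_THAN_20 = ^APP:
- # # ...and fewer than 5% may contain a syslog-style timestamp
- # LESS_THAN_5 = \d\d:\d\d:\d\d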
- ########## FILE MATCH CONDITIONS ##########
- [source::.../var/log/anaconda.syslog(.\d+)?]
- sourcetype = anaconda_syslog
- [source::.../var/log/anaconda.log(.\d+)?]
- sourcetype = anaconda
- [source::.../var/log/httpd/error_log(.\d+)?]
- sourcetype = apache_error
- [source::.../var/log/cups/access_log(.\d+)?]
- sourcetype = cups_access
- [source::.../var/log/cups/error_log(.\d+)?]
- sourcetype = cups_error
- [source::.../var/log/dmesg(.\d+)?]
- sourcetype = dmesg
- [source::.../var/log/ftp.log(.\d+)?]
- sourcetype = ftp
- [source::.../(u_|)ex(tend|\d{4,8})*?.log]
- sourcetype = iis
- [source::.../var/log/lastlog(.\d+)?]
- sourcetype = lastlog
- [source::.../var/log/audit/audit.log(.\d+)?]
- sourcetype = linux_audit
- [source::.../var/log/boot.log(.\d+)?]
- sourcetype = linux_bootlog
- [source::.../var/log/secure(.\d+)?]
- sourcetype = linux_secure
- [source::.../man/man\d+/*.\d+]
- sourcetype = manpage
- [source::.../var/log/asl.log(.\d+)?]
- sourcetype = osx_asl
- [source::.../var/log/crashreporter.log(.\d+)?]
- sourcetype = osx_crashreporter
- [source::....crash.log(.\d+)?]
- sourcetype = osx_crash_log
- [source::.../var/log/install.log(.\d+)?]
- sourcetype = osx_install
- [source::.../var/log/secure.log(.\d+)?]
- sourcetype = osx_secure
- [source::.../var/log/daily.out(.\d+)?]
- sourcetype = osx_daily
- [source::.../var/log/weekly.out(.\d+)?]
- sourcetype = osx_weekly
- [source::.../var/log/monthly.out(.\d+)?]
- sourcetype = osx_monthly
- [source::.../private/var/log/windowserver.log(.\d+)?]
- sourcetype = osx_window_server
- [source::....Z(.\d+)?]
- unarchive_cmd = gzip -cd -
- sourcetype = preprocess-Z
- NO_BINARY_CHECK = true
- [source::....(tbz|tbz2)(.\d+)?]
- unarchive_cmd = _auto
- sourcetype = preprocess-bzip
- NO_BINARY_CHECK = true
- [source::....bz2?(.\d+)?]
- unarchive_cmd = bzip2 -cd -
- sourcetype = preprocess-bzip
- NO_BINARY_CHECK = true
- [source::....(?<!tar.)gz(.\d+)?]
- unarchive_cmd = gzip -cd -
- sourcetype = preprocess-gzip
- NO_BINARY_CHECK = true
- [source::....(tar.gz|tgz)(.\d+)?]
- unarchive_cmd = _auto
- sourcetype = preprocess-targz
- NO_BINARY_CHECK = true
- [source::....tar(.\d+)?]
- unarchive_cmd = _auto
- sourcetype = preprocess-tar
- NO_BINARY_CHECK = true
- [(?i)source::....zip(.\d+)?]
- unarchive_cmd = _auto
- sourcetype = preprocess-zip
- NO_BINARY_CHECK = true
- [source::.../var/log/rpmpkgs(.\d+)?]
- sourcetype = rpmpkgs
- [source::.../var/log/sa/sar\d+]
- sourcetype = sar
- [source::.../var/log/spooler(.\d+)?]
- sourcetype = spooler
- [source::.../var/log/httpd/httpd/ssl_error_log(.\d+)?]
- sourcetype = ssl_error
- [source::.../messages(.\d+)?]
- sourcetype = syslog
- [source::.../syslog(.\d+)?]
- sourcetype = syslog
- #[source::.../(www|apache|httpd).../access*]
- #sourcetype = access_common
- [source::.../(apache|httpd).../error*]
- sourcetype = apache_error
- [source::.../private/var/log/system.log(.\d+)?]
- sourcetype = syslog
- [source::.../private/var/log/mail.log(.\d+)?]
- sourcetype = syslog
- [source::.../var/log/wtmp(.\d+)?]
- sourcetype = wtmp
- [source::.../procmail(_|.)log]
- sourcetype = procmail
- [source::.../mysql.log(.\d+)?]
- sourcetype = mysqld
- [source::...stash]
- sourcetype = stash
- [source::...stash_new]
- sourcetype = stash_new
- ####### NON-LOG FILES
- [source::....(jar)(.\d+)?]
- sourcetype = source_archive
- [source::....(css|htm|html|sgml|shtml|template)]
- sourcetype = web
- [source::....csv]
- sourcetype = csv
- [source::...((.(bak|old))|,v|~|#)]
- sourcetype = ignored_type
- [source::.../(readme|README)...]
- sourcetype=misc_text
- [source::....(0t|a|ali|asa|au|bmp|cg|cgi|class|d|dat|deb|del|dot|dvi|dylib|elc|eps|exe|ftn|gif|hlp|hqx|hs|icns|ico|inc|iso|jame|jin|jpeg|jpg|kml|la|lhs|lib|lo|lock|mcp|mid|mp3|mpg|msf|nib|o|obj|odt|ogg|ook|opt|os|pal|pbm|pdf|pem|pgm|plo|png|po|pod|pp|ppd|ppm|ppt|prc|ps|psd|psym|pyc|pyd|rast|rb|rde|rdf|rdr|rgb|ro|rpm|rsrc|so|ss|stg|strings|tdt|tif|tiff|tk|uue|vhd|xbm|xlb|xls|xlw)]
- sourcetype = known_binary
- [source::....(cache|class|cxx|dylib|jar|lo|xslt|md5|rpm|deb|iso|vim)]
- sourcetype = ignored_type
- # internal sourcetype used in the fish bucket
- [fileTrackerCrcLog]
- SEGMENTATION = meta-tokenizer
- KV_MODE = none
- EXTRACT-1 = (?<_KEY_1>\S+)::(?<_VAL_1>\S+)
- # MySQL example.
- # See the Splunker's Guide for Splunk.com
- # for the myunbinit script and sample MySQL setup
- # This example is commented out.
- #
- # [mysql]
- # match_filename1 = *.bin
- # invalid_cause = needs_preprocess
- # is_valid = False
- #
- # Dealing with all Windows-type data, even when we're on a unix
- # platform, in case these types of data are forwarded by a Windows
- # lightweight forwarder
- [ActiveDirectory]
- SHOULD_LINEMERGE = false
- LINE_BREAKER = ([\r\n]+---splunk-admon-end-of-event---\r\n[\r\n]*)
- EXTRACT-GUID = (?i)(?!=\w)(?:objectguid|guid)\s*=\s*(?<guid_lookup>[\w\-]+)
- EXTRACT-SID = objectSid\s*=\s*(?<sid_lookup>\S+)
- REPORT-MESSAGE = ad-kv
- # some schema AD events may be very long
- MAX_EVENTS = 10000
- TRUNCATE = 100000
- [WinRegistry]
- DATETIME_CONFIG=NONE
- LINE_BREAKER = ([\r\n]+---splunk-regmon-end-of-event---\r\n[\r\n]*)
- [WinWinHostMon]
- DATETIME_CONFIG=NONE
- SHOULD_LINEMERGE = false
- [WinPrintMon]
- DATETIME_CONFIG=NONE
- SHOULD_LINEMERGE = false
- [wmi]
- SHOULD_LINEMERGE = false
- LINE_BREAKER = ([\r\n]+---splunk-wmi-end-of-event---\r\n[\r\n]*)
- CHARSET = UTF-8
- [source::WMI...]
- REPORT-MESSAGE = wel-message, wel-eq-kv, wel-col-kv
- TRANSFORMS-FIELDS = wmi-host, wmi-override-host
- SHOULD_LINEMERGE = false
- [source::WinEventLog...]
- REPORT-MESSAGE = wel-message, wel-eq-kv, wel-col-kv
- KV_MODE=none
- # Note: the settings below are effectively legacy, in place here to handle
- # data coming from much, much older forwarders (3.x & 4.x)
- SHOULD_LINEMERGE = false
- MAX_TIMESTAMP_LOOKAHEAD=30
- LINE_BREAKER = ([\r\n](?=\d{2}/\d{2}/\d{2,4} \d{2}:\d{2}:\d{2} [aApPmM]{2}))
- TRANSFORMS-FIELDS = strip-winevt-linebreaker
- [PerformanceMonitor]
- SHOULD_LINEMERGE = false
- LINE_BREAKER = ([\r\n]+---splunk-perfmon-end-of-event---\r\n[\r\n]*)
- REPORT-MESSAGE = perfmon-kv
- [source::PerfmonMk...]
- EXTRACT-collection,category,object = collection=\"?(?P<collection>[^\"\n]+)\"?\ncategory=\"?(?P<category>[^\"\n]+)\"?\nobject=\"?(?P<object>[^\"\n]+)\"?\n
- KV_MODE = multi_PerfmonMk
- NO_BINARY_CHECK = 1
- pulldown_type = 1
- [WinNetMonMk]
- KV_MODE = multi_WinNetMonMk
- NO_BINARY_CHECK = 1
- pulldown_type = 0
- [source::.../disk_objects.log(.\d+)?]
- sourcetype = splunk_disk_objects
- [source::.../resource_usage.log(.\d+)?]
- sourcetype = splunk_resource_usage
- [source::.../kvstore.log(.\d+)?]
- sourcetype = kvstore
- [source::.../token_input_metrics.log(.\d+)?]
- sourcetype = token_endpoint_metrics
- [source::.../http_event_collector_metrics.log(.\d+)?]
- sourcetype = http_event_collector_metrics
- [splunk_disk_objects]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- JSON_TRIM_BRACES_IN_ARRAY_NAMES = true
- [splunk_resource_usage]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- JSON_TRIM_BRACES_IN_ARRAY_NAMES = true
- [kvstore]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- TRUNCATE = 1000000
- JSON_TRIM_BRACES_IN_ARRAY_NAMES = true
- [token_input_metrics]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- JSON_TRIM_BRACES_IN_ARRAY_NAMES = true
- [http_event_collector_metrics]
- SHOULD_LINEMERGE = false
- TIMESTAMP_FIELDS = datetime
- TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
- INDEXED_EXTRACTIONS = json
- KV_MODE = none
- JSON_TRIM_BRACES_IN_ARRAY_NAMES = true