########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro conn logs
#
# Limitations: The standard Bro log delimiter is tab.
#
# Dependencies: The 'translate' filter used below requires the logstash-contrib plugins, which are community supported and not part of the official release. Visit logstash.net to find out how to install them.
#
########################
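# Installing the translate filter is version-dependent; the commands below are sketches, not guaranteed for your release:
#   bin/plugin install contrib                             # Logstash 1.4.x contrib bundle
#   bin/logstash-plugin install logstash-filter-translate  # newer releases ship it as a standalone plugin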
input {
  file {
    type => "bro-conn_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_conn_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/conn.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-conn_log" {
    csv {
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","service","duration","orig_bytes","resp_bytes","conn_state","local_orig","missed_bytes","history","orig_pkts","orig_ip_bytes","resp_pkts","resp_ip_bytes","tunnel_parents"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, insert a literal <tab> between the two quotes on your Logstash system, using a text editor like nano that doesn't convert tabs to spaces.
      separator => "	"
    }
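    #Note: by default, quoted strings in the Logstash config do not interpret "\t" as a tab.
    #Newer releases can opt in via logstash.yml (setting availability depends on your version):
    #  config.support_escapes: true
    #after which separator => "\t" also works.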
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
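    #Note: GeoIP lookups fail for private (RFC 1918) addresses; the filter then tags the event
    #(by default with "_geoip_lookup_failure") instead of adding location fields. Expected on internal traffic.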
    #The following uses the translate filter (logstash-contrib) to convert conn_state into human-readable text, which saves having to look up the values during packet introspection
    translate {
      field => "conn_state"
      destination => "conn_state_full"
      dictionary => [
        "S0", "Connection attempt seen, no reply",
        "S1", "Connection established, not terminated",
        "S2", "Connection established and close attempt by originator seen (but no reply from responder)",
        "S3", "Connection established and close attempt by responder seen (but no reply from originator)",
        "SF", "Normal SYN/FIN completion",
        "REJ", "Connection attempt rejected",
        "RSTO", "Connection established, originator aborted (sent a RST)",
        "RSTR", "Established, responder aborted",
        "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
        "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
        "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
        "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
        "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
      ]
    }
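    #Assumption about your plugin version: the flat array form above is the old contrib syntax; recent
    #logstash-filter-translate releases expect a hash instead, e.g.:
    #  dictionary => { "S0" => "Connection attempt seen, no reply" }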
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "orig_bytes", "integer" ]
      convert => [ "duration", "float" ]
      convert => [ "resp_bytes", "integer" ]
      convert => [ "missed_bytes", "integer" ]
      convert => [ "orig_pkts", "integer" ]
      convert => [ "orig_ip_bytes", "integer" ]
      convert => [ "resp_pkts", "integer" ]
      convert => [ "resp_ip_bytes", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
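#A minimal usage sketch (paths are assumptions; adjust to your install):
#  bin/logstash -f bro-conn.conf          # run this config alone
#  bin/logstash -f /etc/logstash/conf.d/  # or point -f at a directory to load every config below as one pipeline
#(when configs are combined, the [type] conditionals keep each filter scoped to its own log)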
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro dns logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-dns_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_dns_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/dns.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-dns_log" {
    csv {
      #dns.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto trans_id query qclass qclass_name qtype qtype_name rcode rcode_name AA TC RD RA Z answers TTLs rejected
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","trans_id","query","qclass","qclass_name","qtype","qtype_name","rcode","rcode_name","AA","TC","RD","RA","Z","answers","TTLs","rejected"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "trans_id", "integer" ]
      convert => [ "qclass", "integer" ]
      convert => [ "qtype", "integer" ]
      convert => [ "rcode", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro files logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-files_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_files_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/files.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-files_log" {
    csv {
      #files.log:#fields ts fuid tx_hosts rx_hosts conn_uids source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid md5 sha1 sha256 extracted
      columns => ["ts","fuid","tx_hosts","rx_hosts","conn_uids","source","depth","analyzers","mime_type","filename","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timedout","parent_fuid","md5","sha1","sha256","extracted"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "tx_hosts"
      target => "tx_hosts_geoip"
    }
    geoip {
      source => "rx_hosts"
      target => "rx_hosts_geoip"
    }
    mutate {
      convert => [ "duration", "float" ]
      convert => [ "depth", "integer" ]
      convert => [ "seen_bytes", "integer" ]
      convert => [ "total_bytes", "integer" ]
      convert => [ "missing_bytes", "integer" ]
      convert => [ "overflow_bytes", "integer" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro http logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-http_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_http_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/http.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-http_log" {
    csv {
      #http.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p trans_depth method host uri referrer user_agent request_body_len response_body_len status_code status_msg info_code info_msg filename tags username password proxied orig_fuids orig_mime_types resp_fuids resp_mime_types
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","trans_depth","method","host","uri","referrer","user_agent","request_body_len","response_body_len","status_code","status_msg","info_code","info_msg","filename","tags","username","password","proxied","orig_fuids","orig_mime_types","resp_fuids","resp_mime_types"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "trans_depth", "integer" ]
      convert => [ "request_body_len", "integer" ]
      convert => [ "response_body_len", "integer" ]
      convert => [ "status_code", "integer" ]
      convert => [ "info_code", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro intel logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-intel_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_intel_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/intel.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-intel_log" {
    csv {
      #intel.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","seen.indicator","seen.indicator_type","seen.where","seen.node","matched","sources","fuid","file_mime_type","file_desc"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro notice logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-notice_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_notice_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/notice.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-notice_log" {
    csv {
      #notice.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc proto note msg sub src dst p n peer_descr actions suppress_for dropped remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","fuid","file_mime_type","file_desc","proto","note","msg","sub","src","dst","p","n","peer_descr","actions","suppress_for","dropped","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "p", "integer" ]
      convert => [ "n", "integer" ]
      convert => [ "suppress_for", "float" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
      rename => [ "remote_location.country_code", "remote_location_country_code" ]
      rename => [ "remote_location.region", "remote_location_region" ]
      rename => [ "remote_location.city", "remote_location_city" ]
      rename => [ "remote_location.latitude", "remote_location_latitude" ]
      rename => [ "remote_location.longitude", "remote_location_longitude" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro ssh logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-ssh_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_ssh_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/ssh.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-ssh_log" {
    csv {
      #ssh.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p status direction client server remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","status","direction","client","server","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
      rename => [ "remote_location.country_code", "remote_location_country_code" ]
      rename => [ "remote_location.region", "remote_location_region" ]
      rename => [ "remote_location.city", "remote_location_city" ]
      rename => [ "remote_location.latitude", "remote_location_latitude" ]
      rename => [ "remote_location.longitude", "remote_location_longitude" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro ssl logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-ssl_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_ssl_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/ssl.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-ssl_log" {
    csv {
      #ssl.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version cipher curve server_name session_id last_alert established cert_chain_fuids client_cert_chain_fuids subject issuer client_subject client_issuer validation_status
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","version","cipher","curve","server_name","session_id","last_alert","established","cert_chain_fuids","client_cert_chain_fuids","subject","issuer","client_subject","client_issuer","validation_status"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro tunnel logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-tunnel_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_tunnel_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/tunnel.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-tunnel_log" {
    csv {
      #tunnel.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p tunnel_type action
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","tunnel_type","action"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro weird logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-weird_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_weird_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/weird.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-weird_log" {
    csv {
      #weird.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","name","addl","notice","peer"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
########################
# Logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with Logstash, Elasticsearch, and Kibana to analyze logs
#
# Usage: Reference this config file for your instance of Logstash to parse Bro x509 logs
#
# Limitations: The standard Bro log delimiter is tab.
#
########################
input {
  file {
    type => "bro-x509_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_x509_sincedb"
    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/x509.log"
  }
}
filter {
  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-x509_log" {
    csv {
      #x509.log:#fields ts id certificate.version certificate.serial certificate.subject certificate.issuer certificate.not_valid_before certificate.not_valid_after certificate.key_alg certificate.sig_alg certificate.key_type certificate.key_length certificate.exponent certificate.curve san.dns san.uri san.email san.ip basic_constraints.ca basic_constraints.path_len
      columns => ["ts","id","certificate.version","certificate.serial","certificate.subject","certificate.issuer","certificate.not_valid_before","certificate.not_valid_after","certificate.key_alg","certificate.sig_alg","certificate.key_type","certificate.key_length","certificate.exponent","certificate.curve","san.dns","san.uri","san.email","san.ip","basic_constraints.ca","basic_constraints.path_len"]
      #If you use a custom delimiter, change the value between the quotes below to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }
    #Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }
    mutate {
      rename => [ "certificate.version", "certificate_version" ]
      rename => [ "certificate.serial", "certificate_serial" ]
      rename => [ "certificate.subject", "certificate_subject" ]
      rename => [ "certificate.issuer", "certificate_issuer" ]
      rename => [ "certificate.not_valid_before", "certificate_not_valid_before" ]
      rename => [ "certificate.not_valid_after", "certificate_not_valid_after" ]
      rename => [ "certificate.key_alg", "certificate_key_alg" ]
      rename => [ "certificate.sig_alg", "certificate_sig_alg" ]
      rename => [ "certificate.key_type", "certificate_key_type" ]
      rename => [ "certificate.key_length", "certificate_key_length" ]
      rename => [ "certificate.exponent", "certificate_exponent" ]
      rename => [ "certificate.curve", "certificate_curve" ]
      rename => [ "san.dns", "san_dns" ]
      rename => [ "san.uri", "san_uri" ]
      rename => [ "san.email", "san_email" ]
      rename => [ "san.ip", "san_ip" ]
      rename => [ "basic_constraints.ca", "basic_constraints_ca" ]
      rename => [ "basic_constraints.path_len", "basic_constraints_path_len" ]
    }
  }
}
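#Output for the x509 events, following the same pattern as the configs above (the Elasticsearch host is an assumption):
output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}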
input {
  file {
    type => "secure_log"
    path => "/var/log/secure"
  }
}
filter {
  #Scope the grok to this input's type, like the Bro filters above, so it doesn't run against other logs
  if [type] == "secure_log" {
    grok {
      add_tag => [ "sshd_fail" ]
      match => { "message" => "Failed %{WORD:sshd_auth_type} for %{USERNAME:sshd_invalid_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{GREEDYDATA:sshd_protocol}" }
    }
  }
}
output {
  elasticsearch {
    index => "sshd_fail-%{+YYYY.MM}"
  }
}
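#Illustrative /var/log/secure line (hypothetical values) that the grok above matches:
#  Jan 12 08:33:21 host sshd[4321]: Failed password for root from 203.0.113.7 port 44852 ssh2
#producing sshd_auth_type=password, sshd_invalid_user=root, sshd_client_ip=203.0.113.7, sshd_port=44852, sshd_protocol=ssh2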