Config:

########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro conn logs
#
# Limitations: Standard Bro log delimiter is tab.
#
# Dependencies: The logstash 'translate' filter requires the logstash contrib plugins, which are community supported and not part of the official release. Visit logstash.net to find out how to install them.
#
#######################

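The translate filter used below ships with the contrib plugins, not with logstash core. A minimal install sketch, assuming a default install location; the exact command depends on your logstash version:

  # logstash 1.4.x contrib bundle
  bin/plugin install contrib

  # logstash 5.x and later
  bin/logstash-plugin install logstash-filter-translate
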
input {
  file {
    type => "bro-conn_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_conn_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/conn.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

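For reference, the header lines being dropped look like this in a stock Bro ASCII log (illustrative, abbreviated):

  #separator \x09
  #set_separator ,
  #empty_field (empty)
  #unset_field -
  #path conn
  #fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto service ...
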
  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-conn_log" {
    csv {
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","service","duration","orig_bytes","resp_bytes","conn_state","local_orig","missed_bytes","history","orig_pkts","orig_ip_bytes","resp_pkts","resp_ip_bytes","tunnel_parents"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal <tab> in between the two quotes on your logstash system; use a text editor like nano that doesn't convert tabs to spaces.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    #The following makes use of the translate filter (logstash contrib) to convert conn_state into human-readable text. Saves having to look up values for packet introspection
    translate {
      field => "conn_state"

      destination => "conn_state_full"

      dictionary => [
        "S0", "Connection attempt seen, no reply",
        "S1", "Connection established, not terminated",
        "S2", "Connection established and close attempt by originator seen (but no reply from responder)",
        "S3", "Connection established and close attempt by responder seen (but no reply from originator)",
        "SF", "Normal SYN/FIN completion",
        "REJ", "Connection attempt rejected",
        "RSTO", "Connection established, originator aborted (sent a RST)",
        "RSTR", "Established, responder aborted",
        "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
        "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
        "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
        "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
        "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
      ]
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "orig_bytes", "integer" ]
      convert => [ "duration", "float" ]
      convert => [ "resp_bytes", "integer" ]
      convert => [ "missed_bytes", "integer" ]
      convert => [ "orig_pkts", "integer" ]
      convert => [ "orig_ip_bytes", "integer" ]
      convert => [ "resp_pkts", "integer" ]
      convert => [ "resp_ip_bytes", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}
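
Before restarting logstash, the finished file can be syntax-checked from the command line. A sketch, assuming logstash 5.x or later and a hypothetical config path (older releases used the --configtest flag instead):

  bin/logstash -f /etc/logstash/conf.d/bro-conn.conf --config.test_and_exit
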
########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro dns logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-dns_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_dns_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/dns.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-dns_log" {
    csv {

      #dns.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p proto trans_id query qclass qclass_name qtype qtype_name rcode rcode_name AA TC RD RA Z answers TTLs rejected
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","proto","trans_id","query","qclass","qclass_name","qtype","qtype_name","rcode","rcode_name","AA","TC","RD","RA","Z","answers","TTLs","rejected"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "trans_id", "integer" ]
      convert => [ "qclass", "integer" ]
      convert => [ "qtype", "integer" ]
      convert => [ "rcode", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro files logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-files_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_files_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/files.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-files_log" {
    csv {

      #files.log:#fields ts fuid tx_hosts rx_hosts conn_uids source depth analyzers mime_type filename duration local_orig is_orig seen_bytes total_bytes missing_bytes overflow_bytes timedout parent_fuid md5 sha1 sha256 extracted
      columns => ["ts","fuid","tx_hosts","rx_hosts","conn_uids","source","depth","analyzers","mime_type","filename","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timedout","parent_fuid","md5","sha1","sha256","extracted"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "tx_hosts"
      target => "tx_hosts_geoip"
    }
    geoip {
      source => "rx_hosts"
      target => "rx_hosts_geoip"
    }

    mutate {
      convert => [ "duration", "float" ]
      convert => [ "depth", "integer" ]
      convert => [ "seen_bytes", "integer" ]
      convert => [ "total_bytes", "integer" ]
      convert => [ "missing_bytes", "integer" ]
      convert => [ "overflow_bytes", "integer" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro http logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-http_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_http_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/http.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-http_log" {
    csv {

      #http.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p trans_depth method host uri referrer user_agent request_body_len response_body_len status_code status_msg info_code info_msg filename tags username password proxied orig_fuids orig_mime_types resp_fuids resp_mime_types
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","trans_depth","method","host","uri","referrer","user_agent","request_body_len","response_body_len","status_code","status_msg","info_code","info_msg","filename","tags","username","password","proxied","orig_fuids","orig_mime_types","resp_fuids","resp_mime_types"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "trans_depth", "integer" ]
      convert => [ "request_body_len", "integer" ]
      convert => [ "response_body_len", "integer" ]
      convert => [ "status_code", "integer" ]
      convert => [ "info_code", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro intel logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-intel_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_intel_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/intel.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-intel_log" {
    csv {

      #intel.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p seen.indicator seen.indicator_type seen.where seen.node matched sources fuid file_mime_type file_desc
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","seen.indicator","seen.indicator_type","seen.where","seen.node","matched","sources","fuid","file_mime_type","file_desc"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro notice logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-notice_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_notice_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/notice.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-notice_log" {
    csv {

      #notice.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p fuid file_mime_type file_desc proto note msg sub src dst p n peer_descr actions suppress_for dropped remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","fuid","file_mime_type","file_desc","proto","note","msg","sub","src","dst","p","n","peer_descr","actions","suppress_for","dropped","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      convert => [ "p", "integer" ]
      convert => [ "n", "integer" ]
      convert => [ "suppress_for", "float" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
      rename => [ "remote_location.country_code", "remote_location_country_code" ]
      rename => [ "remote_location.region", "remote_location_region" ]
      rename => [ "remote_location.city", "remote_location_city" ]
      rename => [ "remote_location.latitude", "remote_location_latitude" ]
      rename => [ "remote_location.longitude", "remote_location_longitude" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro ssh logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-ssh_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_ssh_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/ssh.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-ssh_log" {
    csv {

      #ssh.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p status direction client server remote_location.country_code remote_location.region remote_location.city remote_location.latitude remote_location.longitude
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","status","direction","client","server","remote_location.country_code","remote_location.region","remote_location.city","remote_location.latitude","remote_location.longitude"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
      rename => [ "remote_location.country_code", "remote_location_country_code" ]
      rename => [ "remote_location.region", "remote_location_region" ]
      rename => [ "remote_location.city", "remote_location_city" ]
      rename => [ "remote_location.latitude", "remote_location_latitude" ]
      rename => [ "remote_location.longitude", "remote_location_longitude" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro ssl logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-ssl_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_ssl_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/ssl.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-ssl_log" {
    csv {

      #ssl.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p version cipher curve server_name session_id last_alert established cert_chain_fuids client_cert_chain_fuids subject issuer client_subject client_issuer validation_status
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","version","cipher","curve","server_name","session_id","last_alert","established","cert_chain_fuids","client_cert_chain_fuids","subject","issuer","client_subject","client_issuer","validation_status"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro tunnel logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-tunnel_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_tunnel_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/tunnel.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-tunnel_log" {
    csv {

      #tunnel.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p tunnel_type action
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","tunnel_type","action"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by 505Forensics (http://www.505forensics.com)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro weird logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-weird_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_weird_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/weird.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-weird_log" {
    csv {

      #weird.log:#fields ts uid id.orig_h id.orig_p id.resp_h id.resp_p name addl notice peer
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","name","addl","notice","peer"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
      rename => [ "id.orig_h", "id_orig_host" ]
      rename => [ "id.orig_p", "id_orig_port" ]
      rename => [ "id.resp_h", "id_resp_host" ]
      rename => [ "id.resp_p", "id_resp_port" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

########################
# logstash Configuration Files - Bro IDS Logs
# Created by Knowm (http://www.knowm.org)
# MIT License, so do what you want with it!
#
# For use with logstash, elasticsearch, and kibana to analyze logs
#
# Usage: Reference this config file for your instance of logstash to parse Bro x509 logs
#
# Limitations: Standard Bro log delimiter is tab.
#
#######################

input {
  file {
    type => "bro-x509_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_x509_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else
    path => "/opt/bro/logs/current/x509.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-x509_log" {
    csv {

      #x509.log:#fields ts id certificate.version certificate.serial certificate.subject certificate.issuer certificate.not_valid_before certificate.not_valid_after certificate.key_alg certificate.sig_alg certificate.key_type certificate.key_length certificate.exponent certificate.curve san.dns san.uri san.email san.ip basic_constraints.ca basic_constraints.path_len
      columns => ["ts","id","certificate.version","certificate.serial","certificate.subject","certificate.issuer","certificate.not_valid_before","certificate.not_valid_after","certificate.key_alg","certificate.sig_alg","certificate.key_type","certificate.key_length","certificate.exponent","certificate.curve","san.dns","san.uri","san.email","san.ip","basic_constraints.ca","basic_constraints.path_len"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
    }

    #Let's convert the 'ts' field into a proper timestamp, so we can use Kibana features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    mutate {
      rename => [ "certificate.version", "certificate_version" ]
      rename => [ "certificate.serial", "certificate_serial" ]
      rename => [ "certificate.subject", "certificate_subject" ]
      rename => [ "certificate.issuer", "certificate_issuer" ]
      rename => [ "certificate.not_valid_before", "certificate_not_valid_before" ]
      rename => [ "certificate.not_valid_after", "certificate_not_valid_after" ]
      rename => [ "certificate.key_alg", "certificate_key_alg" ]
      rename => [ "certificate.sig_alg", "certificate_sig_alg" ]
      rename => [ "certificate.key_type", "certificate_key_type" ]
      rename => [ "certificate.key_length", "certificate_key_length" ]
      rename => [ "certificate.exponent", "certificate_exponent" ]
      rename => [ "certificate.curve", "certificate_curve" ]
      rename => [ "san.dns", "san_dns" ]
      rename => [ "san.uri", "san_uri" ]
      rename => [ "san.email", "san_email" ]
      rename => [ "san.ip", "san_ip" ]
      rename => [ "basic_constraints.ca", "basic_constraints_ca" ]
      rename => [ "basic_constraints.path_len", "basic_constraints_path_len" ]
    }
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch { hosts => ["10.100.130.72"] }
}

input {
  file {
    type => "secure_log"
    path => "/var/log/secure"
  }
}

filter {
  grok {
    add_tag => [ "sshd_fail" ]
    match => { "message" => "Failed %{WORD:sshd_auth_type} for %{USERNAME:sshd_invalid_user} from %{IP:sshd_client_ip} port %{NUMBER:sshd_port} %{GREEDYDATA:sshd_protocol}" }
  }
}

output {
  elasticsearch {
    index => "sshd_fail-%{+YYYY.MM}"
  }
}
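
For reference, the grok pattern above matches /var/log/secure lines of this shape (illustrative values):

  Oct 18 12:00:01 bastion sshd[4242]: Failed password for root from 203.0.113.5 port 52413 ssh2

Here sshd_auth_type captures "password", sshd_invalid_user captures "root", and sshd_protocol captures "ssh2". Note that "invalid user" failures insert extra words after "for", so despite the field name this pattern only matches failed logins for accounts that actually exist.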