Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/ruby
# Download every S3 access-log object under a bucket prefix and concatenate
# them into a single local file for downstream processing (e.g. logstash).
#
# NOTE(review): the original paste required 'aws/s3' (the legacy marcel/aws-s3
# gem), but the API used below (AWS.config, AWS::S3.new, s3.buckets[...],
# objects.with_prefix) is the aws-sdk v1 gem, so that is what must be required.
require 'rubygems'
require 'aws-sdk'

# For non-US buckets the regional endpoint must be set explicitly,
# otherwise requests go to the default us-east-1 endpoint.
AWS.config(:s3_endpoint => "s3-eu-west-1.amazonaws.com")

# Connect to S3. S3_ACCESS_KEY / S3_SECRET_KEY / BUCKET_NAME are placeholder
# constants — define them (or load them from ENV) before running this script.
s3 = AWS::S3.new(:access_key_id => S3_ACCESS_KEY,
                 :secret_access_key => S3_SECRET_KEY)

# Grab the bucket where the logs are stored.
bucket = s3.buckets[BUCKET_NAME]

# Stream every object under the prefix into one local file. The block form of
# File.open guarantees the handle is closed even if a download raises, and the
# block form of #read streams each object in chunks instead of buffering it.
File.open("/var/log/s3_bucket.log", 'w') do |file|
  # A different prefix (or none) can be used here to limit what S3 returns.
  bucket.objects.with_prefix('staticassets-logs/').each do |log|
    log.read do |chunk|
      file.write(chunk)
    end
  end
end
#!/bin/bash
# Sync new S3 access-log objects down into the local spool directory that the
# logstash file input watches. Intended to be run from cron.
#
# -e: abort on any command failure; -u: treat unset vars (e.g. a missing
# $S3_BUCKET) as an error instead of silently expanding to ""; -o pipefail:
# propagate failures through pipelines.
set -euo pipefail

export PATH="$PATH:/bin:/usr/bin"

# Fail fast if the spool directory is missing — the original did a bare `cd`,
# so on failure s3cmd would have synced into whatever the cwd happened to be.
cd "/var/log/s3/$S3_BUCKET/" || exit 1

export s3url="s3://$S3_BUCKET/$S3_PREFIX"

# --skip-existing: only fetch objects not already present locally.
s3cmd -c /home/logstash/.s3cfg sync --skip-existing "$s3url" .
# Logstash pipeline: tail the locally synced S3 access logs, parse them with a
# custom S3_ACCESS_LOG grok pattern, and ship events to Elasticsearch.
input {
  file {
    type => "s3-access-log"
    # ${S3_BUCKET} is Logstash's environment-variable interpolation syntax;
    # a bare $S3_BUCKET (as in the original) is taken literally and never
    # expanded, so the input would match no files.
    path => "/var/log/s3/${S3_BUCKET}/${S3_BUCKET}/*"
    # A /dev/null sincedb means every file is re-read from the beginning on
    # each restart — expect duplicate events unless deduplicated downstream.
    sincedb_path => "/dev/null"
    start_position => "beginning"
  }
}

filter {
  if [type] == "s3-access-log" {
    grok {
      # S3_ACCESS_LOG is a custom pattern expected in this directory.
      patterns_dir => ["/etc/logstash/conf.d/patterns"]
      match => { "message" => "%{S3_ACCESS_LOG}" }
      # Drop the raw line once it has been parsed into fields.
      remove_field => ["message"]
    }
    date {
      # S3 access logs use an Apache-style timestamp, e.g. 06/Feb/2014:00:00:38 +0000.
      match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
      remove_field => ["timestamp"]
    }
  }
}

output {
  # `host => localhost` is pre-2.0 output syntax and was removed; current
  # Logstash versions require `hosts` with an array of addresses.
  elasticsearch { hosts => ["localhost:9200"] }
  stdout { codec => rubydebug }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement