Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
require 'rubygems'
require 'active_support/all'
require 'date'
require 'aws-sdk'
require 'pry'

# Incrementally push files from a locally generated, git-tracked static site
# up to an S3 bucket. This worked while johnholdun.com was hosted on Amazon,
# but use it at your own risk!

# Local root of the generated site output.
DIRECTORY = '/changeme/'.freeze

# AWS connection and target settings.
AWS_REGION = 'us-east-1'.freeze
ACCESS_KEY_ID = 'CHANGEME'.freeze
SECRET_ACCESS_KEY = 'CHANGEME'.freeze
BUCKET = 'example.com'.freeze
CLOUDFRONT_DISTRIBUTION_ID = 'CHANGEME'.freeze
# Figure out which output files changed: prefer unstaged edits, falling back
# to whatever the last commit touched.
puts 'Checking for unstaged output files...'
filenames = `git diff --name-only -- #{DIRECTORY}`.lines.map(&:strip)

unless filenames.any?
  puts 'Checking for output files in last commit...'
  filenames = `git diff HEAD~ --name-only -- #{DIRECTORY}`.lines.map(&:strip)
end

# Drop anything git reported that no longer exists on disk (deleted files).
filenames = filenames.select { |name| File.exist?(name) }

if filenames.empty?
  puts 'Nothing to upload!'
  exit
end

puts "Uploading #{filenames.size} file#{'s' unless filenames.size == 1}..."
# Extension → MIME type map for common static-site assets. Unknown or absent
# extensions fall back to 'text/html', which matches this script's previous
# behavior of uploading everything as HTML (resolves the old TODO: serving
# CSS/JS/images as text/html breaks them in browsers).
CONTENT_TYPES = {
  '.html' => 'text/html',
  '.htm'  => 'text/html',
  '.css'  => 'text/css',
  '.js'   => 'application/javascript',
  '.json' => 'application/json',
  '.xml'  => 'application/xml',
  '.txt'  => 'text/plain',
  '.png'  => 'image/png',
  '.jpg'  => 'image/jpeg',
  '.jpeg' => 'image/jpeg',
  '.gif'  => 'image/gif',
  '.svg'  => 'image/svg+xml',
  '.ico'  => 'image/x-icon',
  '.pdf'  => 'application/pdf'
}.freeze

# Returns the Content-Type for +filename+, keyed on its extension
# (case-insensitive). Defaults to 'text/html' when the extension is unknown.
def content_type_for(filename)
  CONTENT_TYPES.fetch(File.extname(filename).downcase, 'text/html')
end

Aws.config.update \
  region: AWS_REGION,
  credentials: Aws::Credentials.new(ACCESS_KEY_ID, SECRET_ACCESS_KEY)

s3 = Aws::S3::Client.new

filenames.each do |filename|
  puts filename

  # Block form of File.open guarantees the handle is closed after the upload.
  # The object key strips the local directory prefix so keys mirror the
  # site's URL paths.
  File.open(filename) do |file|
    s3.put_object \
      bucket: BUCKET,
      key: filename.sub(%r{^#{DIRECTORY}}, ''),
      body: file,
      acl: 'public-read',
      content_type: content_type_for(filename)
  end
end
puts 'Done uploading! Time to invalidate the cloud.'

cloud_front = Aws::CloudFront::Client.new

# Invalidate the whole distribution with a single wildcard path.
# NOTE(review): caller_reference has only seconds resolution here, so two
# runs within the same second would collide — confirm this is acceptable.
invalidation_batch = {
  paths: { quantity: 1, items: ['/*'] },
  caller_reference: Time.now.to_i.to_s
}

cloud_front.create_invalidation \
  distribution_id: CLOUDFRONT_DISTRIBUTION_ID,
  invalidation_batch: invalidation_batch

puts 'ayy'
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement