clean up Workman leftovers

2.0
Lorenzo Planas 12 years ago
parent 7a74bdfd88
commit ddd2bd142a

@@ -1,40 +0,0 @@
# encoding: utf-8

require 'aws-sdk'

module Workman
  module Commands

    class AWSConfigurator
      def initialize(config={})
        @config = config
      end #initialize

      def configure
        AWS.config(
          access_key_id: access_key_id,
          secret_access_key: secret_access_key
        )
      end #configure

      private

      attr_reader :config

      def access_key_id
        AWS.config.access_key_id || get_access_key_id
      end #access_key_id

      def secret_access_key
        AWS.config.secret_access_key || get_secret_access_key
      end #secret_access_key

      def get_access_key_id
        config.fetch(:access_key_id, ENV['AWS_ACCESS_KEY_ID'])
      end #get_access_key_id

      def get_secret_access_key
        config.fetch(:secret_access_key, ENV['AWS_SECRET_ACCESS_KEY'])
      end #get_secret_access_key
    end # AWSConfigurator

  end # Commands
end # Workman
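For reference, a minimal usage sketch of the removed configurator; the require path and credential values below are placeholders, not part of this repository:

  require_relative 'commands/aws_configurator'

  # Precedence, as exercised by the specs: existing AWS.config values win,
  # then the passed hash, then AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
  # from the environment.
  Workman::Commands::AWSConfigurator.new(
    access_key_id:     'placeholder_key_id',
    secret_access_key: 'placeholder_secret'
  ).configure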

@@ -1,53 +0,0 @@
# encoding: utf-8

require 'aws-sdk'
require_relative 'aws_configurator'

module Workman
  module Commands

    class S3UrlParser
      def initialize(url)
        @url = url
      end #initialize

      def parse
        object_name = url.split('/').last
        bucket_name = url.split('/')[-2]
        [bucket_name, object_name]
      end #parse

      private

      attr_reader :url
    end # S3UrlParser

    class S3Downloader
      def initialize(config={})
        AWSConfigurator.new(config).configure
        @s3 = AWS::S3.new
      end #initialize

      def download(file_url, destination_directory)
        @bucket_name, @object_name = S3UrlParser.new(file_url).parse
        destination = filepath_for(file_url, destination_directory)
        bucket = s3.buckets[bucket_name]
        object = bucket.objects[object_name]
        File.open(destination, 'w') do |file|
          object.read { |chunk| file.write(chunk) }
        end
        destination
      end #download

      private

      attr_reader :s3, :bucket_name, :object_name

      def filepath_for(file_url, destination_directory)
        File.join(destination_directory, file_url.split('/').last)
      end #filepath_for
    end # S3Downloader

  end # Commands
end # Workman
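A minimal usage sketch of the removed downloader; the URL and destination directory are placeholders:

  require_relative 'commands/s3_downloader'

  # The last two URL path segments are taken as bucket and object name;
  # the object is streamed into the destination directory and the
  # resulting local path is returned.
  downloader = Workman::Commands::S3Downloader.new
  local_path = downloader.download(
    'https://s3.amazonaws.com/some-bucket/report.csv', '/var/tmp'
  )
  # => "/var/tmp/report.csv"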

@@ -1,31 +0,0 @@
# encoding: utf-8

require 'aws-sdk'
require_relative './aws_configurator'

module Workman
  module Commands

    class S3Uploader
      def initialize(config={})
        AWSConfigurator.new(config).configure
      end #initialize

      def upload(filepath, bucket=nil)
        bucket ||= default_bucket
        basename = File.basename(filepath)
        uploaded_file = bucket.objects[basename]
        uploaded_file.write(file: filepath)
        uploaded_file.public_url
      end #upload

      private

      attr_reader :bucket_name

      def default_bucket
        @default_bucket ||= AWS::S3.new.buckets[ENV.fetch('S3_BUCKET')]
      end #default_bucket
    end # S3Uploader

  end # Commands
end # Workman
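A minimal usage sketch of the removed uploader; the bucket name and file path are placeholders:

  require_relative 'commands/s3_uploader'

  # With no bucket argument the uploader falls back to the bucket named
  # by ENV['S3_BUCKET'] and returns the uploaded object's public URL.
  ENV['S3_BUCKET'] ||= 'some-bucket'
  url = Workman::Commands::S3Uploader.new.upload('/var/tmp/report.csv')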

@@ -1,61 +0,0 @@
# encoding: utf-8

require 'minitest/autorun'
require_relative '../../../commands/aws_configurator'

include Workman::Commands

describe AWSConfigurator do
  describe '#configure' do
    it 'gives first preference to existing AWS.config settings' do
      AWS.config(
        access_key_id: 'existing_access_key_id',
        secret_access_key: 'existing_secret_access_key'
      )
      config = {
        access_key_id: 'passed_access_key_id',
        secret_access_key: 'passed_secret_access_key'
      }
      AWSConfigurator.new(config).configure
      AWS.config.access_key_id.must_match /existing_access/
      AWS.config.secret_access_key.must_match /existing_secret/
    end

    it 'gives second preference to passed configuration' do
      AWS.config(
        access_key_id: nil,
        secret_access_key: nil
      )
      config = {
        access_key_id: 'passed_access_key_id',
        secret_access_key: 'passed_secret_access_key'
      }
      AWSConfigurator.new(config).configure
      AWS.config.access_key_id.must_match /passed_access/
      AWS.config.secret_access_key.must_match /passed_secret/
    end

    it 'gives last preference to environment variables' do
      AWS.config(
        access_key_id: nil,
        secret_access_key: nil
      )
      previous_access_key_id = ENV['AWS_ACCESS_KEY_ID']
      previous_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
      ENV['AWS_ACCESS_KEY_ID'] = 'environment_access_key_id'
      ENV['AWS_SECRET_ACCESS_KEY'] = 'environment_secret_access_key'
      AWSConfigurator.new.configure
      AWS.config.access_key_id.must_match /environment_access/
      AWS.config.secret_access_key.must_match /environment_secret/
      ENV['AWS_ACCESS_KEY_ID'] = previous_access_key_id
      ENV['AWS_SECRET_ACCESS_KEY'] = previous_secret_access_key
    end
  end #configure
end # AWSConfigurator

@@ -1,45 +0,0 @@
# encoding: utf-8

require 'minitest/autorun'
require_relative '../../../commands/s3_downloader'
require_relative '../../../commands/s3_uploader'

include Workman::Commands

describe S3Downloader do
  before do
    AWS.config(
      access_key_id: nil,
      secret_access_key: nil
    )
  end

  describe '#initialize' do
    it 'sets the (optionally) passed AWS configuration using the AWSConfigurator' do
      AWS.config(access_key_id: 'bogus')
      S3Downloader.new
      AWS.config.access_key_id.must_equal 'bogus'

      AWS.config(access_key_id: nil)
      S3Downloader.new(access_key_id: 'passed_key')
      AWS.config.access_key_id.must_equal 'passed_key'
    end
  end #initialize

  describe '#download' do
    it 'downloads a file from the passed url' do
      fake_filepath = File.path(fake_file_factory)
      uploaded_file_url = S3Uploader.new.upload(fake_filepath).to_s
      filepath = S3Downloader.new.download(uploaded_file_url, '/var/tmp')
      File.exists?(filepath).must_equal true
    end
  end #download

  def fake_file_factory
    file = File.new("/var/tmp/#{Time.now.utc.to_f}", 'w')
    file.puts 'bogus'
    file
  end #fake_file_factory
end # S3Downloader

@@ -1,68 +0,0 @@
# encoding: utf-8

require 'minitest/autorun'
require 'ostruct'
require_relative '../../../commands/s3_uploader'

include Workman::Commands

describe S3Uploader do
  describe '#initialize' do
    it 'sets the (optionally) passed AWS configuration using the AWSConfigurator' do
      AWS.config(access_key_id: 'bogus')
      S3Uploader.new
      AWS.config.access_key_id.must_equal 'bogus'

      AWS.config(access_key_id: nil)
      S3Uploader.new(access_key_id: 'passed_key')
      AWS.config.access_key_id.must_equal 'passed_key'
    end
  end #initialize

  describe '#upload' do
    before do
      AWS.config(
        access_key_id: nil,
        secret_access_key: nil
      )
      @fake_file = fake_file_factory
      @fake_bucket = fake_bucket_factory
    end

    it 'strips the path from the uploaded file name' do
      S3Uploader.new.upload(@fake_file.path, @fake_bucket).wont_match /var/
    end

    it 'returns the url of the uploaded file' do
      url = S3Uploader.new.upload(@fake_file.path, @fake_bucket)
      url.must_match /http/
      url.must_match File.basename(@fake_file)
    end
  end #upload

  def fake_bucket_factory
    bucket = OpenStruct.new(fake_file: fake_uploaded_file)
    def bucket.objects; Hash.new(self.fake_file); end
    bucket
  end #fake_bucket_factory

  def fake_uploaded_file
    Class.new do
      def write(arguments={})
        @filepath = arguments.fetch(:file)
      end

      def public_url
        "http://s3.amazonaws.com/bucket/#{File.basename(@filepath)}"
      end #public_url
    end.new
  end #fake_uploaded_file

  def fake_file_factory
    file = File.new("/var/tmp/#{Time.now.utc.to_f}", 'w')
    file.puts 'bogus'
    file
  end #fake_file_factory
end # S3Uploader