Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
137 changes: 0 additions & 137 deletions Gemfile.lock

This file was deleted.

130 changes: 102 additions & 28 deletions lib/paperclip/storage/s3.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ module Storage
# Amazon's S3 file hosting service is a scalable, easy place to store files for
# distribution. You can find out more about it at http://aws.amazon.com/s3
#
# To use Paperclip with S3, include the +aws-sdk+ gem (v2+) in your Gemfile:
#   gem 'aws-sdk', '~> 2.0'
# There are a few S3-specific options for has_attached_file:
# * +s3_credentials+: Takes a path, a File, or a Hash. The path (or File) must point
# to a YAML file containing the +access_key_id+ and +secret_access_key+ that Amazon
Expand Down Expand Up @@ -88,7 +88,7 @@ def self.extended base
rescue LoadError => e
e.message << " (You may need to install the aws-sdk gem)"
raise e
end unless defined?(AWS::Core)
end unless defined?(Aws::S3)

base.instance_eval do
@s3_options = @options[:s3_options] || {}
Expand All @@ -97,7 +97,7 @@ def self.extended base
Proc.new do |style, attachment|
permission = (@s3_permissions[style.to_sym] || @s3_permissions[:default])
permission = permission.call(attachment, style) if permission.is_a?(Proc)
(permission == :public_read) ? 'http' : 'https'
(permission.to_s.tr('_', '-') == 'public-read') ? 'http' : 'https'
end
@s3_metadata = @options[:s3_metadata] || {}
@s3_headers = @options[:s3_headers] || {}
Expand Down Expand Up @@ -140,8 +140,15 @@ def self.extended base
end

# Returns a time-limited (presigned) URL for the attachment.
#
# time       - lifetime of the URL in seconds (default 3600).
# style_name - the style whose stored object should be addressed.
#
# Uses Aws::S3::Presigner (aws-sdk v2); the scheme of the generated URL
# is rewritten afterwards so it honors the configured s3_protocol,
# preserving the http/https behavior of the old SDK v1 implementation.
# Returns nil when the attachment has no path for the given style.
def expiring_url(time = 3600, style_name = default_style)
  if path(style_name)
    presigner = Aws::S3::Presigner.new(:client => s3_interface.client)
    url = presigner.presigned_url(
      :get_object,
      :bucket => bucket_name,
      # S3 object keys must not start with a slash.
      :key => path(style_name).sub(%r{^/}, ''),
      :expires_in => time
    )
    apply_s3_protocol_to_url(url, style_name)
  end
end

Expand All @@ -167,34 +174,54 @@ def bucket_name

# Memoized Aws::S3::Resource (aws-sdk v2) built from the attachment's
# credentials and options.
#
# Region resolution order: explicit :region credential, AWS_REGION env
# var, a region inferred from an AWS s3_host_name, then 'us-east-1'.
def s3_interface
  @s3_interface ||= begin
    config = {}
    config[:region] = s3_credentials[:region] || ENV['AWS_REGION'] || 'us-east-1'

    # Use_ssl is implied by the endpoint URI scheme. Build a custom endpoint
    # only when the host is not an AWS S3 hostname (e.g. MinIO in development).
    # AWS regional hostnames (s3-<region>.amazonaws.com) should not be set as
    # a custom endpoint — the SDK resolves them from :region automatically.
    host = s3_host_name
    if host =~ /\.amazonaws\.com\z/i
      # Infer region from the hostname when no explicit region was provided,
      # so that s3_host_name like "s3-ap-northeast-1.amazonaws.com" routes
      # to the correct region instead of falling back to us-east-1.
      if !s3_credentials[:region] && !ENV['AWS_REGION']
        inferred = infer_region_from_host(host)
        config[:region] = inferred if inferred
      end
    else
      use_ssl = !@s3_options.key?(:use_ssl) || @s3_options[:use_ssl]
      config[:endpoint] = "#{use_ssl ? 'https' : 'http'}://#{host}"
    end

    if using_http_proxy?
      proxy_opts = { :host => http_proxy_host }
      proxy_opts[:port] = http_proxy_port if http_proxy_port
      if http_proxy_user
        userinfo = http_proxy_user.to_s
        userinfo += ":#{http_proxy_password}" if http_proxy_password
        proxy_opts[:userinfo] = userinfo
      end
      # SDK v2 takes the proxy as a URI string under :http_proxy.
      config[:http_proxy] = URI::HTTP.build(proxy_opts).to_s
    end

    [:access_key_id, :secret_access_key].each do |opt|
      config[opt] = s3_credentials[opt] if s3_credentials[opt]
    end

    # Map any legacy v1-style @s3_options keys onto their v2 names.
    config.merge!(translate_s3_options(@s3_options))

    Aws::S3::Resource.new(config)
  end
end

# Memoized handle on the configured bucket (Aws::S3::Bucket, SDK v2).
def s3_bucket
  @s3_bucket ||= s3_interface.bucket(bucket_name)
end

# Aws::S3::Object for the given style's path. S3 object keys must not
# begin with a slash, so any leading "/" is stripped from the path.
def s3_object style_name = default_style
  s3_bucket.object(path(style_name).sub(%r{^/},''))
end

def using_http_proxy?
Expand Down Expand Up @@ -238,7 +265,7 @@ def exists?(style = default_style)
else
false
end
rescue AWS::Errors::Base => e
rescue Aws::Errors::ServiceError
Comment thread
undefinedacai marked this conversation as resolved.
false
end

Expand Down Expand Up @@ -268,13 +295,13 @@ def to_file style = default_style
basename = File.basename(filename, extname)
file = Tempfile.new([basename, extname])
file.binmode
file.write(s3_object(style).read)
file.write(s3_object(style).get.body.read)
file.rewind
return file
end

# Creates the configured bucket (SDK v2 takes the name as :bucket).
def create_bucket
  s3_interface.create_bucket(:bucket => bucket_name)
end

def flush_writes #:nodoc:
Expand All @@ -283,7 +310,11 @@ def flush_writes #:nodoc:
log("saving #{path(style)}")
acl = @s3_permissions[style] || @s3_permissions[:default]
acl = acl.call(self, style) if acl.respond_to?(:call)
# SDK v2 expects ACL as a hyphenated string (e.g. "public-read"),
# whereas v1 used underscore symbols (e.g. :public_read).
acl = acl.to_s.tr('_', '-')
Comment thread
undefinedacai marked this conversation as resolved.
write_options = {
:body => file,
:content_type => file.content_type.to_s.strip,
:acl => acl
}
Expand All @@ -292,8 +323,12 @@ def flush_writes #:nodoc:
write_options[:server_side_encryption] = @s3_server_side_encryption
end
write_options.merge!(@s3_headers)
s3_object(style).write(file, write_options)
rescue AWS::S3::Errors::NoSuchBucket => e
# SDK v2 expects enum values as uppercase strings
# (e.g. "REDUCED_REDUNDANCY" not :reduced_redundancy, "AES256" not :aes256)
write_options[:storage_class] = write_options[:storage_class].to_s.upcase if write_options[:storage_class]
write_options[:server_side_encryption] = write_options[:server_side_encryption].to_s.upcase if write_options[:server_side_encryption]
s3_object(style).put(write_options)
rescue Aws::S3::Errors::NoSuchBucket
Comment thread
undefinedacai marked this conversation as resolved.
create_bucket
retry
end
Expand All @@ -308,8 +343,8 @@ def flush_deletes #:nodoc:
@queued_for_delete.each do |path|
begin
log("deleting #{path}")
s3_bucket.objects[path.sub(%r{^/},'')].delete
rescue AWS::Errors::Base => e
s3_bucket.object(path.sub(%r{^/},'')).delete
rescue Aws::Errors::ServiceError
# Ignore this.
end
end
Expand All @@ -330,18 +365,57 @@ def find_credentials creds
end
private :find_credentials

# Translate legacy aws-sdk v1 s3_options keys to their v2 equivalents.
# Unknown keys are passed through so callers don't lose custom options.
#
# opts - Hash of options as historically passed under :s3_options.
#
# Returns a new Hash (the input is never mutated); the empty Hash is
# returned as-is to avoid a needless copy.
def translate_s3_options(opts)
  return opts if opts.empty?
  translated = opts.dup
  # v1: :s3_force_path_style => v2: :force_path_style
  if translated.key?(:s3_force_path_style)
    translated[:force_path_style] = translated.delete(:s3_force_path_style)
  end
  # v1: :s3_endpoint => v2: :endpoint
  if translated.key?(:s3_endpoint)
    translated[:endpoint] = translated.delete(:s3_endpoint)
  end
  # v1: :use_ssl is handled via the endpoint URI scheme; drop it here
  # so it is not passed as an unknown option to Aws::S3::Resource.new.
  translated.delete(:use_ssl)
  translated
end
private :translate_s3_options

# Rewrites the scheme of a presigned URL so it matches the configured
# s3_protocol, keeping expiring_url's historical http/https behavior.
# Schemes other than http/https leave the URL untouched.
def apply_s3_protocol_to_url(url, style_name)
  scheme = s3_protocol(style_name).to_s.sub(%r{://\z}, '')
  return url unless %w(http https).include?(scheme)
  url.sub(%r{\Ahttps?://}, "#{scheme}://")
end
private :apply_s3_protocol_to_url

# Extract region from an AWS S3 hostname.
# Supports "s3-<region>.amazonaws.com" and "s3.<region>.amazonaws.com".
# Returns nil for "s3.amazonaws.com" (the us-east-1 default) and for
# any hostname token that is not a valid AWS region identifier.
def infer_region_from_host(host)
  case host
  when /\As3[.-]([^.]+)\.amazonaws\.com\z/i
    region = $1.downcase
    aws_region_token?(region) ? region : nil
  else
    nil
  end
end
private :infer_region_from_host

# True when +token+ looks like an AWS region identifier,
# e.g. "us-east-1", "ap-northeast-1", "eu-west-2".
def aws_region_token?(token)
  !!(token =~ /\A[a-z]{2}(?:-[a-z]+)+-\d+\z/)
end
private :aws_region_token?
end
end
end
2 changes: 1 addition & 1 deletion lib/paperclip/version.rb
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
module Paperclip
  # Gem version. Guarded so loading the file twice does not trigger an
  # already-initialized-constant warning.
  VERSION = "2.7.0.3" unless defined? Paperclip::VERSION
end
2 changes: 1 addition & 1 deletion paperclip.gemspec
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ Gem::Specification.new do |s|
s.add_development_dependency('shoulda')
s.add_development_dependency('appraisal', '~> 0.4.0')
s.add_development_dependency('mocha')
s.add_development_dependency('aws-sdk')
s.add_development_dependency('aws-sdk', '~> 2.0')
s.add_development_dependency('sqlite3', '~> 1.3.4')
Comment thread
undefinedacai marked this conversation as resolved.
s.add_development_dependency('cucumber', '~> 1.1.0')
Comment thread
undefinedacai marked this conversation as resolved.
s.add_development_dependency('aruba')
Expand Down
Loading
Loading