diff --git a/.travis.yml b/.travis.yml
index 2ecced23f..4388dd3b4 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,6 +11,9 @@ gemfile:
- gemfiles/3.2.gemfile
- gemfiles/4.1.gemfile
- gemfiles/4.2.gemfile
+ - gemfiles/3.2.awsv1.gemfile
+ - gemfiles/4.1.awsv1.gemfile
+ - gemfiles/4.2.awsv1.gemfile
matrix:
fast_finish: true
diff --git a/Appraisals b/Appraisals
index 153c8f6e3..d8ec05c9e 100644
--- a/Appraisals
+++ b/Appraisals
@@ -1,11 +1,29 @@
appraise "3.2" do
gem "rails", "~> 3.2.0"
+ gem "aws-sdk", "~> 2.0"
end
appraise "4.1" do
gem "rails", "~> 4.1.0"
+ gem "aws-sdk", "~> 2.0"
end
appraise "4.2" do
gem "rails", "~> 4.2.0"
+ gem "aws-sdk", "~> 2.0"
+end
+
+appraise "3.2.awsv1" do
+ gem "rails", "~> 3.2.0"
+ gem "aws-sdk", "~> 1.5"
+end
+
+appraise "4.1.awsv1" do
+ gem "rails", "~> 4.1.0"
+ gem "aws-sdk", "~> 1.5"
+end
+
+appraise "4.2.awsv1" do
+ gem "rails", "~> 4.2.0"
+ gem "aws-sdk", "~> 1.5"
end
diff --git a/features/basic_integration.feature b/features/basic_integration.feature
index f61e71aa4..0e34456dc 100644
--- a/features/basic_integration.feature
+++ b/features/basic_integration.feature
@@ -69,6 +69,7 @@ Feature: Rails integration
bucket: paperclip
access_key_id: access_key
secret_access_key: secret_key
+ s3_region: us-west-2
"""
And I start the rails application
When I go to the new user page
diff --git a/features/step_definitions/s3_steps.rb b/features/step_definitions/s3_steps.rb
index 2ae69533a..af2bfee0c 100644
--- a/features/step_definitions/s3_steps.rb
+++ b/features/step_definitions/s3_steps.rb
@@ -1,11 +1,15 @@
When /^I attach the file "([^"]*)" to "([^"]*)" on S3$/ do |file_path, field|
definition = Paperclip::AttachmentRegistry.definitions_for(User)[field.downcase.to_sym]
- path = "https://paperclip.s3.amazonaws.com#{definition[:path]}"
+ path = if defined?(::AWS)
+ "https://paperclip.s3.amazonaws.com#{definition[:path]}"
+ else
+ "https://paperclip.s3-us-west-2.amazonaws.com#{definition[:path]}"
+ end
path.gsub!(':filename', File.basename(file_path))
path.gsub!(/:([^\/\.]+)/) do |match|
"([^\/\.]+)"
end
- FakeWeb.register_uri(:put, Regexp.new(path), :body => "OK")
+ FakeWeb.register_uri(:put, Regexp.new(path), :body => defined?(::AWS) ? "OK" : "")
step "I attach the file \"#{file_path}\" to \"#{field}\""
end
diff --git a/gemfiles/3.2.awsv1.gemfile b/gemfiles/3.2.awsv1.gemfile
new file mode 100644
index 000000000..b79f2906c
--- /dev/null
+++ b/gemfiles/3.2.awsv1.gemfile
@@ -0,0 +1,20 @@
+# This file was generated by Appraisal
+
+source "https://rubygems.org"
+
+gem "sqlite3", "~> 1.3.8", :platforms => :ruby
+gem "jruby-openssl", :platforms => :jruby
+gem "activerecord-jdbcsqlite3-adapter", :platforms => :jruby
+gem "rubysl", :platforms => :rbx
+gem "racc", :platforms => :rbx
+gem "pry"
+gem "rails", "~> 3.2.0"
+gem "aws-sdk", "~> 1.5"
+
+group :development, :test do
+ gem "mime-types", "~> 1.16"
+ gem "builder"
+ gem "rubocop", :require => false
+end
+
+gemspec :path => "../"
diff --git a/gemfiles/3.2.gemfile b/gemfiles/3.2.gemfile
index 4e7c4bcc7..5d861102e 100644
--- a/gemfiles/3.2.gemfile
+++ b/gemfiles/3.2.gemfile
@@ -9,6 +9,7 @@ gem "rubysl", :platforms => :rbx
gem "racc", :platforms => :rbx
gem "pry"
gem "rails", "~> 3.2.0"
+gem "aws-sdk", "~> 2.0"
group :development, :test do
gem "mime-types", "~> 1.16"
diff --git a/gemfiles/4.1.awsv1.gemfile b/gemfiles/4.1.awsv1.gemfile
new file mode 100644
index 000000000..d3bab0bed
--- /dev/null
+++ b/gemfiles/4.1.awsv1.gemfile
@@ -0,0 +1,20 @@
+# This file was generated by Appraisal
+
+source "https://rubygems.org"
+
+gem "sqlite3", "~> 1.3.8", :platforms => :ruby
+gem "jruby-openssl", :platforms => :jruby
+gem "activerecord-jdbcsqlite3-adapter", :platforms => :jruby
+gem "rubysl", :platforms => :rbx
+gem "racc", :platforms => :rbx
+gem "pry"
+gem "rails", "~> 4.1.0"
+gem "aws-sdk", "~> 1.5"
+
+group :development, :test do
+ gem "mime-types", "~> 1.16"
+ gem "builder"
+ gem "rubocop", :require => false
+end
+
+gemspec :path => "../"
diff --git a/gemfiles/4.1.gemfile b/gemfiles/4.1.gemfile
index 68fd6a752..609f8bec4 100644
--- a/gemfiles/4.1.gemfile
+++ b/gemfiles/4.1.gemfile
@@ -9,6 +9,7 @@ gem "rubysl", :platforms => :rbx
gem "racc", :platforms => :rbx
gem "pry"
gem "rails", "~> 4.1.0"
+gem "aws-sdk", "~> 2.0"
group :development, :test do
gem "mime-types", "~> 1.16"
diff --git a/gemfiles/4.2.awsv1.gemfile b/gemfiles/4.2.awsv1.gemfile
new file mode 100644
index 000000000..d521bb363
--- /dev/null
+++ b/gemfiles/4.2.awsv1.gemfile
@@ -0,0 +1,20 @@
+# This file was generated by Appraisal
+
+source "https://rubygems.org"
+
+gem "sqlite3", "~> 1.3.8", :platforms => :ruby
+gem "jruby-openssl", :platforms => :jruby
+gem "activerecord-jdbcsqlite3-adapter", :platforms => :jruby
+gem "rubysl", :platforms => :rbx
+gem "racc", :platforms => :rbx
+gem "pry"
+gem "rails", "~> 4.2.0"
+gem "aws-sdk", "~> 1.5"
+
+group :development, :test do
+ gem "mime-types", "~> 1.16"
+ gem "builder"
+ gem "rubocop", :require => false
+end
+
+gemspec :path => "../"
diff --git a/gemfiles/4.2.gemfile b/gemfiles/4.2.gemfile
index 0fdd126c1..3ac60abcd 100644
--- a/gemfiles/4.2.gemfile
+++ b/gemfiles/4.2.gemfile
@@ -9,6 +9,7 @@ gem "rubysl", :platforms => :rbx
gem "racc", :platforms => :rbx
gem "pry"
gem "rails", "~> 4.2.0"
+gem "aws-sdk", "~> 2.0"
group :development, :test do
gem "mime-types", "~> 1.16"
diff --git a/lib/paperclip/storage/s3.rb b/lib/paperclip/storage/s3.rb
index 24ad73dbf..5fa6f6327 100644
--- a/lib/paperclip/storage/s3.rb
+++ b/lib/paperclip/storage/s3.rb
@@ -41,7 +41,7 @@ module Storage
# * +s3_permissions+: This is a String that should be one of the "canned" access
# policies that S3 provides (more information can be found here:
# http://docs.aws.amazon.com/AmazonS3/latest/dev/ACLOverview.html)
- # The default for Paperclip is :public_read.
+    #   The default for Paperclip is :public_read (aws-sdk v1) / :'public-read' (aws-sdk v2).
#
# You can set permission on a per style bases by doing the following:
# :s3_permissions => {
@@ -93,6 +93,7 @@ module Storage
# S3 (strictly speaking) does not support directories, you can still use a / to
# separate parts of your file name.
# * +s3_host_name+: If you are using your bucket in Tokyo region etc, write host_name.
+ # * +s3_region+: For aws-sdk v2, s3_region is required.
# * +s3_metadata+: These key/value pairs will be stored with the
# object. This option works by prefixing each key with
# "x-amz-meta-" before sending it as a header on the object
@@ -114,20 +115,26 @@ module S3
def self.extended base
begin
require 'aws-sdk'
+ const_set('AWS_CLASS', defined?(::Aws) ? ::Aws : ::AWS)
+ const_set('AWS_BASE_ERROR',
+ defined?(::Aws) ? Aws::Errors::ServiceError : AWS::Errors::Base)
+ const_set('DEFAULT_PERMISSION',
+                    defined?(::Aws) ? :'public-read' : :public_read)
+
rescue LoadError => e
e.message << " (You may need to install the aws-sdk gem)"
raise e
- end unless defined?(AWS::Core)
+ end unless defined?(AWS_CLASS)
# Overriding log formatter to make sure it return a UTF-8 string
- if defined?(AWS::Core::LogFormatter)
- AWS::Core::LogFormatter.class_eval do
+ if defined?(AWS_CLASS::Core::LogFormatter)
+ AWS_CLASS::Core::LogFormatter.class_eval do
def summarize_hash(hash)
hash.map { |key, value| ":#{key}=>#{summarize_value(value)}".force_encoding('UTF-8') }.sort.join(',')
end
end
- elsif defined?(AWS::Core::ClientLogging)
- AWS::Core::ClientLogging.class_eval do
+ elsif defined?(AWS_CLASS::Core::ClientLogging)
+ AWS_CLASS::Core::ClientLogging.class_eval do
def sanitize_hash(hash)
hash.map { |key, value| "#{sanitize_value(key)}=>#{sanitize_value(value)}".force_encoding('UTF-8') }.sort.join(',')
end
@@ -141,7 +148,7 @@ def sanitize_hash(hash)
Proc.new do |style, attachment|
permission = (@s3_permissions[style.to_s.to_sym] || @s3_permissions[:default])
permission = permission.call(attachment, style) if permission.respond_to?(:call)
- (permission == :public_read) ? 'http' : 'https'
+ (permission == DEFAULT_PERMISSION) ? 'http' : 'https'
end
@s3_metadata = @options[:s3_metadata] || {}
@s3_headers = {}
@@ -149,7 +156,7 @@ def sanitize_hash(hash)
@s3_storage_class = set_storage_class(@options[:s3_storage_class])
- @s3_server_side_encryption = :aes256
+ @s3_server_side_encryption = "AES256"
if @options[:s3_server_side_encryption].blank?
@s3_server_side_encryption = false
end
@@ -182,8 +189,13 @@ def sanitize_hash(hash)
def expiring_url(time = 3600, style_name = default_style)
if path(style_name)
- base_options = { :expires => time, :secure => use_secure_protocol?(style_name) }
- s3_object(style_name).url_for(:read, base_options.merge(s3_url_options)).to_s
+ if aws_v1?
+ base_options = { :expires => time, :secure => use_secure_protocol?(style_name) }
+ s3_object(style_name).url_for(:read, base_options.merge(s3_url_options)).to_s
+ else
+ base_options = { :expires_in => time }
+ s3_object(style_name).presigned_url(:get, base_options.merge(s3_url_options)).to_s
+ end
else
url(style_name)
end
@@ -200,6 +212,13 @@ def s3_host_name
host_name || s3_credentials[:s3_host_name] || "s3.amazonaws.com"
end
+ def s3_region
+ region = @options[:s3_region]
+ region = region.call(self) if region.is_a?(Proc)
+
+ region || s3_credentials[:s3_region]
+ end
+
def s3_host_alias
@s3_host_alias = @options[:s3_host_alias]
@s3_host_alias = @s3_host_alias.call(self) if @s3_host_alias.respond_to?(:call)
@@ -220,7 +239,11 @@ def bucket_name
def s3_interface
@s3_interface ||= begin
- config = { :s3_endpoint => s3_host_name }
+ config = if aws_v1?
+ { :s3_endpoint => s3_host_name }
+ else
+ { :region => s3_region }
+ end
if using_http_proxy?
@@ -234,7 +257,7 @@ def s3_interface
config[:proxy_uri] = URI::HTTP.build(proxy_opts)
end
- [:access_key_id, :secret_access_key, :credential_provider].each do |opt|
+ [:access_key_id, :secret_access_key, :credential_provider, :credentials].each do |opt|
config[opt] = s3_credentials[opt] if s3_credentials[opt]
end
@@ -244,15 +267,31 @@ def s3_interface
def obtain_s3_instance_for(options)
instances = (Thread.current[:paperclip_s3_instances] ||= {})
- instances[options] ||= AWS::S3.new(options)
+ instances[options] ||= if aws_v1?
+ AWS_CLASS::S3.new(options)
+ else
+ AWS_CLASS::S3::Resource.new(options)
+ end
end
def s3_bucket
- @s3_bucket ||= s3_interface.buckets[bucket_name]
+ @s3_bucket ||= if aws_v1?
+ s3_interface.buckets[bucket_name]
+ else
+ s3_interface.bucket(bucket_name)
+ end
+ end
+
+ def style_name_as_path(style_name)
+ path(style_name).sub(%r{\A/},'')
end
def s3_object style_name = default_style
- s3_bucket.objects[path(style_name).sub(%r{\A/},'')]
+ if aws_v1?
+ s3_bucket.objects[style_name_as_path(style_name)]
+ else
+ s3_bucket.object(style_name_as_path(style_name))
+ end
end
def using_http_proxy?
@@ -277,7 +316,7 @@ def http_proxy_password
def set_permissions permissions
permissions = { :default => permissions } unless permissions.respond_to?(:merge)
- permissions.merge :default => (permissions[:default] || :public_read)
+ permissions.merge :default => (permissions[:default] || DEFAULT_PERMISSION)
end
def set_storage_class(storage_class)
@@ -297,7 +336,7 @@ def exists?(style = default_style)
else
false
end
- rescue AWS::Errors::Base => e
+ rescue AWS_BASE_ERROR => e
false
end
@@ -323,7 +362,11 @@ def s3_protocol(style = default_style, with_colon = false)
end
def create_bucket
- s3_interface.buckets.create(bucket_name)
+ if aws_v1?
+ s3_interface.buckets.create(bucket_name)
+ else
+ s3_interface.bucket(bucket_name).create
+ end
end
def flush_writes #:nodoc:
@@ -356,11 +399,15 @@ def flush_writes #:nodoc:
write_options[:metadata] = @s3_metadata unless @s3_metadata.empty?
write_options.merge!(@s3_headers)
- s3_object(style).write(file, write_options)
- rescue AWS::S3::Errors::NoSuchBucket
+ if aws_v1?
+ s3_object(style).write(file, write_options)
+ else
+ s3_object(style).upload_file(file.path, write_options)
+ end
+ rescue AWS_CLASS::S3::Errors::NoSuchBucket
create_bucket
retry
- rescue AWS::S3::Errors::SlowDown
+ rescue AWS_CLASS::S3::Errors::SlowDown
retries += 1
if retries <= 5
sleep((2 ** retries) * 0.5)
@@ -382,8 +429,12 @@ def flush_deletes #:nodoc:
@queued_for_delete.each do |path|
begin
log("deleting #{path}")
- s3_bucket.objects[path.sub(%r{\A/},'')].delete
- rescue AWS::Errors::Base => e
+ if aws_v1?
+ s3_bucket.objects[path.sub(%r{\A/},'')]
+ else
+ s3_bucket.object(path.sub(%r{\A/},''))
+ end.delete
+ rescue AWS_BASE_ERROR => e
# Ignore this.
end
end
@@ -393,17 +444,21 @@ def flush_deletes #:nodoc:
def copy_to_local_file(style, local_dest_path)
log("copying #{path(style)} to local file #{local_dest_path}")
::File.open(local_dest_path, 'wb') do |local_file|
- s3_object(style).read do |chunk|
+ s3_object(style).send(aws_v1? ? :read : :get) do |chunk|
local_file.write(chunk)
end
end
- rescue AWS::Errors::Base => e
+ rescue AWS_BASE_ERROR => e
warn("#{e} - cannot copy #{path(style)} to local file #{local_dest_path}")
false
end
private
+ def aws_v1?
+ Gem::Version.new(AWS_CLASS::VERSION) < Gem::Version.new(2)
+ end
+
def find_credentials creds
case creds
when File
diff --git a/paperclip.gemspec b/paperclip.gemspec
index b5115586f..9ea052cae 100644
--- a/paperclip.gemspec
+++ b/paperclip.gemspec
@@ -31,7 +31,7 @@ Gem::Specification.new do |s|
s.add_development_dependency('rspec')
s.add_development_dependency('appraisal')
s.add_development_dependency('mocha')
- s.add_development_dependency('aws-sdk', '>= 1.5.7', "<= 2.0")
+ s.add_development_dependency('aws-sdk', '>= 1.5.7', "< 3.0")
s.add_development_dependency('bourne')
s.add_development_dependency('cucumber', '~> 1.3.18')
s.add_development_dependency('aruba')
diff --git a/spec/paperclip/storage/s3_live_spec.rb b/spec/paperclip/storage/s3_live_spec.rb
index f4304aa5b..402bcd0f9 100644
--- a/spec/paperclip/storage/s3_live_spec.rb
+++ b/spec/paperclip/storage/s3_live_spec.rb
@@ -8,6 +8,7 @@
storage: :s3,
bucket: ENV["S3_BUCKET"],
path: ":class/:attachment/:id/:style.:extension",
+ s3_region: ENV["S3_REGION"],
s3_credentials: {
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
aws_secre_access_key: ENV['AWS_SECRET_ACCESS_KEY']
@@ -45,6 +46,7 @@
storage: :s3,
bucket: ENV["S3_BUCKET"],
path: ":class/:attachment/:id/:style.:extension",
+ s3_region: ENV["S3_REGION"],
s3_credentials: {
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
aws_secre_access_key: ENV['AWS_SECRET_ACCESS_KEY']
@@ -64,6 +66,7 @@
storage: :s3,
bucket: ENV["S3_BUCKET"],
path: ":class/:attachment/:id/:style.:extension",
+ s3_region: ENV["S3_REGION"],
s3_credentials: {
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
aws_secre_access_key: ENV['AWS_SECRET_ACCESS_KEY']
@@ -105,6 +108,7 @@
rebuild_model styles: { thumb: "100x100", square: "32x32#" },
storage: :s3,
bucket: ENV["S3_BUCKET"],
+ s3_region: ENV["S3_REGION"],
s3_credentials: {
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
aws_secre_access_key: ENV['AWS_SECRET_ACCESS_KEY']
@@ -136,7 +140,7 @@
it "is destroyable" do
url = @dummy.avatar.url
@dummy.destroy
- assert_not_found_response url
+ assert_forbidden_response url
end
end
@@ -146,12 +150,12 @@
storage: :s3,
bucket: ENV["S3_BUCKET"],
path: ":class/:attachment/:id/:style.:extension",
+ s3_region: ENV["S3_REGION"],
s3_credentials: {
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID'],
aws_secre_access_key: ENV['AWS_SECRET_ACCESS_KEY']
},
- s3_server_side_encryption: :aes256
-
+ s3_server_side_encryption: "AES256"
Dummy.delete_all
@dummy = Dummy.new
end
@@ -173,7 +177,7 @@
end
it "is encrypted on S3" do
- assert @dummy.avatar.s3_object.server_side_encryption == :aes256
+ assert @dummy.avatar.s3_object.server_side_encryption == "AES256"
end
end
end
diff --git a/spec/paperclip/storage/s3_spec.rb b/spec/paperclip/storage/s3_spec.rb
index e29a17d45..c361bfc35 100644
--- a/spec/paperclip/storage/s3_spec.rb
+++ b/spec/paperclip/storage/s3_spec.rb
@@ -3,17 +3,24 @@
describe Paperclip::Storage::S3 do
before do
- AWS.stub!
+ if defined?(::Aws)
+ Aws.config[:stub_responses] = true
+ else
+ AWS.stub!
+ end
+ end
+
+ def aws2_add_region
+ defined?(::Aws) ? { s3_region: 'us-east-1' } : {}
end
context "Parsing S3 credentials" do
before do
@proxy_settings = {host: "127.0.0.1", port: 8888, user: "foo", password: "bar"}
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
http_proxy: @proxy_settings,
s3_credentials: {not: :important}
-
@dummy = Dummy.new
@avatar = @dummy.avatar
end
@@ -55,7 +62,8 @@
context ":bucket option via :s3_credentials" do
before do
- rebuild_model storage: :s3, s3_credentials: {bucket: 'testing'}
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ s3_credentials: {bucket: 'testing'}
@dummy = Dummy.new
end
@@ -68,7 +76,8 @@
context ":bucket option" do
before do
- rebuild_model storage: :s3, bucket: "testing", s3_credentials: {}
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ bucket: "testing", s3_credentials: {}
@dummy = Dummy.new
end
@@ -81,7 +90,7 @@
context "missing :bucket option" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
http_proxy: @proxy_settings,
s3_credentials: {not: :important}
@@ -98,7 +107,7 @@
context "" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
bucket: "bucket",
path: ":attachment/:basename:dotextension",
@@ -125,8 +134,8 @@
["http", :http, ""].each do |protocol|
context "as #{protocol.inspect}" do
before do
- rebuild_model storage: :s3, s3_protocol: protocol
-
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ s3_protocol: protocol
@dummy = Dummy.new
end
@@ -139,7 +148,7 @@
context "s3_protocol: 'https'" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
s3_protocol: 'https',
bucket: "bucket",
@@ -156,7 +165,7 @@
context "s3_protocol: ''" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
s3_protocol: '',
bucket: "bucket",
@@ -173,7 +182,7 @@
context "s3_protocol: :https" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
s3_protocol: :https,
bucket: "bucket",
@@ -190,7 +199,7 @@
context "s3_protocol: ''" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
s3_protocol: '',
bucket: "bucket",
@@ -207,7 +216,7 @@
context "An attachment that uses S3 for storage and has the style in the path" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
styles: {
@@ -232,13 +241,23 @@
end
end
+ # if using aws-sdk-v2, the s3_host_name will be defined by the s3_region
context "s3_host_name" do
before do
- rebuild_model storage: :s3,
- s3_credentials: {},
- bucket: "bucket",
- path: ":attachment/:basename:dotextension",
- s3_host_name: "s3-ap-northeast-1.amazonaws.com"
+ if defined?(::Aws)
+ rebuild_model storage: :s3,
+ s3_credentials: {},
+ bucket: "bucket",
+ path: ":attachment/:basename:dotextension",
+ s3_host_name: "s3-ap-northeast-1.amazonaws.com",
+ s3_region: "ap-northeast-1"
+ else
+ rebuild_model storage: :s3,
+ s3_credentials: {},
+ bucket: "bucket",
+ path: ":attachment/:basename:dotextension",
+ s3_host_name: "s3-ap-northeast-1.amazonaws.com"
+ end
@dummy = Dummy.new
@dummy.avatar = stringy_file
@dummy.stubs(:new_record?).returns(false)
@@ -249,13 +268,16 @@
end
it "uses the S3 bucket with the correct host name" do
- assert_equal "s3-ap-northeast-1.amazonaws.com", @dummy.avatar.s3_bucket.config.s3_endpoint
+ assert_equal "s3-ap-northeast-1.amazonaws.com",
+ (defined?(::Aws) ?
+ @dummy.avatar.s3_bucket.client.config.endpoint.host :
+ @dummy.avatar.s3_bucket.config.s3_endpoint)
end
end
context "dynamic s3_host_name" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
bucket: "bucket",
path: ":attachment/:basename:dotextension",
@@ -276,8 +298,8 @@ class << @dummy
context "An attachment that uses S3 for storage and has styles that return different file types" do
before do
- rebuild_model styles: { large: ['500x500#', :jpg] },
- storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ styles: { large: ['500x500#', :jpg] },
bucket: "bucket",
path: ":attachment/:basename:dotextension",
s3_credentials: {
@@ -311,8 +333,10 @@ class << @dummy
context "An attachment that uses S3 for storage and has a proc for styles" do
before do
- rebuild_model styles: lambda { |attachment| attachment.instance.counter; {thumbnail: { geometry: "50x50#", s3_headers: {'Cache-Control' => 'max-age=31557600'}} }},
- storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ styles: lambda { |attachment| attachment.instance.counter
+ {thumbnail: { geometry: "50x50#",
+ s3_headers: {'Cache-Control' => 'max-age=31557600'}} }},
bucket: "bucket",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -336,8 +360,15 @@ def counter
object = stub
@dummy.avatar.stubs(:s3_object).with(:original).returns(object)
@dummy.avatar.stubs(:s3_object).with(:thumbnail).returns(object)
- object.expects(:write).with(anything, content_type: 'image/png', acl: :public_read)
- object.expects(:write).with(anything, content_type: 'image/png', acl: :public_read, cache_control: 'max-age=31557600')
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ cache_control: 'max-age=31557600')
@dummy.save
end
@@ -351,8 +382,8 @@ def counter
context "An attachment that uses S3 for storage and has spaces in file name" do
before do
rebuild_model(
+ (aws2_add_region).merge storage: :s3,
styles: { large: ["500x500#", :jpg] },
- storage: :s3,
bucket: "bucket",
s3_credentials: { "access_key_id" => "12345",
"secret_access_key" => "54321" }
@@ -376,8 +407,8 @@ def counter
context "An attachment that uses S3 for storage and has a question mark in file name" do
before do
- rebuild_model styles: { large: ['500x500#', :jpg] },
- storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ styles: { large: ['500x500#', :jpg] },
bucket: "bucket",
s3_credentials: {
'access_key_id' => "12345",
@@ -408,7 +439,7 @@ def original_filename
context "" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
bucket: "bucket",
path: ":attachment/:basename:dotextension",
@@ -426,7 +457,7 @@ def original_filename
context "" do
before do
rebuild_model(
- storage: :s3,
+ (aws2_add_region).merge storage: :s3,
s3_credentials: {
production: { bucket: "prod_bucket" },
development: { bucket: "dev_bucket" }
@@ -448,7 +479,7 @@ def original_filename
context "generating a url with a proc as the host alias" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: { bucket: "prod_bucket" },
s3_host_alias: Proc.new{|atch| "cdn#{atch.instance.counter % 4}.example.com"},
path: ":attachment/:basename:dotextension",
@@ -478,7 +509,7 @@ def counter
context "" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {},
bucket: "bucket",
path: ":attachment/:basename:dotextension",
@@ -509,7 +540,7 @@ def counter
url: ":s3_alias_url"
}
- rebuild_model base_options.merge(options)
+ rebuild_model (aws2_add_region).merge base_options.merge(options)
}
end
@@ -522,8 +553,12 @@ def counter
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:url_for).with(:read, expires: 3600, secure: true)
+ if defined?(::Aws)
+ object.expects(:presigned_url).with(:get, expires_in: 3600)
+ else
+ object.expects(:url_for).with(:read, expires: 3600, secure: true)
+ end
@dummy.avatar.expiring_url
end
end
@@ -537,8 +572,15 @@ def counter
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:url_for).with(:read, expires: 3600, secure: true, response_content_disposition: "inline")
-
+ if defined?(::Aws)
+ object.expects(:presigned_url)
+ .with(:get, expires_in: 3600,
+ response_content_disposition: "inline")
+ else
+ object.expects(:url_for)
+ .with(:read, expires: 3600, secure: true,
+ response_content_disposition: "inline")
+ end
@dummy.avatar.expiring_url
end
end
@@ -559,8 +601,14 @@ def counter
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:url_for).with(:read, expires: 3600, secure: true, response_content_type: "image/png")
-
+ if defined?(::Aws)
+ object.expects(:presigned_url)
+ .with(:get, expires_in: 3600, response_content_type: "image/png")
+ else
+ object.expects(:url_for)
+ .with(:read, expires: 3600, secure: true,
+ response_content_type: "image/png")
+ end
@dummy.avatar.expiring_url
end
end
@@ -588,15 +636,15 @@ def counter
context "Generating a url with an expiration for each style" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {
- production: { bucket: "prod_bucket" },
- development: { bucket: "dev_bucket" }
- },
- s3_permissions: :private,
- s3_host_alias: "something.something.com",
- path: ":attachment/:style/:basename:dotextension",
- url: ":s3_alias_url"
+ production: { bucket: "prod_bucket" },
+ development: { bucket: "dev_bucket" }
+ },
+ s3_permissions: :private,
+ s3_host_alias: "something.something.com",
+ path: ":attachment/:style/:basename:dotextension",
+ url: ":s3_alias_url"
rails_env("production") do
@dummy = Dummy.new
@@ -607,26 +655,34 @@ def counter
it "generates a url for the thumb" do
object = stub
@dummy.avatar.stubs(:s3_object).with(:thumb).returns(object)
- object.expects(:url_for).with(:read, expires: 1800, secure: true)
+ if defined?(::Aws)
+ object.expects(:presigned_url).with(:get, expires_in: 1800)
+ else
+ object.expects(:url_for).with(:read, expires: 1800, secure: true)
+ end
@dummy.avatar.expiring_url(1800, :thumb)
end
it "generates a url for the default style" do
object = stub
@dummy.avatar.stubs(:s3_object).with(:original).returns(object)
- object.expects(:url_for).with(:read, expires: 1800, secure: true)
+ if defined?(::Aws)
+ object.expects(:presigned_url).with(:get, expires_in: 1800)
+ else
+ object.expects(:url_for).with(:read, expires: 1800, secure: true)
+ end
@dummy.avatar.expiring_url(1800)
end
end
context "Parsing S3 credentials with a bucket in them" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: {
- production: { bucket: "prod_bucket" },
- development: { bucket: "dev_bucket" }
- }
- @dummy = Dummy.new
+ production: { bucket: "prod_bucket" },
+ development: { bucket: "dev_bucket" }
+ }
+ @dummy = Dummy.new
end
it "gets the right bucket in production" do
@@ -644,47 +700,81 @@ def counter
end
end
+ # for aws-sdk-v2 the bucket.name is determined by the :s3_region
context "Parsing S3 credentials with a s3_host_name in them" do
before do
- rebuild_model storage: :s3,
- bucket: 'testing',
- s3_credentials: {
- production: { s3_host_name: "s3-world-end.amazonaws.com" },
- development: { s3_host_name: "s3-ap-northeast-1.amazonaws.com" }
- }
- @dummy = Dummy.new
+ if defined?(::Aws)
+ rebuild_model storage: :s3,
+ bucket: 'testing',
+ s3_credentials: {
+ production: {
+ s3_region: "world-end",
+ s3_host_name: "s3-world-end.amazonaws.com" },
+ development: {
+ s3_region: "ap-northeast-1",
+ s3_host_name: "s3-ap-northeast-1.amazonaws.com" }
+ }
+ else
+ rebuild_model storage: :s3,
+ bucket: 'testing',
+ s3_credentials: {
+ production: { s3_host_name: "s3-world-end.amazonaws.com" },
+ development: { s3_host_name: "s3-ap-northeast-1.amazonaws.com" }
+ }
+ end
+ @dummy = Dummy.new
end
it "gets the right s3_host_name in production" do
rails_env("production") do
assert_match %r{^s3-world-end.amazonaws.com}, @dummy.avatar.s3_host_name
- assert_match %r{^s3-world-end.amazonaws.com}, @dummy.avatar.s3_bucket.config.s3_endpoint
+ if defined?(::Aws)
+ assert_match %r{^s3.world-end.amazonaws.com},
+ @dummy.avatar.s3_bucket.client.config.endpoint.host
+ else
+ assert_match %r{^s3-world-end.amazonaws.com},
+ @dummy.avatar.s3_bucket.config.s3_endpoint
+ end
end
end
it "gets the right s3_host_name in development" do
rails_env("development") do
assert_match %r{^s3-ap-northeast-1.amazonaws.com}, @dummy.avatar.s3_host_name
- assert_match %r{^s3-ap-northeast-1.amazonaws.com}, @dummy.avatar.s3_bucket.config.s3_endpoint
+ if defined?(::Aws)
+ assert_match %r{^s3-ap-northeast-1.amazonaws.com},
+ @dummy.avatar.s3_bucket.client.config.endpoint.host
+ else
+ assert_match %r{^s3-ap-northeast-1.amazonaws.com},
+ @dummy.avatar.s3_bucket.config.s3_endpoint
+ end
end
end
it "gets the right s3_host_name if the key does not exist" do
rails_env("test") do
assert_match %r{^s3.amazonaws.com}, @dummy.avatar.s3_host_name
- assert_match %r{^s3.amazonaws.com}, @dummy.avatar.s3_bucket.config.s3_endpoint
+ if defined?(::Aws)
+ # :s3_region is *required* for aws-sdk-v2
+ assert_raises(Aws::Errors::MissingRegionError) do
+ @dummy.avatar.s3_bucket.client.config.endpoint.host
+ end
+ else
+ assert_match %r{^s3.amazonaws.com},
+ @dummy.avatar.s3_bucket.config.s3_endpoint
+ end
end
end
end
context "An attachment with S3 storage" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
- aws_access_key_id: "12345",
- aws_secret_access_key: "54321"
+ access_key_id: "12345",
+ secret_access_key: "54321"
}
end
@@ -714,15 +804,22 @@ def counter
it "is rewound after flush_writes" do
@dummy.avatar.instance_eval "def after_flush_writes; end"
- @dummy.avatar.stubs(:s3_object).returns(stub(write: true))
-
+ if defined?(::Aws)
+ @dummy.avatar.stubs(:s3_object).returns(stub(upload_file: true))
+ else
+ @dummy.avatar.stubs(:s3_object).returns(stub(write: true))
+ end
files = @dummy.avatar.queued_for_write.values.each(&:read)
@dummy.save
assert files.none?(&:eof?), "Expect all the files to be rewound."
end
it "is removed after after_flush_writes" do
- @dummy.avatar.stubs(:s3_object).returns(stub(write: true))
+ if defined?(::Aws)
+ @dummy.avatar.stubs(:s3_object).returns(stub(upload_file: true))
+ else
+ @dummy.avatar.stubs(:s3_object).returns(stub(write: true))
+ end
paths = @dummy.avatar.queued_for_write.values.map(&:path)
@dummy.save
assert paths.none?{ |path| File.exist?(path) },
@@ -731,10 +828,17 @@ def counter
it "will retry to save again but back off on SlowDown" do
@dummy.avatar.stubs(:sleep)
- AWS::S3::S3Object.any_instance.stubs(:write).
- raises(AWS::S3::Errors::SlowDown.new(stub, stub(status: 503, body: "")))
-
- expect {@dummy.save}.to raise_error(AWS::S3::Errors::SlowDown)
+ if defined?(::Aws)
+ Aws::S3::Object.any_instance.stubs(:upload_file).
+ raises(Aws::S3::Errors::SlowDown.new(stub,
+ stub(status: 503, body: "")))
+ expect {@dummy.save}.to raise_error(Aws::S3::Errors::SlowDown)
+ else
+ AWS::S3::S3Object.any_instance.stubs(:write).
+ raises(AWS::S3::Errors::SlowDown.new(stub,
+ stub(status: 503, body: "")))
+ expect {@dummy.save}.to raise_error(AWS::S3::Errors::SlowDown)
+ end
expect(@dummy.avatar).to have_received(:sleep).with(1)
expect(@dummy.avatar).to have_received(:sleep).with(2)
expect(@dummy.avatar).to have_received(:sleep).with(4)
@@ -746,9 +850,9 @@ def counter
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read)
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION)
@dummy.save
end
@@ -759,12 +863,21 @@ def counter
context "and saved without a bucket" do
before do
- AWS::S3::BucketCollection.any_instance.expects(:create).with("testing")
- AWS::S3::S3Object.any_instance.stubs(:write).
- raises(AWS::S3::Errors::NoSuchBucket.new(stub,
- stub(status: 404,
- body: ""))).
- then.returns(nil)
+ if defined?(::Aws)
+ Aws::S3::Bucket.any_instance.expects(:create)
+ Aws::S3::Object.any_instance.stubs(:upload_file).
+ raises(Aws::S3::Errors::NoSuchBucket
+ .new(stub,
+ stub(status: 404, body: ""))).then.returns(nil)
+ else
+ AWS::S3::BucketCollection.any_instance.expects(:create)
+ .with("testing")
+ AWS::S3::S3Object.any_instance.stubs(:write).
+ raises(AWS::S3::Errors::NoSuchBucket.new(stub,
+ stub(status: 404,
+ body: ""))).
+ then.returns(nil)
+ end
@dummy.save
end
@@ -775,8 +888,13 @@ def counter
context "and remove" do
before do
- AWS::S3::S3Object.any_instance.stubs(:exists?).returns(true)
- AWS::S3::S3Object.any_instance.stubs(:delete)
+ if defined?(::Aws)
+ Aws::S3::Object.any_instance.stubs(:exists?).returns(true)
+ Aws::S3::Object.any_instance.stubs(:delete)
+ else
+ AWS::S3::S3Object.any_instance.stubs(:exists?).returns(true)
+ AWS::S3::S3Object.any_instance.stubs(:delete)
+ end
@dummy.destroy
end
@@ -787,7 +905,14 @@ def counter
context 'that the file were missing' do
before do
- AWS::S3::S3Object.any_instance.stubs(:exists?).raises(AWS::Errors::Base)
+ if defined?(::Aws)
+ Aws::S3::Object.any_instance.stubs(:exists?)
+ .raises(Aws::S3::Errors::ServiceError.new("rspec stub raises",
+ "object exists?"))
+ else
+ AWS::S3::S3Object.any_instance.stubs(:exists?)
+ .raises(AWS::Errors::Base)
+ end
end
it 'returns false on exists?' do
@@ -799,7 +924,7 @@ def counter
context "An attachment with S3 storage and bucket defined as a Proc" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: lambda { |attachment| "bucket_#{attachment.instance.other}" },
s3_credentials: {not: :important}
end
@@ -814,7 +939,7 @@ def counter
context "An attachment with S3 storage and S3 credentials defined as a Proc" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: {not: :important},
s3_credentials: lambda { |attachment|
Hash['access_key_id' => "access#{attachment.instance.other}", 'secret_access_key' => "secret#{attachment.instance.other}"]
@@ -831,22 +956,35 @@ def counter
before do
class DummyCredentialProvider; end
- rebuild_model storage: :s3,
- bucket: "testing",
- s3_credentials: {
- credential_provider: DummyCredentialProvider.new
- }
- @dummy = Dummy.new
+ if defined?(::Aws)
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ bucket: "testing",
+ s3_credentials: {
+ credentials: DummyCredentialProvider.new
+ }
+ else
+ rebuild_model storage: :s3,
+ bucket: "testing",
+ s3_credentials: {
+ credential_provider: DummyCredentialProvider.new
+ }
+ end
+ @dummy = Dummy.new
end
it "sets the credential-provider" do
- expect(@dummy.avatar.s3_bucket.config.credential_provider).to be_a DummyCredentialProvider
+ if defined?(::Aws)
+ expect(@dummy.avatar.s3_bucket.client.config.credentials).to be_a DummyCredentialProvider
+ else
+ expect(@dummy.avatar.s3_bucket.config.credential_provider).to be_a DummyCredentialProvider
+ end
end
end
context "An attachment with S3 storage and S3 credentials in an unsupported manor" do
before do
- rebuild_model storage: :s3, bucket: "testing", s3_credentials: ["unsupported"]
+ rebuild_model (aws2_add_region).merge storage: :s3,
+ bucket: "testing", s3_credentials: ["unsupported"]
@dummy = Dummy.new
end
@@ -859,7 +997,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and S3 credentials not supplied" do
before do
- rebuild_model storage: :s3, bucket: "testing"
+ rebuild_model (aws2_add_region).merge storage: :s3, bucket: "testing"
@dummy = Dummy.new
end
@@ -870,7 +1008,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and specific s3 headers set" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -893,10 +1031,12 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- cache_control: 'max-age=31557600')
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ cache_control: 'max-age=31557600')
@dummy.save
end
@@ -909,7 +1049,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and metadata set using header names" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -932,10 +1072,12 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- metadata: { "color" => "red" })
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ metadata: { "color" => "red" })
@dummy.save
end
@@ -948,7 +1090,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and metadata set using the :s3_metadata option" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -971,10 +1113,12 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- metadata: { "color" => "red" })
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ metadata: { "color" => "red" })
@dummy.save
end
@@ -988,7 +1132,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and storage class set" do
context "using the header name" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1011,10 +1155,12 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- storage_class: "reduced_redundancy")
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: 'image/png',
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ storage_class: "reduced_redundancy")
@dummy.save
end
@@ -1027,7 +1173,7 @@ class DummyCredentialProvider; end
context "using per style hash" do
before do
- rebuild_model :storage => :s3,
+ rebuild_model (aws2_add_region).merge :storage => :s3,
:bucket => "testing",
:path => ":attachment/:style/:basename.:extension",
:styles => {
@@ -1056,9 +1202,15 @@ class DummyCredentialProvider; end
object = stub
[:thumb, :original].each do |style|
@dummy.avatar.stubs(:s3_object).with(style).returns(object)
- expected_options = {:content_type => "image/png", :acl => :public_read}
+
+ expected_options = {
+ :content_type => "image/png",
+ :acl => Paperclip::Storage::S3::DEFAULT_PERMISSION
+ }
expected_options.merge!(:storage_class => :reduced_redundancy) if style == :thumb
- object.expects(:write).with(anything, expected_options)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, expected_options)
end
@dummy.save
end
@@ -1072,7 +1224,7 @@ class DummyCredentialProvider; end
context "using global hash option" do
before do
- rebuild_model :storage => :s3,
+ rebuild_model (aws2_add_region).merge :storage => :s3,
:bucket => "testing",
:path => ":attachment/:style/:basename.:extension",
:styles => {
@@ -1099,9 +1251,11 @@ class DummyCredentialProvider; end
object = stub
[:thumb, :original].each do |style|
@dummy.avatar.stubs(:s3_object).with(style).returns(object)
- object.expects(:write).with(anything, :content_type => "image/png",
- :acl => :public_read,
- :storage_class => :reduced_redundancy)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, :content_type => "image/png",
+ :acl => Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ :storage_class => :reduced_redundancy)
end
@dummy.save
end
@@ -1118,7 +1272,7 @@ class DummyCredentialProvider; end
[nil, false, ''].each do |tech|
before do
rebuild_model(
- storage: :s3,
+ (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1140,9 +1294,10 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, :content_type => "image/png",
+ :acl => Paperclip::Storage::S3::DEFAULT_PERMISSION)
@dummy.save
end
@@ -1156,7 +1311,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and using AES256 encryption" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1179,10 +1334,11 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- server_side_encryption: :aes256)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, content_type: "image/png",
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ server_side_encryption: :aes256)
@dummy.save
end
@@ -1195,7 +1351,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and storage class set using the :storage_class option" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1218,10 +1374,12 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- storage_class: :reduced_redundancy)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: "image/png",
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ storage_class: :reduced_redundancy)
@dummy.save
end
@@ -1239,7 +1397,7 @@ class DummyCredentialProvider; end
ENV['S3_SECRET'] = 'pathname_secret'
rails_env('test') do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: Pathname.new(fixture_file('s3.yml'))
Dummy.delete_all
@@ -1249,8 +1407,16 @@ class DummyCredentialProvider; end
it "parses the credentials" do
assert_equal 'pathname_bucket', @dummy.avatar.bucket_name
- assert_equal 'pathname_key', @dummy.avatar.s3_bucket.config.access_key_id
- assert_equal 'pathname_secret', @dummy.avatar.s3_bucket.config.secret_access_key
+
+ assert_equal 'pathname_key',
+ (defined?(::Aws) ?
+ @dummy.avatar.s3_bucket.client.config.access_key_id :
+ @dummy.avatar.s3_bucket.config.access_key_id)
+
+ assert_equal 'pathname_secret',
+ (defined?(::Aws) ?
+ @dummy.avatar.s3_bucket.client.config.secret_access_key :
+ @dummy.avatar.s3_bucket.config.secret_access_key)
end
end
@@ -1261,7 +1427,7 @@ class DummyCredentialProvider; end
ENV['S3_SECRET'] = 'env_secret'
rails_env('test') do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
s3_credentials: File.new(fixture_file('s3.yml'))
Dummy.delete_all
@@ -1272,15 +1438,23 @@ class DummyCredentialProvider; end
it "runs the file through ERB" do
assert_equal 'env_bucket', @dummy.avatar.bucket_name
- assert_equal 'env_key', @dummy.avatar.s3_bucket.config.access_key_id
- assert_equal 'env_secret', @dummy.avatar.s3_bucket.config.secret_access_key
+
+ assert_equal 'env_key',
+ (defined?(::Aws) ?
+ @dummy.avatar.s3_bucket.client.config.access_key_id :
+ @dummy.avatar.s3_bucket.config.access_key_id)
+
+ assert_equal 'env_secret',
+ (defined?(::Aws) ?
+ @dummy.avatar.s3_bucket.client.config.secret_access_key :
+ @dummy.avatar.s3_bucket.config.secret_access_key)
end
end
context "S3 Permissions" do
context "defaults to :public_read" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1302,9 +1476,11 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: "image/png",
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION)
@dummy.save
end
@@ -1317,7 +1493,7 @@ class DummyCredentialProvider; end
context "string permissions set" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
s3_credentials: {
@@ -1340,9 +1516,9 @@ class DummyCredentialProvider; end
before do
object = stub
@dummy.avatar.stubs(:s3_object).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :private)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything, content_type: "image/png", acl: :private)
@dummy.save
end
@@ -1355,7 +1531,7 @@ class DummyCredentialProvider; end
context "hash permissions set" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
styles: {
@@ -1385,9 +1561,11 @@ class DummyCredentialProvider; end
[:thumb, :original].each do |style|
object = stub
@dummy.avatar.stubs(:s3_object).with(style).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: style == :thumb ? :public_read : :private)
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: "image/png",
+ acl: style == :thumb ? :public_read : :private)
end
@dummy.save
end
@@ -1402,7 +1580,7 @@ class DummyCredentialProvider; end
context "proc permission set" do
before do
rebuild_model(
- storage: :s3,
+ (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
styles: {
@@ -1413,7 +1591,7 @@ class DummyCredentialProvider; end
'secret_access_key' => "54321"
},
s3_permissions: lambda {|attachment, style|
- attachment.instance.private_attachment? && style.to_sym != :thumb ? :private : :public_read
+ attachment.instance.private_attachment? && style.to_sym != :thumb ? :private : Paperclip::Storage::S3::DEFAULT_PERMISSION
}
)
end
@@ -1446,7 +1624,7 @@ class DummyCredentialProvider; end
context "An attachment with S3 storage and metadata set using a proc as headers" do
before do
rebuild_model(
- storage: :s3,
+ (aws2_add_region).merge storage: :s3,
bucket: "testing",
path: ":attachment/:style/:basename:dotextension",
styles: {
@@ -1477,10 +1655,12 @@ class DummyCredentialProvider; end
[:thumb, :original].each do |style|
object = stub
@dummy.avatar.stubs(:s3_object).with(style).returns(object)
- object.expects(:write).with(anything,
- content_type: "image/png",
- acl: :public_read,
- content_disposition: 'attachment; filename="Custom Avatar Name.png"')
+
+ object.expects((defined?(::Aws) ? :upload_file : :write))
+ .with(anything,
+ content_type: "image/png",
+ acl: Paperclip::Storage::S3::DEFAULT_PERMISSION,
+ content_disposition: 'attachment; filename="Custom Avatar Name.png"')
end
@dummy.save
end
@@ -1494,7 +1674,7 @@ class DummyCredentialProvider; end
context "path is a proc" do
before do
- rebuild_model storage: :s3,
+ rebuild_model (aws2_add_region).merge storage: :s3,
path: ->(attachment) { attachment.instance.attachment_path }
@dummy = Dummy.new
diff --git a/spec/support/assertions.rb b/spec/support/assertions.rb
index efef401c1..16c43d5a9 100644
--- a/spec/support/assertions.rb
+++ b/spec/support/assertions.rb
@@ -61,6 +61,13 @@ def assert_not_found_response(url)
end
end
+ def assert_forbidden_response(url)
+ Net::HTTP.get_response(URI.parse(url)) do |response|
+ assert_equal "403", response.code,
+ "Expected HTTP response code 403, got #{response.code}"
+ end
+ end
+
def assert_frame_dimensions(range, frames)
frames.each_with_index do |frame, frame_index|
frame.split('x').each_with_index do |dimension, dimension_index |