Commit 951b2ee5 authored by David Heinemeier Hansson

Merge branch 'master' of github.com:basecamp/activefile

@@ -4,4 +4,6 @@ gemspec
gem 'rake'
gem 'byebug'
gem 'sqlite3'
\ No newline at end of file
gem 'sqlite3'
gem 'aws-sdk'
\ No newline at end of file
@@ -24,11 +24,20 @@ GEM
minitest (~> 5.1)
tzinfo (~> 1.1)
arel (8.0.0)
aws-sdk (2.10.7)
aws-sdk-resources (= 2.10.7)
aws-sdk-core (2.10.7)
aws-sigv4 (~> 1.0)
jmespath (~> 1.0)
aws-sdk-resources (2.10.7)
aws-sdk-core (= 2.10.7)
aws-sigv4 (1.0.0)
byebug (9.0.6)
concurrent-ruby (1.0.5)
globalid (0.4.0)
activesupport (>= 4.2.0)
i18n (0.8.4)
jmespath (1.3.1)
minitest (5.10.2)
rake (12.0.0)
sqlite3 (1.3.13)
@@ -41,6 +50,7 @@ PLATFORMS
DEPENDENCIES
activefile!
aws-sdk
bundler (~> 1.15)
byebug
rake
......
@@ -7,14 +7,9 @@ class ActiveFile::Blob < ActiveRecord::Base
has_secure_token :key
store :metadata, coder: JSON
class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
class_attribute :site
class << self
# Resolves a signed blob id back to its record, raising if the
# signature does not verify.
def find_verified(signed_id)
  verified_id = verifier.verify(signed_id)
  find(verified_id)
end
def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = name
......
require "aws-sdk"
# Active File site backed by Amazon S3. Wraps an Aws::S3::Resource and a
# target bucket, and exposes the Site operations (upload, download, delete,
# exists?, byte_size, checksum) in terms of S3 objects keyed by blob key.
class ActiveFile::Sites::S3Site < ActiveFile::Site
  attr_reader :client, :bucket

  # Builds the S3 resource client with explicit credentials and resolves
  # the bucket handle all operations go through.
  def initialize(access_key_id:, secret_access_key:, region:, bucket:)
    @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region)
    @bucket = @client.bucket(bucket)
  end

  # Stores +data+ as the body of the object at +key+.
  def upload(key, data)
    object_for(key).put(body: data)
  end

  # Returns the full contents of the object at +key+, or, when a block is
  # given, streams the contents to the block in chunks via #stream.
  #
  # FIX: the original signature was `def download(key)` with no block
  # parameter, so `stream(key, &block)` referenced an undefined local and
  # raised NameError whenever a caller passed a block. Capturing &block
  # makes the block-streaming path work as intended.
  def download(key, &block)
    if block_given?
      stream(key, &block)
    else
      object_for(key).read
    end
  end

  # Removes the object stored at +key+.
  def delete(key)
    object_for(key).delete
  end

  # True if an object exists at +key+.
  def exists?(key)
    object_for(key).exists?
  end

  # Size in bytes of the object at +key+.
  # NOTE(review): aws-sdk v2 HEAD responses expose #content_length rather
  # than a [:size] element — confirm head[:size] actually works here.
  def byte_size(key)
    object_for(key).head[:size]
  end

  # Best-effort MD5 digest of the object at +key+, or nil when S3 cannot
  # tell us (multipart uploads without an md5sum metadata entry).
  def checksum(key)
    head = object_for(key).head

    # If the etag has no dashes, it's the MD5
    if !head.etag.include?("-")
      head.etag.gsub('"', '')
    # Check for md5 in metadata if it was uploaded via multipart
    # NOTE(review): aws-sdk v2 head output exposes #metadata, not #meta —
    # verify head.meta is correct against the SDK in use.
    elsif md5sum = head.meta["md5sum"]
      md5sum
    # Otherwise, we don't have a digest yet for this key
    else
      nil
    end
  end

  private
    # S3 object handle for +key+ within the configured bucket.
    def object_for(key)
      bucket.object(key)
    end

    # Reads the object for the given key in chunks, yielding each to the block.
    # +options+ are merged into each ranged read request.
    def stream(key, options = {}, &block)
      object = object_for(key)

      chunk_size = 5242880 # 5 megabytes
      offset = 0

      while offset < object.content_length
        yield object.read(options.merge(range: "bytes=#{offset}-#{offset + chunk_size - 1}"))
        offset += chunk_size
      end
    end
end
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册