
class ActiveStorage::Service::S3Service

Active Storage S3 Service

Wraps the Amazon Simple Storage Service (S3) as an Active Storage service. See ActiveStorage::Service for the generic API documentation that applies to all services.

Inherits From

ActiveStorage::Service

Constants

MAXIMUM_UPLOAD_PARTS_COUNT = 10000

MINIMUM_UPLOAD_PART_SIZE = 5.megabytes

SUPPORTED_CHECKSUM_ALGORITHMS = [ :MD5, :SHA256 ]

Attributes

[R] bucket
[R] client
[R] default_digest_algorithm
[R] multipart_upload_threshold
[R] upload_options

Public class methods

new(bucket:, upload: {}, public: false, default_digest_algorithm: :MD5, **options)
# File activestorage/lib/active_storage/service/s3_service.rb, line 21
def initialize(bucket:, upload: {}, public: false, default_digest_algorithm: :MD5, **options)
  @client = Aws::S3::Resource.new(**options)
  @bucket = @client.bucket(bucket)

  @multipart_upload_threshold = upload.delete(:multipart_threshold) || 100.megabytes
  @public = public

  @upload_options = upload
  @upload_options[:acl] = "public-read" if public?
  @default_digest_algorithm = default_digest_algorithm.to_sym
  raise ActiveStorage::UnsupportedChecksumError unless SUPPORTED_CHECKSUM_ALGORITHMS.include?(@default_digest_algorithm)
end
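
A minimal construction sketch. The bucket name, region, and threshold below are placeholders; in a Rails application the service is normally built from the entry in config/storage.yml rather than instantiated by hand. Keyword arguments other than bucket:, upload:, public:, and default_digest_algorithm: are forwarded to Aws::S3::Resource.new.

service = ActiveStorage::Service::S3Service.new(
  bucket: "my-app-uploads",                       # placeholder bucket name
  region: "us-east-1",                            # forwarded to Aws::S3::Resource.new
  upload: { multipart_threshold: 50.megabytes },  # single-part uploads below 50 MB
  public: false,
  default_digest_algorithm: :MD5
)
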

Public instance methods

base64digest(io, algorithm: default_digest_algorithm, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 124
def base64digest(io, algorithm: default_digest_algorithm, **)
  digest = checksum_implementation(algorithm).base64digest(io)
  if algorithm == :MD5
    digest
  else
    "#{algorithm}:#{digest}"
  end
end
base64file(file, algorithm: default_digest_algorithm, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 133
def base64file(file, algorithm: default_digest_algorithm, **)
  digest = checksum_implementation(algorithm).file(file).base64digest
  if algorithm == :MD5
    digest
  else
    "#{algorithm}:#{digest}"
  end
end
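
A usage sketch for the two digest helpers, assuming service is an S3Service instance (for example ActiveStorage::Blob.service in an app configured for S3); the path is a placeholder. MD5 digests come back bare, while any other algorithm is prefixed with its name.

data = File.binread("/tmp/report.pdf")                      # placeholder path

service.base64digest(data)                                  # => Base64 MD5 digest, no prefix
service.base64digest(data, algorithm: :SHA256)              # => "SHA256:<Base64 digest>"

service.base64file("/tmp/report.pdf")                       # digests the file without reading it all into memory
service.base64file("/tmp/report.pdf", algorithm: :SHA256)   # => "SHA256:<Base64 digest>"
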
checksum_implementation(algorithm = default_digest_algorithm, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 161
def checksum_implementation(algorithm = default_digest_algorithm, **)
  case algorithm
  when :MD5
    md5
  when :SHA256
    sha256
  else
    raise ActiveStorage::UnsupportedChecksumError
  end
end
compose(source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
# File activestorage/lib/active_storage/service/s3_service.rb, line 106
def compose(source_keys, destination_key, filename: nil, content_type: nil, disposition: nil, custom_metadata: {})
  content_disposition = content_disposition_with(type: disposition, filename: filename) if disposition && filename

  object_for(destination_key).upload_stream(
    content_type: content_type,
    content_disposition: content_disposition,
    part_size: MINIMUM_UPLOAD_PART_SIZE,
    metadata: custom_metadata,
    **upload_options
  ) do |out|
    source_keys.each do |source_key|
      stream(source_key) do |chunk|
        IO.copy_stream(StringIO.new(chunk), out)
      end
    end
  end
end
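
A sketch of stitching several stored objects into one, assuming service is an S3Service instance and the source keys already exist in the bucket; all keys and the filename are placeholders. The sources are streamed into the destination in order via a streaming multipart upload.

service.compose(
  ["chunk-1", "chunk-2", "chunk-3"],              # placeholder source keys, concatenated in order
  "combined-report",                              # placeholder destination key
  content_type: "application/pdf",
  filename: ActiveStorage::Filename.new("report.pdf"),
  disposition: :attachment
)
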
compute_checksum_in_chunks(io, algorithm: default_digest_algorithm, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 142
def compute_checksum_in_chunks(io, algorithm: default_digest_algorithm, **)
  raise ArgumentError, "io must be rewindable" unless io.respond_to?(:rewind)

  digest = checksum_implementation(algorithm).new.tap do |checksum|
    read_buffer = "".b
    while io.read(5.megabytes, read_buffer)
      checksum << read_buffer
    end

    io.rewind
  end.base64digest

  if algorithm == :MD5
    digest
  else
    "#{algorithm}:#{digest}"
  end
end
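
A sketch of checksumming a large file in 5 MB chunks, assuming service is an S3Service instance; the path is a placeholder. The IO is rewound after the digest is computed, so the same handle can be passed straight to upload afterwards.

File.open("/tmp/video.mp4", "rb") do |io|                              # placeholder path
  md5    = service.compute_checksum_in_chunks(io)                      # Base64 MD5, no prefix
  sha256 = service.compute_checksum_in_chunks(io, algorithm: :SHA256)  # "SHA256:<Base64 digest>"
end
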
delete(key)
# File activestorage/lib/active_storage/service/s3_service.rb, line 68
def delete(key)
  instrument :delete, key: key do
    object_for(key).delete
  end
end
delete_prefixed(prefix)
# File activestorage/lib/active_storage/service/s3_service.rb, line 74
def delete_prefixed(prefix)
  instrument :delete_prefixed, prefix: prefix do
    bucket.objects(prefix: prefix).batch_delete!
  end
end
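
A deletion sketch, assuming service is an S3Service instance; the key and prefix are placeholders. delete removes a single object, while delete_prefixed batch-deletes every object whose key starts with the prefix (Active Storage uses this to clear a blob's variants, for example).

service.delete("some-key")                      # placeholder key
service.delete_prefixed("variants/some-key/")   # placeholder prefix
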
download(key, &block)
# File activestorage/lib/active_storage/service/s3_service.rb, line 46
def download(key, &block)
  if block_given?
    instrument :streaming_download, key: key do
      stream(key, &block)
    end
  else
    instrument :download, key: key do
      object_for(key).get.body.string.force_encoding(Encoding::BINARY)
    rescue Aws::S3::Errors::NoSuchKey
      raise ActiveStorage::FileNotFoundError
    end
  end
end
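
A sketch of both download modes, assuming service is an S3Service instance and the key exists (a missing key raises ActiveStorage::FileNotFoundError); the key and destination path are placeholders.

# Whole object in memory, returned as a binary-encoded String.
data = service.download("some-key")

# Streaming: chunks are yielded as they arrive, keeping memory use flat.
File.open("/tmp/copy", "wb") do |file|
  service.download("some-key") { |chunk| file.write(chunk) }
end
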
download_chunk(key, range)
# File activestorage/lib/active_storage/service/s3_service.rb, line 60
def download_chunk(key, range)
  instrument :download_chunk, key: key, range: range do
    object_for(key).get(range: "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body.string.force_encoding(Encoding::BINARY)
  rescue Aws::S3::Errors::NoSuchKey
    raise ActiveStorage::FileNotFoundError
  end
end
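
A byte-range sketch, assuming service is an S3Service instance; the key is a placeholder. Inclusive and end-exclusive Ranges request the same bytes, since the Range is translated into an HTTP Range header.

first_kilobyte = service.download_chunk("some-key", 0..1023)    # bytes 0 through 1023
same_bytes     = service.download_chunk("some-key", 0...1024)   # end-exclusive form of the same range
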
exist?(key)
# File activestorage/lib/active_storage/service/s3_service.rb, line 80
def exist?(key)
  instrument :exist, key: key do |payload|
    answer = object_for(key).exists?
    payload[:exist] = answer
    answer
  end
end
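
A quick existence check, assuming service is an S3Service instance; the key is a placeholder. The boolean is also recorded in the instrumentation payload under :exist.

service.exist?("some-key")   # => true or false
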
headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disposition: nil, custom_metadata: {}, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 100
def headers_for_direct_upload(key, content_type:, checksum:, filename: nil, disposition: nil, custom_metadata: {}, **)
  content_disposition = content_disposition_with(type: disposition, filename: filename) if filename

  { "Content-Type" => content_type, **s3_http_headers_for_direct_upload(checksum), "Content-Disposition" => content_disposition, **custom_metadata_headers(custom_metadata) }
end
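
A sketch of the headers a client must send with its direct-upload PUT, assuming service is an S3Service instance; the key and file path are placeholders. The checksum is the Base64 digest of the file being uploaded (see base64digest above), and the checksum header it maps to depends on the digest algorithm.

checksum = service.base64digest(File.binread("/tmp/logo.png"))   # placeholder path

service.headers_for_direct_upload(
  "some-key",
  content_type: "image/png",
  checksum: checksum,
  filename: ActiveStorage::Filename.new("logo.png"),
  disposition: :inline
)
# => hash containing Content-Type, the checksum header, and Content-Disposition
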
upload(key, io, checksum: nil, filename: nil, content_type: nil, disposition: nil, custom_metadata: {}, **)
# File activestorage/lib/active_storage/service/s3_service.rb, line 34
def upload(key, io, checksum: nil, filename: nil, content_type: nil, disposition: nil, custom_metadata: {}, **)
  instrument :upload, key: key, checksum: checksum do
    content_disposition = content_disposition_with(filename: filename, type: disposition) if disposition && filename

    if io.size < multipart_upload_threshold
      upload_with_single_part key, io, checksum: checksum, content_type: content_type, content_disposition: content_disposition, custom_metadata: custom_metadata
    else
      upload_with_multipart key, io, content_type: content_type, content_disposition: content_disposition, custom_metadata: custom_metadata
    end
  end
end
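
An upload sketch, assuming service is an S3Service instance; the key and path are placeholders. Files smaller than multipart_upload_threshold (100 MB unless configured otherwise) are sent in a single PUT with the given checksum; larger files go through a multipart upload and the checksum is not passed along.

File.open("/tmp/avatar.png", "rb") do |io|                  # placeholder path
  service.upload(
    "some-key",
    io,
    checksum: service.compute_checksum_in_chunks(io),       # rewinds io before upload reads it
    content_type: "image/png",
    filename: ActiveStorage::Filename.new("avatar.png"),
    disposition: :inline
  )
end
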
url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata: {})
# File activestorage/lib/active_storage/service/s3_service.rb, line 88
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata: {})
  instrument :url, key: key do |payload|
    generated_url = object_for(key).presigned_url :put, expires_in: expires_in.to_i,
      content_type: content_type, content_length: content_length, content_md5: checksum,
      metadata: custom_metadata, whitelist_headers: ["content-length"], **upload_options

    payload[:url] = generated_url

    generated_url
  end
end
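
A sketch of generating a presigned PUT URL for a direct upload, assuming service is an S3Service instance; the key, size, and file path are placeholders. The checksum must be the bare Base64 MD5 of the file, since it is passed as content_md5 of the presigned request; the client then PUTs the file body to this URL with the headers from headers_for_direct_upload.

url = service.url_for_direct_upload(
  "some-key",
  expires_in: 5.minutes,
  content_type: "image/png",
  content_length: 48_222,                                          # byte size the client declared
  checksum: service.base64digest(File.binread("/tmp/logo.png"))    # placeholder path
)
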

Definition files

activestorage/lib/active_storage/service/s3_service.rb