Backup::CloudIO::S3

Attributes

access_key_id[R]
bucket[R]
chunk_size[R]
encryption[R]
fog_options[R]
region[R]
secret_access_key[R]
storage_class[R]
use_iam_profile[R]

Public Class Methods

new(options = {})
# File lib/backup/cloud_io/s3.rb, line 19
def initialize(options = {})
  super

  @access_key_id      = options[:access_key_id]
  @secret_access_key  = options[:secret_access_key]
  @use_iam_profile    = options[:use_iam_profile]
  @region             = options[:region]
  @bucket             = options[:bucket]
  @chunk_size         = options[:chunk_size]
  @encryption         = options[:encryption]
  @storage_class      = options[:storage_class]
  @fog_options        = options[:fog_options]
end
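
For orientation, a construction sketch follows. Every value below is a placeholder, cloud_io is just a local variable name, and super forwards the same options hash to the base class for its own settings. Note that :chunk_size is given in MiB (#upload multiplies it by 1024**2).

require 'backup'

cloud_io = Backup::CloudIO::S3.new(
  :access_key_id     => 'my_access_key_id',     # placeholder credentials
  :secret_access_key => 'my_secret_access_key',
  :region            => 'us-east-1',
  :bucket            => 'my-backup-bucket',
  :chunk_size        => 5,                      # MiB; 0 disables multipart uploads
  :encryption        => :aes256,
  :storage_class     => :standard
)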

Public Instance Methods

delete(objects_or_keys)

Delete object(s) from the bucket.

  • Called by the Storage (with objects) and the Syncer (with keys)

  • Deletes 1000 objects per request.

  • Missing objects will be ignored.

# File lib/backup/cloud_io/s3.rb, line 100
def delete(objects_or_keys)
  keys = Array(objects_or_keys).dup
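  # The Storage passes Object instances; map them to their keys.
  # The Syncer passes plain keys, which are used as-is.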
  keys.map!(&:key) if keys.first.is_a?(Object)

  opts = { :quiet => true } # only report Errors in DeleteResult
  until keys.empty?
    _keys = keys.slice!(0, 1000)
    with_retries('DELETE Multiple Objects') do
      resp = connection.delete_multiple_objects(bucket, _keys, opts.dup)
      unless resp.body['DeleteResult'].empty?
        errors = resp.body['DeleteResult'].map do |result|
          error = result['Error']
          "Failed to delete: #{ error['Key'] }\n" +
          "Reason: #{ error['Code'] }: #{ error['Message'] }"
        end.join("\n")
        raise Error, "The server returned the following:\n#{ errors }"
      end
    end
  end
end
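
As a usage sketch (reusing the cloud_io instance from the construction example; the key names are hypothetical), both calling styles described above look like this:

# Syncer style: plain keys
cloud_io.delete(['backups/2016.01.01.tar-aa', 'backups/2016.01.01.tar-ab'])

# Storage style: Object instances previously returned by #objects
stale_objects = cloud_io.objects('backups/old_trigger')
cloud_io.delete(stale_objects) unless stale_objects.empty?
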
head_object(object)

Used by Object to fetch metadata if needed.

# File lib/backup/cloud_io/s3.rb, line 87
def head_object(object)
  resp = nil
  with_retries("HEAD '#{ bucket }/#{ object.key }'") do
    resp = connection.head_object(bucket, object.key)
  end
  resp
end
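
A brief sketch; head_object returns whatever response the underlying fog connection produces for the HEAD request, so the header inspection below is only illustrative:

object = cloud_io.objects('backups/my_trigger').first
if object
  resp = cloud_io.head_object(object)
  puts resp.headers.inspect   # e.g. ETag, Content-Length, x-amz-* headers
end
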
objects(prefix)

Returns all objects in the bucket with the given prefix.

  • get_bucket returns a max of 1000 objects per request.

  • Returns objects in alphabetical order.

  • If marker is given, only objects after the marker are in the response.

# File lib/backup/cloud_io/s3.rb, line 67
def objects(prefix)
  objects = []
  resp = nil
  prefix = prefix.chomp('/')
  opts = { 'prefix' => prefix + '/' }

  while resp.nil? || resp.body['IsTruncated']
    opts.merge!('marker' => objects.last.key) unless objects.empty?
    with_retries("GET '#{ bucket }/#{ prefix }/*'") do
      resp = connection.get_bucket(bucket, opts)
    end
    resp.body['Contents'].each do |obj_data|
      objects << Object.new(self, obj_data)
    end
  end

  objects
end
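
Because the pagination loop above accumulates every page into a single array, callers can treat the result as a plain Array of Object instances; the prefix is a placeholder:

objects = cloud_io.objects('backups/my_trigger')
puts "#{ objects.count } object(s) found"
objects.each { |object| puts object.key }   # keys are in alphabetical order
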
upload(src, dest)

The Syncer may call this method in multiple threads. However, #objects is always called prior to multithreading.

# File lib/backup/cloud_io/s3.rb, line 35
def upload(src, dest)
  file_size = File.size(src)
  chunk_bytes = chunk_size * 1024**2
  if chunk_bytes > 0 && file_size > chunk_bytes
    raise FileSizeError, <<-EOS if file_size > MAX_MULTIPART_SIZE
      File Too Large
      File: #{ src }
      Size: #{ file_size }
      Max Multipart Upload Size is #{ MAX_MULTIPART_SIZE } (5 TiB)
    EOS

    chunk_bytes = adjusted_chunk_bytes(chunk_bytes, file_size)
    upload_id = initiate_multipart(dest)
    parts = upload_parts(src, dest, upload_id, chunk_bytes, file_size)
    complete_multipart(dest, upload_id, parts)
  else
    raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
      File Too Large
      File: #{ src }
      Size: #{ file_size }
      Max File Size is #{ MAX_FILE_SIZE } (5 GiB)
    EOS

    put_object(src, dest)
  end
end
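
A usage sketch with placeholder paths; whether the file goes up as a single PUT or as a multipart upload is decided by the configured chunk_size and the file's size, as the code above shows:

src  = '/tmp/backups/my_trigger/2016.01.01.12.00.00/archive.tar'
dest = 'backups/my_trigger/2016.01.01.12.00.00/archive.tar'
cloud_io.upload(src, dest)   # raises FileSizeError if src exceeds the applicable limit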
