  # File lib/backup/cloud_io/cloud_files.rb, line 18
  def initialize(options = {})
    super

    @username           = options[:username]
    @api_key            = options[:api_key]
    @auth_url           = options[:auth_url]
    @region             = options[:region]
    @servicenet         = options[:servicenet]
    @container          = options[:container]
    @segments_container = options[:segments_container]
    @segment_size       = options[:segment_size]
    @days_to_keep       = options[:days_to_keep]
    @fog_options        = options[:fog_options]
  end
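A minimal construction sketch, assuming the gem is loaded via require 'backup'. Every credential and container value below is a placeholder, and this cloud_io instance is reused by the examples that follow:

  require 'backup'

  # Placeholder values -- substitute real Rackspace Cloud Files settings.
  cloud_io = Backup::CloudIO::CloudFiles.new(
    :username           => 'my_username',
    :api_key            => 'my_api_key',
    :auth_url           => 'https://identity.api.rackspacecloud.com/v2.0',
    :region             => 'ord',
    :servicenet         => false,  # true routes requests over Rackspace ServiceNet
    :container          => 'my_backups',
    :segments_container => 'my_backups_segments',
    :segment_size       => 5,      # MiB; 0 disables SLO uploads (see #upload below)
    :days_to_keep       => nil,
    :fog_options        => {}
  )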
Delete non-SLO object(s) from the container.

- Called by the Storage (with objects) and the Syncer (with names).
- Deletes 10,000 objects per request.
- Missing objects will be ignored.
  # File lib/backup/cloud_io/cloud_files.rb, line 103
  def delete(objects_or_names)
    names = Array(objects_or_names).dup
    names.map!(&:name) if names.first.is_a?(Object)

    until names.empty?
      _names = names.slice!(0, 10000)
      with_retries('DELETE Multiple Objects') do
        resp = connection.delete_multiple_objects(container, _names)
        resp_status = resp.body['Response Status']
        raise Error, <<-EOS unless resp_status == '200 OK'
          #{ resp_status }
          The server returned the following:
          #{ resp.body.inspect }
        EOS
      end
    end
  end
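A usage sketch, reusing the placeholder cloud_io from the construction example; the object names are hypothetical:

  # By name, as the Syncer does:
  cloud_io.delete(['sync/file_a.tar', 'sync/file_b.tar'])

  # Or with Object instances, as the Storage does -- delete extracts
  # each name via Object#name before issuing the bulk request:
  cloud_io.delete(cloud_io.objects('old_backups'))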
Delete SLO object(s) from the container.

- Removes the SLO manifest object and all associated segments.
- Missing segments will be ignored.
  # File lib/backup/cloud_io/cloud_files.rb, line 126
  def delete_slo(objects)
    Array(objects).each do |object|
      with_retries("DELETE SLO Manifest '#{ container }/#{ object.name }'") do
        resp = connection.delete_static_large_object(container, object.name)
        resp_status = resp.body['Response Status']
        raise Error, <<-EOS unless resp_status == '200 OK'
          #{ resp_status }
          The server returned the following:
          #{ resp.body.inspect }
        EOS
      end
    end
  end
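For example, a sketch that assumes an Object#slo? predicate (derived from the X-Static-Large-Object metadata) for telling manifests apart from plain objects:

  objs = cloud_io.objects('my_backup')
  # Object#slo? is assumed here; partition manifests from plain objects.
  slos, plain = objs.partition { |obj| obj.slo? }
  cloud_io.delete_slo(slos)   # manifests plus their segments
  cloud_io.delete(plain)      # everything else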
Used by Object to fetch metadata if needed.
  # File lib/backup/cloud_io/cloud_files.rb, line 90
  def head_object(object)
    resp = nil
    with_retries("HEAD '#{ container }/#{ object.name }'") do
      resp = connection.head_object(container, object.name)
    end
    resp
  end
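A sketch of inspecting the response, assuming at least one object exists under the placeholder prefix; head_object returns the raw Fog response, so its headers carry the object's metadata:

  obj = cloud_io.objects('my_backup').first
  resp = cloud_io.head_object(obj)
  # SLO manifests are marked by the X-Static-Large-Object header.
  puts resp.headers['X-Static-Large-Object']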
Returns all objects in the container with the given prefix.

- get_container returns a max of 10,000 objects per request.
- Returns objects sorted using a SQLite binary collating function.
- If marker is given, only objects after the marker are in the response.
  # File lib/backup/cloud_io/cloud_files.rb, line 68
  def objects(prefix)
    objects = []
    resp = nil
    prefix = prefix.chomp('/')
    opts = { :prefix => prefix + '/' }

    create_containers

    while resp.nil? || resp.body.count == 10000
      opts.merge!(:marker => objects.last.name) unless objects.empty?
      with_retries("GET '#{ container }/#{ prefix }/*'") do
        resp = connection.get_container(container, opts)
      end
      resp.body.each do |obj_data|
        objects << Object.new(self, obj_data)
      end
    end

    objects
  end
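Since the while loop keeps re-requesting with :marker until a page returns fewer than 10,000 entries, callers receive the complete listing in one call. A sketch with a placeholder prefix:

  # A trailing '/' is stripped, so both forms list the same objects.
  cloud_io.objects('my_backup/').each do |obj|
    puts obj.name
  end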
The Syncer may call this method from multiple threads; however, objects is always called before any multithreading occurs.
  # File lib/backup/cloud_io/cloud_files.rb, line 35
  def upload(src, dest)
    create_containers

    file_size = File.size(src)
    segment_bytes = segment_size * 1024**2
    if segment_bytes > 0 && file_size > segment_bytes
      raise FileSizeError, <<-EOS if file_size > MAX_SLO_SIZE
        File Too Large
        File: #{ src }
        Size: #{ file_size }
        Max SLO Size is #{ MAX_SLO_SIZE } (5 GiB * 1000 segments)
      EOS

      segment_bytes = adjusted_segment_bytes(segment_bytes, file_size)
      segments = upload_segments(src, dest, segment_bytes, file_size)
      upload_manifest(dest, segments)
    else
      raise FileSizeError, <<-EOS if file_size > MAX_FILE_SIZE
        File Too Large
        File: #{ src }
        Size: #{ file_size }
        Max File Size is #{ MAX_FILE_SIZE } (5 GiB)
      EOS

      put_object(src, dest)
    end
  end
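A usage sketch with placeholder paths. With the construction example's :segment_size => 5, any file over 5 MiB is uploaded as segments plus an SLO manifest; smaller files go up as a single PUT via put_object:

  src  = '/tmp/backups/my_backup.tar'          # local file
  dest = 'my_backup/2013.12.31/my_backup.tar'  # object name within the container
  cloud_io.upload(src, dest)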