class EasyAwscr::S3::Client

Defined in:

easy-awscr/s3/client.cr

Constant Summary

MINIMUM_PART_SIZE_5MB = 5242880

This is a hard limit enforced by AWS for multipart uploads: each part except the last must be at least 5 MB.
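
For reference, the value is just 5 MiB expressed in bytes:

5 * 1024 * 1024 # => 5242880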

Constructors

Instance Method Summary

Constructor Detail

def self.new(*, region : String = EasyAwscr::Config.default_region!, credential_provider : EasyAwscr::Config::Provider = EasyAwscr::Config.default_credential_provider, lazy_init = false) #
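
Create a client. The region and credential provider default to the global EasyAwscr configuration. A minimal sketch (the region value here is illustrative):

client = EasyAwscr::S3::Client.new(region: "us-east-1")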

[View source]

Instance Method Detail

def abort_multipart_upload(bucket : String, object : String, upload_id : String) #

Aborts a multipart upload. Returns true if the abort succeeded, false otherwise.

resp = client.abort_multipart_upload("bucket1", "obj", "123")
p resp # => true

[View source]
def batch_delete(bucket, keys : Array(String)) #

Batch deletes a list of object keys in a single request.

resp = client.batch_delete("bucket1", ["obj", "obj2"])
p resp.success? # => true

[View source]
def complete_multipart_upload(bucket : String, object : String, upload_id : String, parts : Array(Awscr::S3::Response::UploadPartOutput)) #

Complete a multipart upload

resp = client.complete_multipart_upload("bucket1", "obj", "123", parts)
p resp.key # => "obj"

[View source]
def copy_object(bucket, source : String, destination : String, headers = Hash(String, String).new) #

Copy an object from source to destination in a bucket.

client.copy_object("bucket1", "source_object", "destination_object")

[View source]
def delete_bucket(bucket) #

Delete a bucket. Note: the bucket must be empty.

resp = client.delete_bucket("test")
p resp # => true

[View source]
def delete_object(bucket, object, headers = Hash(String, String).new) #

Delete an object from a bucket. Returns true if successful, false otherwise.

resp = client.delete_object("bucket1", "obj")
p resp # => true

[View source]
def get_object(bucket, object : String, headers = Hash(String, String).new) #

Get the contents of an object in a bucket

resp = client.get_object("bucket1", "obj")
p resp.body # => "MY DATA"

[View source]
def get_object(bucket, object : String, headers = Hash(String, String).new, &) #

Get the contents of an object in a bucket as an IO object

client.get_object("bucket1", "obj") do |resp|
  IO.copy(resp.body_io, STDOUT) # prints "MY DATA"
end
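
The block form also makes it easy to stream an object straight to a local file (a minimal sketch; the local path is illustrative):

File.open("/tmp/obj", "w") do |file|
  client.get_object("bucket1", "obj") do |resp|
    IO.copy(resp.body_io, file)
  end
end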

[View source]
def head_bucket(bucket) #

Get information about a bucket; useful for determining if a bucket exists. Raises an Http::ServerError if the bucket does not exist.

resp = client.head_bucket("bucket1")
p resp # => true
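
Because a missing bucket raises instead of returning false, an existence check can rescue the error (a sketch based on the description above; the helper name is hypothetical):

def bucket_exists?(client, bucket) : Bool
  client.head_bucket(bucket)
  true
rescue Awscr::S3::Http::ServerError
  false
end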

[View source]
def head_object(bucket, object : String, headers = Hash(String, String).new) #

Get the metadata of an object in a bucket

resp = client.head_object("bucket1", "obj")
p resp.size          # => 123
p resp.status        # => HTTP::Status::OK
p resp.last_modified # => "Wed, 19 Jun 2019 11:55:33 GMT"
p resp.etag          # => ""
p resp.meta          # => {"my_tag" => "my_value"}

[View source]
def list_buckets #

List S3 buckets

resp = client.list_buckets
p resp.buckets.map(&.name) # => ["bucket1", "bucket2"]

[View source]
def list_objects(bucket, *, prefix = nil, max_keys = nil) : Awscr::S3::Paginator::ListObjectsV2 #

List all the objects in a bucket

resp = client.list_objects("bucket1", prefix: "test")
p resp.map(&.key) # => ["obj"]

[View source]
def put_bucket(bucket, region : String | Nil = nil, headers = Hash(String, String).new) #

Create a bucket, optionally place it in a region.

resp = client.put_bucket("test")
p resp # => true

[View source]
def put_object(bucket, object : String, body : IO | String | Bytes, headers = Hash(String, String).new) #

Add an object to a bucket.

resp = client.put_object("bucket1", "obj", "MY DATA")
p resp.key # => "obj"

[View source]
def start_multipart_upload(bucket : String, object : String, headers = Hash(String, String).new) #

Start a multipart upload

resp = client.start_multipart_upload("bucket1", "obj")
p resp.upload_id # => "someid"

[View source]
def stream_to_s3(bucket : String, object : String, *, headers = Hash(String, String).new, part_size = MINIMUM_PART_SIZE_5MB, max_workers = 8, auto_close = true, &) #

Provides an IO object that can be used to stream data directly into an S3 object. In contrast to #upload_file, the size of the data does not have to be known in advance. It uses #start_multipart_upload, #upload_part, and #complete_multipart_upload internally.

Example: creating a file on S3

client.stream_to_s3("bucket1", "obj") do |io|
  io << ...
end

Intuitively, it is like writing to a local file, matching this pattern:

File.open("/tmp/bucket1/obj", "w") do |io|
  io << ...
end

If you need more control, you can also get direct access to the IO object:

io = client.stream_to_s3("bucket1", "obj", auto_close: false) { |io| io }
io << ...
io.close

[View source]
def upload_file(bucket : String, object : String, io : IO, *, headers = Hash(String, String).new, with_content_type = true, simultaneous_parts = 5) : Bool #

Upload a file to a bucket. Returns true if successful; otherwise an Http::ServerError is raised.

File.open("/path/some/big/file.txt") do |io|
  success = client.upload_file("bucket1", "obj", io)
  p success # => true
end

It uses Awscr::S3::FileUploader internally:

  • If the file is 5 MB or smaller, it is uploaded in a single request; if it is larger than 5 MB, it is uploaded in parts.
  • If with_content_type is true, the uploader automatically adds a content type header.
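
In-memory data can be uploaded the same way by wrapping it in an IO (a minimal sketch):

client.upload_file("bucket1", "obj", IO::Memory.new("MY DATA"))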

[View source]
def upload_part(bucket : String, object : String, upload_id : String, part_number : Int32, part : IO | String) #

Upload a part for use in a multipart upload

resp = client.upload_part("bucket1", "obj", "someid", 123, "MY DATA")
p resp.upload_id # => "someid"

[View source]
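
Putting the multipart methods together: a minimal end-to-end sketch (the chunks enumerable is illustrative, and it assumes, as the parts parameter of #complete_multipart_upload suggests, that #upload_part returns an Awscr::S3::Response::UploadPartOutput; every part except the last must be at least MINIMUM_PART_SIZE_5MB bytes):

resp = client.start_multipart_upload("bucket1", "obj")
parts = [] of Awscr::S3::Response::UploadPartOutput
begin
  # Part numbers start at 1
  chunks.each_with_index do |chunk, i|
    parts << client.upload_part("bucket1", "obj", resp.upload_id, i + 1, chunk)
  end
  client.complete_multipart_upload("bucket1", "obj", resp.upload_id, parts)
rescue ex
  # On any failure, abort so AWS does not keep the partial upload around
  client.abort_multipart_upload("bucket1", "obj", resp.upload_id)
  raise ex
end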