final class Live extends Service
Service used to wrap the unsafe Amazon S3 client and provide safe access to S3 storage
- Alphabetic
- By Inheritance
- Live
- Service
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
- new Live(unsafeClient: S3AsyncClient)
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
def
createBucket(bucketName: String): IO[S3Exception, Unit]
Create a bucket
-
def
deleteBucket(bucketName: String): IO[S3Exception, Unit]
Delete a bucket; the operation fails if the bucket is not present
-
def
deleteObject(bucketName: String, key: String): IO[S3Exception, Unit]
Delete an object from a bucket; if the object is not present, the operation still succeeds
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
execute[T](f: (S3AsyncClient) ⇒ CompletableFuture[T]): ZIO[Any, S3Exception, T]
Safely expose the Amazon S3 async client
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getNextObjects(listing: S3ObjectListing): IO[S3Exception, S3ObjectListing]
Fetch the next object listing from a specific object listing.
-
def
getObject(bucketName: String, key: String): Stream[S3Exception, Byte]
Read an object from a bucket; the operation fails if the object is not present
-
def
getObjectMetadata(bucketName: String, key: String): IO[S3Exception, ObjectMetadata]
Retrieves metadata from an object without returning the object itself.
Retrieves metadata from an object without returning the object itself. This operation is useful if you're only interested in an object's metadata.
- bucketName
name of the bucket
- key
object identifier to read
- returns
the ObjectMetadata
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
isBucketExists(bucketName: String): IO[S3Exception, Boolean]
Check if a bucket exists
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
listAllObjects(bucketName: String, options: ListObjectOptions): Stream[S3Exception, S3ObjectSummary]
- Definition Classes
- Service
-
def
listAllObjects(bucketName: String): Stream[S3Exception, S3ObjectSummary]
List all descendant objects of a bucket. Fetches all objects recursively from all nested directories by traversing each of them.
List all descendant objects of a bucket. Fetches all objects recursively from all nested directories by traversing each of them.
- bucketName
name of the bucket
- Definition Classes
- Service
-
val
listBuckets: IO[S3Exception, S3BucketListing]
List all available buckets
- def listObjects(bucketName: String, options: ListObjectOptions): IO[S3Exception, S3ObjectListing]
-
def
listObjects(bucketName: String): IO[S3Exception, S3ObjectListing]
List all objects for a specific bucket
-
def
multipartUpload[R](bucketName: String, key: String, content: ZStream[R, Throwable, Byte], options: MultipartUploadOptions)(parallelism: Int): ZIO[R, S3Exception, Unit]
Store a data object into a specific bucket; the minimum size of the data is 5 MB to use multipart upload (a restriction of the Amazon API)
- bucketName
name of the bucket
- key
unique object identifier
- content
object data
- options
the optional configurations of the multipart upload
- parallelism
the number of parallel requests to upload chunks
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
paginate(initialListing: S3ObjectListing): Stream[S3Exception, S3ObjectListing]
List all objects by traversing all nested directories
List all objects by traversing all nested directories
- initialListing
object listing to start with
- Definition Classes
- Service
-
def
putObject[R](bucketName: String, key: String, contentLength: Long, content: ZStream[R, Throwable, Byte], options: UploadOptions): ZIO[R, S3Exception, Unit]
Store a data object into a specific bucket
-
def
streamLines(bucketName: String, key: String): Stream[S3Exception, String]
Read an object by lines
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()