All Downloads are FREE. Search and download functionalities are using the official Maven repository.
Please wait. This can take a few minutes...
Many resources are needed to download a project. Please understand that we have to compensate our server costs. Thank you in advance.
Project price only 1 $
You can buy this project and download/modify it as often as you want.
eu.shiftforward.apso.io.S3FileDescriptor.scala Maven / Gradle / Ivy
package eu.shiftforward.apso.io
import java.io.InputStream
import com.amazonaws.auth.BasicAWSCredentials
import com.amazonaws.services.s3.model.S3ObjectSummary
import com.typesafe.config.Config
import eu.shiftforward.apso.Logging
import eu.shiftforward.apso.aws.{ S3Bucket, SerializableAWSCredentials }
import eu.shiftforward.apso.config.FileDescriptorCredentials
import scala.collection.concurrent.TrieMap
/**
 * A file descriptor for an object stored in an Amazon S3 bucket.
 *
 * The descriptor keeps the bucket handle, the path inside the bucket split into
 * its `/`-separated `elements`, and an optional cached `S3ObjectSummary`
 * (populated by listing operations) so metadata such as size can be answered
 * without an extra S3 round trip.
 *
 * @param bucket the S3 bucket this descriptor points into
 * @param elements the path inside the bucket, one list entry per path segment
 * @param summary optional cached S3 object metadata for this key
 */
case class S3FileDescriptor(
    bucket: S3Bucket,
    protected val elements: List[String],
    private var summary: Option[S3ObjectSummary] = None)
  extends FileDescriptor with RemoteFileDescriptor with Logging {

  type Self = S3FileDescriptor

  val bucketName = bucket.bucketName

  // The "root" of an S3 path is the bucket name itself.
  protected val root = bucketName

  // Full key of this object inside the bucket, e.g. "a/b/c".
  private lazy val builtPath = buildPath(elements)

  // Joins path segments with '/' to form an S3 key.
  @inline private def buildPath(p: Seq[String]): String = p.mkString("/")

  // Creates a copy of this descriptor pointing at a different key.
  // NOTE(review): `copy` carries over the cached `summary` of the old key —
  // confirm whether it should be reset to None here.
  protected def duplicate(elements: List[String]) =
    this.copy(elements = elements)

  /**
   * Size of this object in bytes, served from the cached summary when
   * available, otherwise queried from S3.
   */
  def size = summary match {
    case Some(info) => info.getSize
    case None => bucket.size(builtPath)
  }

  /**
   * Downloads this object to `localTarget`.
   *
   * @param localTarget the local file to write to; must not be a directory
   * @param safeDownloading when true, downloads to a ".tmp" sibling first and
   *                        renames it on success, so a failed transfer never
   *                        leaves a truncated file at the target path
   * @return true if the download succeeded
   * @throws Exception if `localTarget` is a directory
   */
  def download(localTarget: LocalFileDescriptor, safeDownloading: Boolean): Boolean = {
    if (localTarget.isDirectory) {
      throw new Exception("File descriptor points to a directory")
    } else {
      // Make sure the parent directory exists before writing.
      localTarget.parent().mkdirs()
      if (safeDownloading) {
        val tmpFile = localTarget.sibling(_ + ".tmp")
        val succeed = bucket.pull(builtPath, tmpFile.path)
        if (succeed) tmpFile.rename(localTarget)
        succeed
      } else {
        bucket.pull(builtPath, localTarget.path)
      }
    }
  }

  /**
   * Uploads the contents of a local file to this S3 key.
   *
   * @param localTarget the local file to read from; must not be a directory
   * @return true if the upload succeeded
   * @throws Exception if `localTarget` is a directory
   */
  def upload(localTarget: LocalFileDescriptor): Boolean = {
    if (localTarget.isDirectory) {
      throw new Exception("File descriptor points to a directory")
    } else {
      bucket.push(builtPath, localTarget.file)
    }
  }

  /**
   * Uploads the contents of an input stream to this S3 key.
   *
   * @param inputStream the stream to read from
   * @param length the stream length in bytes, if known
   * @return true if the upload succeeded
   * @throws Exception if this descriptor points to a directory
   */
  def upload(inputStream: InputStream, length: Option[Long]): Boolean = {
    if (isDirectory) {
      throw new Exception("File descriptor points to a directory")
    } else {
      bucket.push(builtPath, inputStream, length)
    }
  }

  /** Opens a read stream over this object's content. */
  def stream() = bucket.stream(builtPath)

  /**
   * Resolves a relative path against this descriptor. "." and "" segments are
   * no-ops, ".." drops the last segment, and any other segment is appended.
   * NOTE(review): the cached `summary` is carried over by `copy` even though
   * it describes the old key — confirm whether it should be reset.
   */
  override def cd(pathString: String): S3FileDescriptor = {
    val newPath = pathString.split("/").map(_.trim).toList.foldLeft(elements) {
      case (acc, "." | "") => acc
      case (acc, "..") => acc.dropRight(1)
      case (acc, segment) => acc :+ segment
    }
    this.copy(elements = newPath)
  }

  /**
   * Lists the immediate children of this descriptor, one per distinct first
   * path segment below this key, each carrying its cached object summary.
   */
  override def list: Iterator[S3FileDescriptor] = {
    // Strips `primary` from the front of `secondary`, returning the remainder.
    // NOTE(review): this match is non-exhaustive — diverging heads (h1 != h2)
    // raise a MatchError. The only caller passes keys listed under this
    // prefix, which presumably always start with `elements` — confirm.
    def removePrefix(primary: List[String], secondary: List[String]): List[String] = {
      (primary, secondary) match {
        case (h1 :: t1, h2 :: t2) if h1 == h2 => removePrefix(t1, t2)
        case (Nil, s) => s
        case (p, Nil) => Nil
      }
    }
    // Map each child's first path segment to its summary; building a Map
    // collapses duplicate segments (multiple keys under one sub-directory).
    // NOTE(review): `.head` throws if a listed key equals this path exactly
    // (remainder Nil) — confirm the listing never returns the prefix itself.
    val s3Elements = listS3WithPrefix("", includeDirectories = true).map { info =>
      removePrefix(elements, info.getKey.split("/").toList).head -> info
    }.toMap
    s3Elements.map {
      case (newElement, info) => this.copy(elements = elements :+ newElement, summary = Some(info))
    }.toIterator
  }

  /**
   * Lists every file whose key starts with this path followed by `prefix`,
   * excluding directory placeholders, each carrying its cached summary.
   */
  def listAllFilesWithPrefix(prefix: String): Iterator[S3FileDescriptor] = {
    listS3WithPrefix(prefix, includeDirectories = false).map { info =>
      this.copy(elements = info.getKey.split("/").toList, summary = Some(info))
    }
  }

  // Raw S3 listing for `elements :+ prefix`. With an empty prefix the built
  // key ends in "/", so everything under this path is matched.
  private[this] def listS3WithPrefix(prefix: String, includeDirectories: Boolean): Iterator[S3ObjectSummary] = {
    bucket.getObjectsWithMatchingPrefix(buildPath(elements :+ prefix), includeDirectories)
  }

  /** Returns a descriptor for a sibling whose name is `f` applied to this one's name. */
  override def sibling(f: String => String): S3FileDescriptor = {
    S3FileDescriptor(bucket, elements.dropRight(1) :+ f(name))
  }

  // Whether S3 reports this key as a directory; queried lazily, at most once.
  private lazy val isDirectoryRemote = bucket.isDirectory(builtPath)

  // Set to true once mkdirs() succeeds locally, avoiding a remote re-check.
  private var isDirectoryLocal = false

  def isDirectory: Boolean = isDirectoryLocal || isDirectoryRemote

  def exists: Boolean = bucket.exists(builtPath)

  def delete(): Boolean = bucket.delete(builtPath)

  /**
   * Creates this path as a directory in S3 (no-op if it already is one) and
   * remembers the result locally.
   * @return true if the directory exists after the call
   */
  def mkdirs(): Boolean = {
    val result = isDirectory || bucket.createDirectory(builtPath)
    isDirectoryLocal = result
    result
  }

  override def toString: String = s"s3://$path"
}
object S3FileDescriptor {

  /**
   * Builds an [[S3FileDescriptor]] from a "bucket/key" path string, using no
   * explicit credentials.
   *
   * @param path the uri without the protocol, containing the bucket and path
   * @return a s3 file descriptor
   */
  def apply(path: String): S3FileDescriptor = apply(path, None)

  /**
   * Builds an [[S3FileDescriptor]] from a "bucket/key" path string, reading
   * the bucket credentials from the given configuration.
   *
   * @param path the uri without the protocol, containing the bucket and path
   * @param credentialsConfig the config containing the credentials
   * @return a s3 file descriptor
   */
  def apply(path: String, credentialsConfig: Config): S3FileDescriptor =
    apply(path, credentials.read(credentialsConfig, path))

  /**
   * Builds an [[S3FileDescriptor]] from a "bucket/key" path string with
   * explicit AWS credentials.
   *
   * @param path the uri without the protocol, containing the bucket and path
   * @param credentials credentials for accessing the s3 bucket
   * @return a s3 file descriptor
   */
  def apply(path: String, credentials: BasicAWSCredentials): S3FileDescriptor =
    apply(path, Some(SerializableAWSCredentials(credentials)))

  /**
   * Builds an [[S3FileDescriptor]] from a "bucket/key" path string with
   * explicit serializable AWS credentials.
   *
   * @param path the uri without the protocol, containing the bucket and path
   * @param credentials serializable credentials for accessing the s3 bucket
   * @return a s3 file descriptor
   */
  def apply(path: String, credentials: SerializableAWSCredentials): S3FileDescriptor =
    apply(path, Some(credentials))

  /**
   * Common constructor: splits `path` into the bucket name and the key
   * segments, reusing a cached [[S3Bucket]] handle for the bucket when one
   * already exists.
   *
   * @param path the uri without the protocol, containing the bucket and path
   * @param credentials optional credentials for accessing the s3 bucket
   * @return a s3 file descriptor
   */
  private def apply(path: String, credentials: Option[SerializableAWSCredentials]): S3FileDescriptor = {
    path.split('/').toList match {
      case bucketName :: keySegments =>
        // Only evaluated when the bucket is not already in the cache.
        def freshBucket = credentials match {
          case Some(creds) => new S3Bucket(bucketName, () => creds)
          case None => new S3Bucket(bucketName)
        }
        val cachedBucket = s3Buckets.getOrElseUpdate(bucketName, freshBucket)
        S3FileDescriptor(cachedBucket, keySegments.filter(_.trim.nonEmpty))
      case _ => throw new IllegalArgumentException("Error parsing S3 URI")
    }
  }

  /**
   * Credential extractor for a s3 bucket from the credential config.
   */
  val credentials = new FileDescriptorCredentials[SerializableAWSCredentials] {
    val protocol = "s3"

    // The credential id for a path is its bucket name (text before the first '/').
    def id(path: String) = path.split("/").headOption.mkString

    def createCredentials(id: String, s3Config: Config) =
      new SerializableAWSCredentials(
        s3Config.getString("access-key"),
        s3Config.getString("secret-key"))
  }

  /**
   * Cache of `S3Bucket` handles keyed by bucket name, shared by every
   * descriptor so that each bucket client is created at most once.
   */
  private val s3Buckets = TrieMap.empty[String, S3Bucket]
}