import errno
import tempfile
from contextlib import closing

import boto.s3.connection

import simplestreams.contentsource as cs
import simplestreams.objectstores as objectstores
class S3ObjectStore(objectstores.ObjectStore):
    """ObjectStore backed by an Amazon S3 bucket (via boto).

    Keys are stored under ``path_prefix`` inside ``bucketname``, both parsed
    from a prefix of the form ``s3://bucket/path_prefix``.
    """

    # Lazily-created boto connection and bucket handles (see _conn / bucket).
    _connection = None
    _bucket = None

    def __init__(self, prefix):
        # expect 's3://bucket/path_prefix'
        self.prefix = prefix
        if prefix.startswith("s3://"):
            path = prefix[5:]
        else:
            path = prefix

        # first component is the bucket, remainder is the key prefix
        (self.bucketname, self.path_prefix) = path.split("/", 1)

    @property
    def _conn(self):
        # Create the S3 connection on first use and cache it.
        if not self._connection:
            self._connection = boto.s3.connection.S3Connection()
        return self._connection

    @property
    def bucket(self):
        # Look up the bucket on first use and cache it.
        if not self._bucket:
            self._bucket = self._conn.get_bucket(self.bucketname)
        return self._bucket

    def insert(self, path, reader, checksums=None, mutable=True):
        #store content from reader.read() into path, expecting result checksum
        # Spool to a temp file first: S3 uploads need a seekable file object.
        # self.read_size is presumably inherited from ObjectStore — confirm.
        tfile = tempfile.TemporaryFile()
        try:
            with reader(path) as rfp:
                while True:
                    buf = rfp.read(self.read_size)
                    tfile.write(buf)
                    # a short read signals end of stream
                    if len(buf) != self.read_size:
                        break
            tfile.seek(0)
            with closing(self.bucket.new_key(self.path_prefix + path)) as key:
                key.set_contents_from_file(tfile)
        finally:
            tfile.close()

    def insert_content(self, path, content, checksums=None):
        # Store an in-memory string/bytes payload directly at path.
        with closing(self.bucket.new_key(self.path_prefix + path)) as key:
            key.set_contents_from_string(content)

    def remove(self, path):
        #remove path from store
        self.bucket.delete_key(self.path_prefix + path)

    def reader(self, path):
        # essentially return an 'open(path, r)'
        key = self.bucket.get_key(self.path_prefix + path)
        if not key:
            # mimic a filesystem open() failure for a missing key
            myerr = IOError("Unable to open %s" % path)
            myerr.errno = errno.ENOENT
            raise myerr
        return cs.FdContentSource(fd=key, url=self.path_prefix + path)

    def exists_with_checksum(self, path, checksums=None):
        """Return True if path exists and its md5 matches checksums['md5']."""
        key = self.bucket.get_key(self.path_prefix + path)
        if key is None:
            return False
        if 'md5' in checksums:
            # For non-multipart uploads the S3 ETag is the MD5 hex digest,
            # quoted; strip the quotes before comparing.
            return checksums['md5'] == key.etag.replace('"', "")
        return False