diff --git a/moto/s3/exceptions.py b/moto/s3/exceptions.py
index ba7b09429..6633c62d3 100644
--- a/moto/s3/exceptions.py
+++ b/moto/s3/exceptions.py
@@ -1,2 +1,6 @@
class BucketAlreadyExists(Exception):
pass
+
+
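+# Raised when a bucket name cannot be found in the backend; responses translate it into a 404.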
+class MissingBucket(Exception):
+ pass
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 67b40d5e0..b224fbe21 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -7,7 +7,7 @@ import itertools
from moto.core import BaseBackend
from moto.core.utils import iso_8601_datetime, rfc_1123_datetime
-from .exceptions import BucketAlreadyExists
+from .exceptions import BucketAlreadyExists, MissingBucket
from .utils import clean_key_name, _VersionedKeyStore
UPLOAD_ID_BYTES = 43
@@ -177,40 +177,42 @@ class S3Backend(BaseBackend):
return self.buckets.values()
def get_bucket(self, bucket_name):
- return self.buckets.get(bucket_name)
+ try:
+ return self.buckets[bucket_name]
+ except KeyError:
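+ # Translate the KeyError into a backend-level error that the response layer maps to a 404.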
+ raise MissingBucket()
def delete_bucket(self, bucket_name):
- bucket = self.buckets.get(bucket_name)
- if bucket:
- if bucket.keys:
- # Can't delete a bucket with keys
- return False
- else:
- return self.buckets.pop(bucket_name)
- return None
+ bucket = self.get_bucket(bucket_name)
+ if bucket.keys:
+ # Can't delete a bucket with keys
+ return False
+ else:
+ return self.buckets.pop(bucket_name)
def set_bucket_versioning(self, bucket_name, status):
- self.buckets[bucket_name].versioning_status = status
+ self.get_bucket(bucket_name).versioning_status = status
def get_bucket_versioning(self, bucket_name):
- return self.buckets[bucket_name].versioning_status
+ return self.get_bucket(bucket_name).versioning_status
def get_bucket_versions(self, bucket_name, delimiter=None,
encoding_type=None,
key_marker=None,
max_keys=None,
version_id_marker=None):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
if any((delimiter, encoding_type, key_marker, version_id_marker)):
raise NotImplementedError(
"Called get_bucket_versions with some of delimiter, encoding_type, key_marker, version_id_marker")
return itertools.chain(*(l for _, l in bucket.keys.iterlists()))
+
def set_key(self, bucket_name, key_name, value, storage=None, etag=None):
key_name = clean_key_name(key_name)
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
old_key = bucket.keys.get(key_name, None)
if old_key is not None and bucket.is_versioned:
@@ -248,14 +250,14 @@ class S3Backend(BaseBackend):
return key
def initiate_multipart(self, bucket_name, key_name):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
new_multipart = FakeMultipart(key_name)
bucket.multiparts[new_multipart.id] = new_multipart
return new_multipart
def complete_multipart(self, bucket_name, multipart_id):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
multipart = bucket.multiparts[multipart_id]
value, etag = multipart.complete()
if value is None:
@@ -265,27 +267,27 @@ class S3Backend(BaseBackend):
return self.set_key(bucket_name, multipart.key_name, value, etag=etag)
def cancel_multipart(self, bucket_name, multipart_id):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
del bucket.multiparts[multipart_id]
def list_multipart(self, bucket_name, multipart_id):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
return bucket.multiparts[multipart_id].list_parts()
def get_all_multiparts(self, bucket_name):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
return bucket.multiparts
def set_part(self, bucket_name, multipart_id, part_id, value):
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
multipart = bucket.multiparts[multipart_id]
return multipart.set_part(part_id, value)
def copy_part(self, dest_bucket_name, multipart_id, part_id,
src_bucket_name, src_key_name):
src_key_name = clean_key_name(src_key_name)
- src_bucket = self.buckets[src_bucket_name]
- dest_bucket = self.buckets[dest_bucket_name]
+ src_bucket = self.get_bucket(src_bucket_name)
+ dest_bucket = self.get_bucket(dest_bucket_name)
multipart = dest_bucket.multiparts[multipart_id]
return multipart.set_part(part_id, src_bucket.keys[src_key_name].value)
@@ -317,14 +319,14 @@ class S3Backend(BaseBackend):
def delete_key(self, bucket_name, key_name):
key_name = clean_key_name(key_name)
- bucket = self.buckets[bucket_name]
+ bucket = self.get_bucket(bucket_name)
return bucket.keys.pop(key_name)
def copy_key(self, src_bucket_name, src_key_name, dest_bucket_name, dest_key_name, storage=None):
src_key_name = clean_key_name(src_key_name)
dest_key_name = clean_key_name(dest_key_name)
- src_bucket = self.buckets[src_bucket_name]
- dest_bucket = self.buckets[dest_bucket_name]
+ src_bucket = self.get_bucket(src_bucket_name)
+ dest_bucket = self.get_bucket(dest_bucket_name)
key = src_bucket.keys[src_key_name]
if dest_key_name != src_key_name:
key = key.copy(dest_key_name)
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 9e657430f..eee8e22de 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -3,7 +3,7 @@ import re
from jinja2 import Template
-from .exceptions import BucketAlreadyExists
+from .exceptions import BucketAlreadyExists, MissingBucket
from .models import s3_backend
from .utils import bucket_name_from_url
from xml.dom import minidom
@@ -26,7 +26,11 @@ class ResponseObject(object):
return template.render(buckets=all_buckets)
def bucket_response(self, request, full_url, headers):
- response = self._bucket_response(request, full_url, headers)
+ try:
+ response = self._bucket_response(request, full_url, headers)
+ except MissingBucket:
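+ # Any bucket-level operation against a bucket that does not exist returns a 404.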
+ return 404, headers, ""
+
if isinstance(response, basestring):
return 200, headers, response
else:
@@ -57,11 +61,12 @@ class ResponseObject(object):
raise NotImplementedError("Method {0} has not been impelemented in the S3 backend yet".format(method))
def _bucket_response_head(self, bucket_name, headers):
- bucket = self.backend.get_bucket(bucket_name)
- if bucket:
- return 200, headers, ""
- else:
+ try:
+ self.backend.get_bucket(bucket_name)
+ except MissingBucket:
return 404, headers, ""
+ else:
+ return 200, headers, ""
def _bucket_response_get(self, bucket_name, querystring, headers):
if 'uploads' in querystring:
@@ -104,22 +109,23 @@ class ResponseObject(object):
is_truncated='false',
)
- bucket = self.backend.get_bucket(bucket_name)
- if bucket:
- prefix = querystring.get('prefix', [None])[0]
- delimiter = querystring.get('delimiter', [None])[0]
- result_keys, result_folders = self.backend.prefix_query(bucket, prefix, delimiter)
- template = Template(S3_BUCKET_GET_RESPONSE)
- return 200, headers, template.render(
- bucket=bucket,
- prefix=prefix,
- delimiter=delimiter,
- result_keys=result_keys,
- result_folders=result_folders
- )
- else:
+ try:
+ bucket = self.backend.get_bucket(bucket_name)
+ except MissingBucket:
return 404, headers, ""
+ prefix = querystring.get('prefix', [None])[0]
+ delimiter = querystring.get('delimiter', [None])[0]
+ result_keys, result_folders = self.backend.prefix_query(bucket, prefix, delimiter)
+ template = Template(S3_BUCKET_GET_RESPONSE)
+ return 200, headers, template.render(
+ bucket=bucket,
+ prefix=prefix,
+ delimiter=delimiter,
+ result_keys=result_keys,
+ result_folders=result_folders
+ )
+
def _bucket_response_put(self, request, bucket_name, querystring, headers):
if 'versioning' in querystring:
ver = re.search('([A-Za-z]+)', request.body)
@@ -138,12 +144,14 @@ class ResponseObject(object):
return 200, headers, template.render(bucket=new_bucket)
def _bucket_response_delete(self, bucket_name, headers):
- removed_bucket = self.backend.delete_bucket(bucket_name)
- if removed_bucket is None:
+ try:
+ removed_bucket = self.backend.delete_bucket(bucket_name)
+ except MissingBucket:
# Non-existant bucket
template = Template(S3_DELETE_NON_EXISTING_BUCKET)
return 404, headers, template.render(bucket_name=bucket_name)
- elif removed_bucket:
+
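+ # delete_bucket returns False when the bucket still contains keys.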
+ if removed_bucket:
# Bucket exists
template = Template(S3_DELETE_BUCKET_SUCCESS)
return 204, headers, template.render(bucket=removed_bucket)
@@ -198,13 +206,17 @@ class ResponseObject(object):
key_name = k.firstChild.nodeValue
self.backend.delete_key(bucket_name, key_name)
deleted_names.append(key_name)
- except KeyError as e:
+ except KeyError:
error_names.append(key_name)
- return 200, headers, template.render(deleted=deleted_names,delete_errors=error_names)
+ return 200, headers, template.render(deleted=deleted_names, delete_errors=error_names)
def key_response(self, request, full_url, headers):
- response = self._key_response(request, full_url, headers)
+ try:
+ response = self._key_response(request, full_url, headers)
+ except MissingBucket:
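+ # Key-level operations against a missing bucket surface as a 404 as well.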
+ return 404, headers, ""
+
if isinstance(response, basestring):
return 200, headers, response
else:
@@ -455,43 +467,43 @@ S3_DELETE_BUCKET_WITH_ITEMS_ERROR = """
S3_BUCKET_VERSIONING = """
- {{ bucket_versioning_status }}
+ {{ bucket_versioning_status }}
"""
S3_BUCKET_GET_VERSIONING = """
{% if status is none %}
-
+
{% else %}
-
- {{ status }}
-
+
+ {{ status }}
+
{% endif %}
"""
S3_BUCKET_GET_VERSIONS = """
- {{ bucket.name }}
- {{ prefix }}
- {{ key_marker }}
- {{ max_keys }}
- {{ is_truncated }}
- {% for key in key_list %}
-
- {{ key.name }}
- {{ key._version_id }}
- false
- {{ key.last_modified_ISO8601 }}
- {{ key.etag }}
- {{ key.size }}
- {{ key.storage_class }}
-
- 75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a
- webfile
-
-
- {% endfor %}
+ {{ bucket.name }}
+ {{ prefix }}
+ {{ key_marker }}
+ {{ max_keys }}
+ {{ is_truncated }}
+ {% for key in key_list %}
+
+ {{ key.name }}
+ {{ key._version_id }}
+ false
+ {{ key.last_modified_ISO8601 }}
+ {{ key.etag }}
+ {{ key.size }}
+ {{ key.storage_class }}
+
+ 75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a
+ webfile
+
+
+ {% endfor %}
"""
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 5f08f30cd..1738db53f 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -149,6 +149,16 @@ def test_list_multiparts():
uploads.should.be.empty
+@mock_s3
+def test_key_save_to_missing_bucket():
+ conn = boto.connect_s3('the_key', 'the_secret')
+ bucket = conn.get_bucket('mybucket', validate=False)
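+ # validate=False keeps boto from checking that the bucket exists, so the failure comes from the PUT itself.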
+
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string.when.called_with("foobar").should.throw(S3ResponseError)
+
+
@mock_s3
def test_missing_key():
conn = boto.connect_s3('the_key', 'the_secret')