diff --git a/AUTHORS.md b/AUTHORS.md
index 1cf19dd76..8a0831dcd 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -7,4 +7,8 @@ Moto is written by Steve Pulec with contributions from:
* [Dan Berglund](https://github.com/cheif)
* [Lincoln de Sousa](https://github.com/clarete)
* [mhock](https://github.com/mhock)
-* [Ilya Sukhanov](https://github.com/IlyaSukhanov)
\ No newline at end of file
+* [Ilya Sukhanov](https://github.com/IlyaSukhanov)
+* [Lucian Branescu Mihaila](https://github.com/lucian1900)
+* [Konstantinos Koukopoulos](https://github.com/kouk)
+* [attili](https://github.com/attili)
+* [JJ Zeng](https://github.com/jjofseattle)
diff --git a/moto/__init__.py b/moto/__init__.py
index 57e8eef38..634daa00e 100644
--- a/moto/__init__.py
+++ b/moto/__init__.py
@@ -7,6 +7,8 @@ from .ec2 import mock_ec2
from .elb import mock_elb
from .emr import mock_emr
from .s3 import mock_s3
+from .s3bucket_path import mock_s3bucket_path
from .ses import mock_ses
from .sqs import mock_sqs
from .sts import mock_sts
+from .route53 import mock_route53
diff --git a/moto/backends.py b/moto/backends.py
index 6f375a8f1..b11005227 100644
--- a/moto/backends.py
+++ b/moto/backends.py
@@ -4,9 +4,11 @@ from moto.ec2 import ec2_backend
from moto.elb import elb_backend
from moto.emr import emr_backend
from moto.s3 import s3_backend
+from moto.s3bucket_path import s3bucket_path_backend
from moto.ses import ses_backend
from moto.sqs import sqs_backend
from moto.sts import sts_backend
+from moto.route53 import route53_backend
BACKENDS = {
'autoscaling': autoscaling_backend,
@@ -15,7 +17,9 @@ BACKENDS = {
'elb': elb_backend,
'emr': emr_backend,
's3': s3_backend,
+ 's3bucket_path': s3bucket_path_backend,
'ses': ses_backend,
'sqs': sqs_backend,
'sts': sts_backend,
+ 'route53': route53_backend
}
diff --git a/moto/core/models.py b/moto/core/models.py
index f3e6ad701..17238fcb0 100644
--- a/moto/core/models.py
+++ b/moto/core/models.py
@@ -9,6 +9,7 @@ from .utils import convert_regex_to_flask_path
class MockAWS(object):
def __init__(self, backend):
self.backend = backend
+ HTTPretty.reset()
def __call__(self, func):
return self.decorate_callable(func)
diff --git a/moto/route53/__init__.py b/moto/route53/__init__.py
new file mode 100644
index 000000000..6448c3c39
--- /dev/null
+++ b/moto/route53/__init__.py
@@ -0,0 +1,2 @@
+from .models import route53_backend
+mock_route53 = route53_backend.decorator
diff --git a/moto/route53/models.py b/moto/route53/models.py
new file mode 100644
index 000000000..d901996fa
--- /dev/null
+++ b/moto/route53/models.py
@@ -0,0 +1,44 @@
+from moto.core import BaseBackend
+from moto.core.utils import get_random_hex
+
+
+class FakeZone:
+
+ def __init__(self, name, id):
+ self.name = name
+ self.id = id
+ self.rrsets = {}
+
+ def add_rrset(self, name, rrset):
+ self.rrsets[name] = rrset
+
+ def delete_rrset(self, name):
+ self.rrsets.pop(name, None)
+
+
+class Route53Backend(BaseBackend):
+
+ def __init__(self):
+ self.zones = {}
+
+ def create_hosted_zone(self, name):
+ new_id = get_random_hex()
+ new_zone = FakeZone(name, new_id)
+ self.zones[new_id] = new_zone
+ return new_zone
+
+ def get_all_hosted_zones(self):
+ return self.zones.values()
+
+ def get_hosted_zone(self, id):
+ return self.zones.get(id)
+
+ def delete_hosted_zone(self, id):
+ zone = self.zones.get(id)
+ if zone:
+ del self.zones[id]
+ return zone
+ return None
+
+
+route53_backend = Route53Backend()
diff --git a/moto/route53/responses.py b/moto/route53/responses.py
new file mode 100644
index 000000000..55160922e
--- /dev/null
+++ b/moto/route53/responses.py
@@ -0,0 +1,124 @@
+from jinja2 import Template
+from urlparse import parse_qs, urlparse
+from .models import route53_backend
+import xmltodict
+import dicttoxml
+
+
+def list_or_create_hostzone_response(request, full_url, headers):
+
+ if request.method == "POST":
+ elements = xmltodict.parse(request.body)
+ new_zone = route53_backend.create_hosted_zone(elements["CreateHostedZoneRequest"]["Name"])
+ template = Template(CREATE_HOSTED_ZONE_RESPONSE)
+ return 201, headers, template.render(zone=new_zone)
+
+ elif request.method == "GET":
+ all_zones = route53_backend.get_all_hosted_zones()
+ template = Template(LIST_HOSTED_ZONES_RESPONSE)
+ return 200, headers, template.render(zones=all_zones)
+
+
+def get_or_delete_hostzone_response(request, full_url, headers):
+ parsed_url = urlparse(full_url)
+ zoneid = parsed_url.path.rstrip('/').rsplit('/', 1)[1]
+ the_zone = route53_backend.get_hosted_zone(zoneid)
+ if not the_zone:
+ return 404, headers, "Zone %s not Found" % zoneid
+
+ if request.method == "GET":
+ template = Template(GET_HOSTED_ZONE_RESPONSE)
+ return 200, headers, template.render(zone=the_zone)
+ elif request.method == "DELETE":
+ route53_backend.delete_hosted_zone(zoneid)
+ return 200, headers, DELETE_HOSTED_ZONE_RESPONSE
+
+
+def rrset_response(request, full_url, headers):
+ parsed_url = urlparse(full_url)
+ method = request.method
+
+ zoneid = parsed_url.path.rstrip('/').rsplit('/', 2)[1]
+ the_zone = route53_backend.get_hosted_zone(zoneid)
+ if not the_zone:
+ return 404, headers, "Zone %s Not Found" % zoneid
+
+ if method == "POST":
+ elements = xmltodict.parse(request.body)
+ for key, value in elements['ChangeResourceRecordSetsRequest']['ChangeBatch']['Changes'].items():
+ action = value['Action']
+ rrset = value['ResourceRecordSet']
+
+ if action == 'CREATE':
+ the_zone.add_rrset(rrset["Name"], rrset)
+ elif action == "DELETE":
+ the_zone.delete_rrset(rrset["Name"])
+
+ return 200, headers, CHANGE_RRSET_RESPONSE
+
+ elif method == "GET":
+ querystring = parse_qs(parsed_url.query)
+ template = Template(LIST_RRSET_REPONSE)
+ rrset_list = []
+ for key, value in the_zone.rrsets.items():
+ if 'type' not in querystring or querystring["type"][0] == value["Type"]:
+ rrset_list.append(dicttoxml.dicttoxml({"ResourceRecordSet": value}, root=False))
+
+ return 200, headers, template.render(rrsets=rrset_list)
+
+
+LIST_RRSET_REPONSE = """
+
+ {% for rrset in rrsets %}
+ {{ rrset }}
+ {% endfor %}
+
+"""
+
+CHANGE_RRSET_RESPONSE = """
+
+ PENDING
+ 2010-09-10T01:36:41.958Z
+
+"""
+
+DELETE_HOSTED_ZONE_RESPONSE = """
+
+
+"""
+
+GET_HOSTED_ZONE_RESPONSE = """
+
+ /hostedzone/{{ zone.id }}
+ {{ zone.name }}
+ {{ zone.rrsets|count }}
+
+
+ moto.test.com
+
+"""
+
+CREATE_HOSTED_ZONE_RESPONSE = """
+
+ /hostedzone/{{ zone.id }}
+ {{ zone.name }}
+ 0
+
+
+
+ moto.test.com
+
+
+"""
+
+LIST_HOSTED_ZONES_RESPONSE = """
+
+ {% for zone in zones %}
+
+ {{ zone.id }}
+ {{ zone.name }}
+ {{ zone.rrsets|count }}
+
+ {% endfor %}
+
+"""
diff --git a/moto/route53/urls.py b/moto/route53/urls.py
new file mode 100644
index 000000000..7b76e6b23
--- /dev/null
+++ b/moto/route53/urls.py
@@ -0,0 +1,11 @@
+import responses
+
+url_bases = [
+ "https://route53.amazonaws.com/201.-..-../hostedzone",
+]
+
+url_paths = {
+ '{0}$': responses.list_or_create_hostzone_response,
+ '{0}/.+$': responses.get_or_delete_hostzone_response,
+ '{0}/.+/rrset$': responses.rrset_response,
+}
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 6098ad21c..e59558864 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -1,3 +1,5 @@
+import os
+import base64
import datetime
import hashlib
@@ -5,6 +7,9 @@ from moto.core import BaseBackend
from moto.core.utils import iso_8601_datetime, rfc_1123_datetime
from .utils import clean_key_name
+UPLOAD_ID_BYTES=43
+UPLOAD_PART_MIN_SIZE=5242880
+
class FakeKey(object):
def __init__(self, name, value):
@@ -23,7 +28,7 @@ class FakeKey(object):
@property
def etag(self):
value_md5 = hashlib.md5()
- value_md5.update(self.value)
+ value_md5.update(bytes(self.value))
return '"{0}"'.format(value_md5.hexdigest())
@property
@@ -52,10 +57,48 @@ class FakeKey(object):
return len(self.value)
+class FakeMultipart(object):
+ def __init__(self, key_name):
+ self.key_name = key_name
+ self.parts = {}
+ self.id = base64.b64encode(os.urandom(UPLOAD_ID_BYTES)).replace('=', '').replace('+', '')
+
+ def complete(self):
+ total = bytearray()
+ last_part_name = len(self.list_parts())
+
+ for part in self.list_parts():
+ if part.name != last_part_name and len(part.value) < UPLOAD_PART_MIN_SIZE:
+ return
+ total.extend(part.value)
+
+ return total
+
+ def set_part(self, part_id, value):
+ if part_id < 1:
+ return
+
+ key = FakeKey(part_id, value)
+ self.parts[part_id] = key
+ return key
+
+ def list_parts(self):
+ parts = []
+
+ for part_id, index in enumerate(sorted(self.parts.keys()), start=1):
+ # Make sure part ids are continuous
+ if part_id != index:
+ return
+ parts.append(self.parts[part_id])
+
+ return parts
+
+
class FakeBucket(object):
def __init__(self, name):
self.name = name
self.keys = {}
+ self.multiparts = {}
class S3Backend(BaseBackend):
@@ -106,6 +149,36 @@ class S3Backend(BaseBackend):
if bucket:
return bucket.keys.get(key_name)
+ def initiate_multipart(self, bucket_name, key_name):
+ bucket = self.buckets[bucket_name]
+ new_multipart = FakeMultipart(key_name)
+ bucket.multiparts[new_multipart.id] = new_multipart
+
+ return new_multipart
+
+ def complete_multipart(self, bucket_name, multipart_id):
+ bucket = self.buckets[bucket_name]
+ multipart = bucket.multiparts[multipart_id]
+ value = multipart.complete()
+ if value is None:
+ return
+ del bucket.multiparts[multipart_id]
+
+ return self.set_key(bucket_name, multipart.key_name, value)
+
+ def cancel_multipart(self, bucket_name, multipart_id):
+ bucket = self.buckets[bucket_name]
+ del bucket.multiparts[multipart_id]
+
+ def list_multipart(self, bucket_name, multipart_id):
+ bucket = self.buckets[bucket_name]
+ return bucket.multiparts[multipart_id].list_parts()
+
+ def set_part(self, bucket_name, multipart_id, part_id, value):
+ bucket = self.buckets[bucket_name]
+ multipart = bucket.multiparts[multipart_id]
+ return multipart.set_part(part_id, value)
+
def prefix_query(self, bucket, prefix, delimiter):
key_results = set()
folder_results = set()
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 2fd6f7dfb..d9b014206 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -7,173 +7,233 @@ from .models import s3_backend
from .utils import bucket_name_from_url
-def all_buckets():
- # No bucket specified. Listing all buckets
- all_buckets = s3_backend.get_all_buckets()
- template = Template(S3_ALL_BUCKETS)
- return template.render(buckets=all_buckets)
+def parse_key_name(pth):
+ return pth.lstrip("/")
-def bucket_response(request, full_url, headers):
- response = _bucket_response(request, full_url, headers)
- if isinstance(response, basestring):
- return 200, headers, response
+class ResponseObject(object):
+ def __init__(self, backend, bucket_name_from_url, parse_key_name):
+ self.backend = backend
+ self.bucket_name_from_url = bucket_name_from_url
+ self.parse_key_name = parse_key_name
- else:
- status_code, headers, response_content = response
- return status_code, headers, response_content
+ def all_buckets(self):
+ # No bucket specified. Listing all buckets
+ all_buckets = self.backend.get_all_buckets()
+ template = Template(S3_ALL_BUCKETS)
+ return template.render(buckets=all_buckets)
+ def bucket_response(self, request, full_url, headers):
+ response = self._bucket_response(request, full_url, headers)
+ if isinstance(response, basestring):
+ return 200, headers, response
-def _bucket_response(request, full_url, headers):
- parsed_url = urlparse(full_url)
- querystring = parse_qs(parsed_url.query)
- method = request.method
-
- bucket_name = bucket_name_from_url(full_url)
- if not bucket_name:
- # If no bucket specified, list all buckets
- return all_buckets()
-
- if method == 'GET':
- bucket = s3_backend.get_bucket(bucket_name)
- if bucket:
- prefix = querystring.get('prefix', [None])[0]
- delimiter = querystring.get('delimiter', [None])[0]
- result_keys, result_folders = s3_backend.prefix_query(bucket, prefix, delimiter)
- template = Template(S3_BUCKET_GET_RESPONSE)
- return template.render(
- bucket=bucket,
- prefix=prefix,
- delimiter=delimiter,
- result_keys=result_keys,
- result_folders=result_folders
- )
else:
- return 404, headers, ""
- elif method == 'PUT':
- new_bucket = s3_backend.create_bucket(bucket_name)
- template = Template(S3_BUCKET_CREATE_RESPONSE)
- return template.render(bucket=new_bucket)
- elif method == 'DELETE':
- removed_bucket = s3_backend.delete_bucket(bucket_name)
- if removed_bucket is None:
- # Non-existant bucket
- template = Template(S3_DELETE_NON_EXISTING_BUCKET)
- return 404, headers, template.render(bucket_name=bucket_name)
- elif removed_bucket:
- # Bucket exists
- template = Template(S3_DELETE_BUCKET_SUCCESS)
- return 204, headers, template.render(bucket=removed_bucket)
- else:
- # Tried to delete a bucket that still has keys
- template = Template(S3_DELETE_BUCKET_WITH_ITEMS_ERROR)
- return 409, headers, template.render(bucket=removed_bucket)
- elif method == 'POST':
- #POST to bucket-url should create file from form
- if hasattr(request, 'form'):
- #Not HTTPretty
- form = request.form
- else:
- #HTTPretty, build new form object
- form = {}
- for kv in request.body.split('&'):
- k, v = kv.split('=')
- form[k] = v
+ status_code, headers, response_content = response
+ return status_code, headers, response_content
- key = form['key']
- f = form['file']
+ def _bucket_response(self, request, full_url, headers):
+ parsed_url = urlparse(full_url)
+ querystring = parse_qs(parsed_url.query)
+ method = request.method
- new_key = s3_backend.set_key(bucket_name, key, f)
+ bucket_name = self.bucket_name_from_url(full_url)
+ if not bucket_name:
+ # If no bucket specified, list all buckets
+ return self.all_buckets()
- #Metadata
- meta_regex = re.compile('^x-amz-meta-([a-zA-Z0-9\-_]+)$', flags=re.IGNORECASE)
- for form_id in form:
- result = meta_regex.match(form_id)
- if result:
- meta_key = result.group(0).lower()
- metadata = form[form_id]
- new_key.set_metadata(meta_key, metadata)
- return 200, headers, ""
- else:
- raise NotImplementedError("Method {0} has not been impelemented in the S3 backend yet".format(method))
+ if method == 'GET':
+ bucket = self.backend.get_bucket(bucket_name)
+ if bucket:
+ prefix = querystring.get('prefix', [None])[0]
+ delimiter = querystring.get('delimiter', [None])[0]
+ result_keys, result_folders = self.backend.prefix_query(bucket, prefix, delimiter)
+ template = Template(S3_BUCKET_GET_RESPONSE)
+ return template.render(
+ bucket=bucket,
+ prefix=prefix,
+ delimiter=delimiter,
+ result_keys=result_keys,
+ result_folders=result_folders
+ )
+ else:
+ return 404, headers, ""
+ elif method == 'PUT':
+ new_bucket = self.backend.create_bucket(bucket_name)
+ template = Template(S3_BUCKET_CREATE_RESPONSE)
+ return template.render(bucket=new_bucket)
+ elif method == 'DELETE':
+ removed_bucket = self.backend.delete_bucket(bucket_name)
+ if removed_bucket is None:
+                # Non-existent bucket
+ template = Template(S3_DELETE_NON_EXISTING_BUCKET)
+ return 404, headers, template.render(bucket_name=bucket_name)
+ elif removed_bucket:
+ # Bucket exists
+ template = Template(S3_DELETE_BUCKET_SUCCESS)
+ return 204, headers, template.render(bucket=removed_bucket)
+ else:
+ # Tried to delete a bucket that still has keys
+ template = Template(S3_DELETE_BUCKET_WITH_ITEMS_ERROR)
+ return 409, headers, template.render(bucket=removed_bucket)
+ elif method == 'POST':
+ #POST to bucket-url should create file from form
+ if hasattr(request, 'form'):
+ #Not HTTPretty
+ form = request.form
+ else:
+ #HTTPretty, build new form object
+ form = {}
+ for kv in request.body.split('&'):
+ k, v = kv.split('=')
+ form[k] = v
+ key = form['key']
+ f = form['file']
-def key_response(request, full_url, headers):
- response = _key_response(request, full_url, headers)
- if isinstance(response, basestring):
- return 200, headers, response
- else:
- status_code, headers, response_content = response
- return status_code, headers, response_content
-
-
-def _key_response(request, full_url, headers):
- parsed_url = urlparse(full_url)
- method = request.method
-
- key_name = parsed_url.path.lstrip('/')
- bucket_name = bucket_name_from_url(full_url)
- if hasattr(request, 'body'):
- # Boto
- body = request.body
- else:
- # Flask server
- body = request.data
-
- if method == 'GET':
- key = s3_backend.get_key(bucket_name, key_name)
- if key:
- headers.update(key.metadata)
- return 200, headers, key.value
- else:
- return 404, headers, ""
- if method == 'PUT':
- if 'x-amz-copy-source' in request.headers:
- # Copy key
- src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/",1)
- s3_backend.copy_key(src_bucket, src_key, bucket_name, key_name)
- template = Template(S3_OBJECT_COPY_RESPONSE)
- return template.render(key=src_key)
- streaming_request = hasattr(request, 'streaming') and request.streaming
- closing_connection = headers.get('connection') == 'close'
- if closing_connection and streaming_request:
- # Closing the connection of a streaming request. No more data
- new_key = s3_backend.get_key(bucket_name, key_name)
- elif streaming_request:
- # Streaming request, more data
- new_key = s3_backend.append_to_key(bucket_name, key_name, body)
- else:
- # Initial data
- new_key = s3_backend.set_key(bucket_name, key_name, body)
- request.streaming = True
+ new_key = self.backend.set_key(bucket_name, key, f)
#Metadata
meta_regex = re.compile('^x-amz-meta-([a-zA-Z0-9\-_]+)$', flags=re.IGNORECASE)
- for header in request.headers:
- if isinstance(header, basestring):
- result = meta_regex.match(header)
- if result:
- meta_key = result.group(0).lower()
- metadata = request.headers[header]
- new_key.set_metadata(meta_key, metadata)
- template = Template(S3_OBJECT_RESPONSE)
- headers.update(new_key.response_dict)
- return 200, headers, template.render(key=new_key)
- elif method == 'HEAD':
- key = s3_backend.get_key(bucket_name, key_name)
- if key:
- headers.update(key.metadata)
- headers.update(key.response_dict)
- return 200, headers, key.value
- else:
- return 404, headers, ""
- elif method == 'DELETE':
- removed_key = s3_backend.delete_key(bucket_name, key_name)
- template = Template(S3_DELETE_OBJECT_SUCCESS)
- return 204, headers, template.render(bucket=removed_key)
- else:
- raise NotImplementedError("Method {0} has not been impelemented in the S3 backend yet".format(method))
+ for form_id in form:
+ result = meta_regex.match(form_id)
+ if result:
+ meta_key = result.group(0).lower()
+ metadata = form[form_id]
+ new_key.set_metadata(meta_key, metadata)
+ return 200, headers, ""
+ else:
+        raise NotImplementedError("Method {0} has not been implemented in the S3 backend yet".format(method))
+
+ def key_response(self, request, full_url, headers):
+ response = self._key_response(request, full_url, headers)
+ if isinstance(response, basestring):
+ return 200, headers, response
+ else:
+ status_code, headers, response_content = response
+ return status_code, headers, response_content
+
+ def _key_response(self, request, full_url, headers):
+ parsed_url = urlparse(full_url)
+ query = parse_qs(parsed_url.query)
+ method = request.method
+
+ key_name = self.parse_key_name(parsed_url.path)
+
+ bucket_name = self.bucket_name_from_url(full_url)
+
+ if hasattr(request, 'body'):
+ # Boto
+ body = request.body
+ else:
+ # Flask server
+ body = request.data
+
+ if method == 'GET':
+ if 'uploadId' in query:
+ upload_id = query['uploadId'][0]
+ parts = self.backend.list_multipart(bucket_name, upload_id)
+ template = Template(S3_MULTIPART_LIST_RESPONSE)
+ return 200, headers, template.render(
+ bucket_name=bucket_name,
+ key_name=key_name,
+ upload_id=upload_id,
+ count=len(parts),
+ parts=parts
+ )
+ key = self.backend.get_key(bucket_name, key_name)
+ if key:
+ headers.update(key.metadata)
+ return 200, headers, key.value
+ else:
+ return 404, headers, ""
+ if method == 'PUT':
+ if 'uploadId' in query and 'partNumber' in query and body:
+ upload_id = query['uploadId'][0]
+ part_number = int(query['partNumber'][0])
+ key = self.backend.set_part(bucket_name, upload_id, part_number, body)
+ template = Template(S3_MULTIPART_UPLOAD_RESPONSE)
+ headers.update(key.response_dict)
+ return 200, headers, template.render(part=key)
+
+ if 'x-amz-copy-source' in request.headers:
+ # Copy key
+ src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/", 1)
+ self.backend.copy_key(src_bucket, src_key, bucket_name, key_name)
+ template = Template(S3_OBJECT_COPY_RESPONSE)
+ return template.render(key=src_key)
+ streaming_request = hasattr(request, 'streaming') and request.streaming
+ closing_connection = headers.get('connection') == 'close'
+ if closing_connection and streaming_request:
+ # Closing the connection of a streaming request. No more data
+ new_key = self.backend.get_key(bucket_name, key_name)
+ elif streaming_request:
+ # Streaming request, more data
+ new_key = self.backend.append_to_key(bucket_name, key_name, body)
+ else:
+ # Initial data
+ new_key = self.backend.set_key(bucket_name, key_name, body)
+ request.streaming = True
+
+ #Metadata
+ meta_regex = re.compile('^x-amz-meta-([a-zA-Z0-9\-_]+)$', flags=re.IGNORECASE)
+ for header in request.headers:
+ if isinstance(header, basestring):
+ result = meta_regex.match(header)
+ if result:
+ meta_key = result.group(0).lower()
+ metadata = request.headers[header]
+ new_key.set_metadata(meta_key, metadata)
+ template = Template(S3_OBJECT_RESPONSE)
+ headers.update(new_key.response_dict)
+ return 200, headers, template.render(key=new_key)
+ elif method == 'HEAD':
+ key = self.backend.get_key(bucket_name, key_name)
+ if key:
+ headers.update(key.metadata)
+ headers.update(key.response_dict)
+ return 200, headers, ""
+ else:
+ return 404, headers, ""
+ elif method == 'DELETE':
+ if 'uploadId' in query:
+ upload_id = query['uploadId'][0]
+ self.backend.cancel_multipart(bucket_name, upload_id)
+ return 204, headers, ""
+ removed_key = self.backend.delete_key(bucket_name, key_name)
+ template = Template(S3_DELETE_OBJECT_SUCCESS)
+ return 204, headers, template.render(bucket=removed_key)
+ elif method == 'POST':
+ if body == '' and parsed_url.query == 'uploads':
+ multipart = self.backend.initiate_multipart(bucket_name, key_name)
+ template = Template(S3_MULTIPART_INITIATE_RESPONSE)
+ response = template.render(
+ bucket_name=bucket_name,
+ key_name=key_name,
+ upload_id=multipart.id,
+ )
+ return 200, headers, response
+
+ if 'uploadId' in query:
+ upload_id = query['uploadId'][0]
+ key = self.backend.complete_multipart(bucket_name, upload_id)
+
+ if key is not None:
+ template = Template(S3_MULTIPART_COMPLETE_RESPONSE)
+ return template.render(
+ bucket_name=bucket_name,
+ key_name=key.name,
+ etag=key.etag,
+ )
+ template = Template(S3_MULTIPART_COMPLETE_TOO_SMALL_ERROR)
+ return 400, headers, template.render()
+ else:
+ raise NotImplementedError("Method POST had only been implemented for multipart uploads so far")
+ else:
+            raise NotImplementedError("Method {0} has not been implemented in the S3 backend yet".format(method))
+
+S3ResponseInstance = ResponseObject(s3_backend, bucket_name_from_url, parse_key_name)
S3_ALL_BUCKETS = """
@@ -269,3 +329,62 @@ S3_OBJECT_COPY_RESPONSE = """
+
+ {{ bucket_name }}
+ {{ key_name }}
+ {{ upload_id }}
+"""
+
+S3_MULTIPART_UPLOAD_RESPONSE = """
+
+ {{ part.last_modified_ISO8601 }}
+ {{ part.etag }}
+"""
+
+S3_MULTIPART_LIST_RESPONSE = """
+
+ {{ bucket_name }}
+ {{ key_name }}
+ {{ upload_id }}
+ STANDARD
+
+ 75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a
+ webfile
+
+
+ 75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a
+ webfile
+
+ STANDARD
+ 1
+ {{ count }}
+ {{ count }}
+ false
+ {% for part in parts %}
+
+ {{ part.name }}
+ {{ part.last_modified_ISO8601 }}
+ {{ part.etag }}
+ {{ part.size }}
+
+ {% endfor %}
+"""
+
+S3_MULTIPART_COMPLETE_RESPONSE = """
+
+ http://{{ bucket_name }}.s3.amazonaws.com/{{ key_name }}
+ {{ bucket_name }}
+ {{ key_name }}
+ {{ etag }}
+
+"""
+
+S3_MULTIPART_COMPLETE_TOO_SMALL_ERROR = """
+
+ EntityTooSmall
+ Your proposed upload is smaller than the minimum allowed object size.
+ asdfasdfsdafds
+ sdfgdsfgdsfgdfsdsfgdfs
+"""
diff --git a/moto/s3/urls.py b/moto/s3/urls.py
index 21370c15a..5f9bc0cf1 100644
--- a/moto/s3/urls.py
+++ b/moto/s3/urls.py
@@ -1,10 +1,10 @@
-from .responses import bucket_response, key_response
+from .responses import S3ResponseInstance
url_bases = [
"https?://(?P[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
- '{0}/$': bucket_response,
- '{0}/(?P[a-zA-Z0-9\-_.]+)': key_response,
+ '{0}/$': S3ResponseInstance.bucket_response,
+ '{0}/(?P[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
diff --git a/moto/s3bucket_path/__init__.py b/moto/s3bucket_path/__init__.py
new file mode 100644
index 000000000..6dd680bed
--- /dev/null
+++ b/moto/s3bucket_path/__init__.py
@@ -0,0 +1,2 @@
+from .models import s3bucket_path_backend
+mock_s3bucket_path = s3bucket_path_backend.decorator
diff --git a/moto/s3bucket_path/models.py b/moto/s3bucket_path/models.py
new file mode 100644
index 000000000..2b7e99539
--- /dev/null
+++ b/moto/s3bucket_path/models.py
@@ -0,0 +1,7 @@
+from moto.s3.models import S3Backend
+
+
+class S3BucketPathBackend(S3Backend):
+ True
+
+s3bucket_path_backend = S3BucketPathBackend()
diff --git a/moto/s3bucket_path/responses.py b/moto/s3bucket_path/responses.py
new file mode 100644
index 000000000..0f54a1a1d
--- /dev/null
+++ b/moto/s3bucket_path/responses.py
@@ -0,0 +1,15 @@
+from .models import s3bucket_path_backend
+
+from .utils import bucket_name_from_url
+
+from moto.s3.responses import ResponseObject
+
+
+def parse_key_name(pth):
+ return "/".join(pth.rstrip("/").split("/")[2:])
+
+S3BucketPathResponseInstance = ResponseObject(
+ s3bucket_path_backend,
+ bucket_name_from_url,
+ parse_key_name,
+)
diff --git a/moto/s3bucket_path/urls.py b/moto/s3bucket_path/urls.py
new file mode 100644
index 000000000..28f1debc8
--- /dev/null
+++ b/moto/s3bucket_path/urls.py
@@ -0,0 +1,20 @@
+from .responses import S3BucketPathResponseInstance as ro
+
+url_bases = [
+ "https?://s3.amazonaws.com"
+]
+
+
+def bucket_response2(*args):
+ return ro.bucket_response(*args)
+
+
+def bucket_response3(*args):
+ return ro.bucket_response(*args)
+
+url_paths = {
+ '{0}/$': bucket_response3,
+ '{0}/(?P[a-zA-Z0-9\-_.]+)$': ro.bucket_response,
+ '{0}/(?P[a-zA-Z0-9\-_.]+)/$': bucket_response2,
+ '{0}/(?P[a-zA-Z0-9\-_./]+)/(?P[a-zA-Z0-9\-_.?]+)': ro.key_response
+}
diff --git a/moto/s3bucket_path/utils.py b/moto/s3bucket_path/utils.py
new file mode 100644
index 000000000..97f1d40f1
--- /dev/null
+++ b/moto/s3bucket_path/utils.py
@@ -0,0 +1,10 @@
+import urlparse
+
+
+def bucket_name_from_url(url):
+ pth = urlparse.urlparse(url).path.lstrip("/")
+
+ l = pth.lstrip("/").split("/")
+ if len(l) == 0 or l[0] == "":
+ return None
+ return l[0]
diff --git a/requirements.txt b/requirements.txt
index 62f6f0a27..81d8f8e04 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,6 @@ mock
nose
https://github.com/spulec/python-coveralls/tarball/796d9dba34b759664e42ba39e6414209a0f319ad
requests
-sure
\ No newline at end of file
+sure
+xmltodict
+dicttoxml
diff --git a/setup.py b/setup.py
index 67e22feb9..4a5806ca9 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ if sys.version_info < (2, 7):
setup(
name='moto',
- version='0.2.9',
+ version='0.2.11',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
diff --git a/tests/test_route53/test_route53.py b/tests/test_route53/test_route53.py
new file mode 100644
index 000000000..57da8112a
--- /dev/null
+++ b/tests/test_route53/test_route53.py
@@ -0,0 +1,64 @@
+import urllib2
+
+import boto
+from boto.exception import S3ResponseError
+from boto.s3.key import Key
+from boto.route53.record import ResourceRecordSets
+from freezegun import freeze_time
+import requests
+
+import sure # noqa
+
+from moto import mock_route53
+
+
+@mock_route53
+def test_hosted_zone():
+ conn = boto.connect_route53('the_key', 'the_secret')
+ firstzone = conn.create_hosted_zone("testdns.aws.com")
+ zones = conn.get_all_hosted_zones()
+ len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)
+
+ secondzone = conn.create_hosted_zone("testdns1.aws.com")
+ zones = conn.get_all_hosted_zones()
+ len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(2)
+
+ id1 = firstzone["CreateHostedZoneResponse"]["HostedZone"]["Id"]
+ zone = conn.get_hosted_zone(id1)
+ zone["GetHostedZoneResponse"]["HostedZone"]["Name"].should.equal("testdns.aws.com")
+
+ conn.delete_hosted_zone(id1)
+ zones = conn.get_all_hosted_zones()
+ len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)
+
+ conn.get_hosted_zone.when.called_with("abcd").should.throw(boto.route53.exception.DNSServerError, "404 Not Found")
+
+
+@mock_route53
+def test_rrset():
+ conn = boto.connect_route53('the_key', 'the_secret')
+
+ conn.get_all_rrsets.when.called_with("abcd", type="A").\
+ should.throw(boto.route53.exception.DNSServerError, "404 Not Found")
+
+ zone = conn.create_hosted_zone("testdns.aws.com")
+ zoneid = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"]
+
+ changes = ResourceRecordSets(conn, zoneid)
+ change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
+ change.add_value("1.2.3.4")
+ changes.commit()
+
+ rrsets = conn.get_all_rrsets(zoneid, type="A")
+ rrsets.should.have.length_of(1)
+ rrsets[0].resource_records[0].should.equal('1.2.3.4')
+
+ rrsets = conn.get_all_rrsets(zoneid, type="CNAME")
+ rrsets.should.have.length_of(0)
+
+ changes = ResourceRecordSets(conn, zoneid)
+ changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A")
+ changes.commit()
+
+ rrsets = conn.get_all_rrsets(zoneid)
+ rrsets.should.have.length_of(0)
diff --git a/tests/test_s3/test_s3.py b/tests/test_s3/test_s3.py
index 6b33f5760..3d9e3f1fb 100644
--- a/tests/test_s3/test_s3.py
+++ b/tests/test_s3/test_s3.py
@@ -1,4 +1,5 @@
import urllib2
+from io import BytesIO
import boto
from boto.exception import S3ResponseError
@@ -37,6 +38,34 @@ def test_my_model_save():
conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')
+@mock_s3
+def test_multipart_upload_too_small():
+ conn = boto.connect_s3('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+
+ multipart = bucket.initiate_multipart_upload("the-key")
+ multipart.upload_part_from_file(BytesIO('hello'), 1)
+ multipart.upload_part_from_file(BytesIO('world'), 2)
+ # Multipart with total size under 5MB is refused
+ multipart.complete_upload.should.throw(S3ResponseError)
+
+
+@mock_s3
+def test_multipart_upload():
+ conn = boto.connect_s3('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+
+ multipart = bucket.initiate_multipart_upload("the-key")
+ part1 = '0' * 5242880
+ multipart.upload_part_from_file(BytesIO(part1), 1)
+ # last part, can be less than 5 MB
+ part2 = '1'
+ multipart.upload_part_from_file(BytesIO(part2), 2)
+ multipart.complete_upload()
+ # we should get both parts as the key contents
+ bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)
+
+
@mock_s3
def test_missing_key():
conn = boto.connect_s3('the_key', 'the_secret')
diff --git a/tests/test_s3bucket_path/test_bucket_path_server.py b/tests/test_s3bucket_path/test_bucket_path_server.py
new file mode 100644
index 000000000..943615767
--- /dev/null
+++ b/tests/test_s3bucket_path/test_bucket_path_server.py
@@ -0,0 +1,50 @@
+import sure # noqa
+
+import moto.server as server
+
+'''
+Test the different server responses
+'''
+server.configure_urls("s3bucket_path")
+
+
+def test_s3_server_get():
+ test_client = server.app.test_client()
+ res = test_client.get('/')
+
+ res.data.should.contain('ListAllMyBucketsResult')
+
+
+def test_s3_server_bucket_create():
+ test_client = server.app.test_client()
+ res = test_client.put('/foobar', 'http://localhost:5000')
+ res.status_code.should.equal(200)
+
+ res = test_client.get('/')
+ res.data.should.contain('foobar')
+
+ res = test_client.get('/foobar', 'http://localhost:5000')
+ res.status_code.should.equal(200)
+ res.data.should.contain("ListBucketResult")
+
+ res = test_client.put('/foobar/bar', 'http://localhost:5000', data='test value')
+ res.status_code.should.equal(200)
+
+ res = test_client.get('/foobar/bar', 'http://localhost:5000')
+ res.status_code.should.equal(200)
+ res.data.should.equal("test value")
+
+
+def test_s3_server_post_to_bucket():
+ test_client = server.app.test_client()
+ res = test_client.put('/foobar', 'http://localhost:5000/')
+ res.status_code.should.equal(200)
+
+ test_client.post('/foobar', "https://localhost:5000/", data={
+ 'key': 'the-key',
+ 'file': 'nothing'
+ })
+
+ res = test_client.get('/foobar/the-key', 'http://localhost:5000/')
+ res.status_code.should.equal(200)
+ res.data.should.equal("nothing")
diff --git a/tests/test_s3bucket_path/test_s3bucket_path.py b/tests/test_s3bucket_path/test_s3bucket_path.py
new file mode 100644
index 000000000..1f62f23eb
--- /dev/null
+++ b/tests/test_s3bucket_path/test_s3bucket_path.py
@@ -0,0 +1,281 @@
+import urllib2
+
+import boto
+from boto.exception import S3ResponseError
+from boto.s3.key import Key
+from boto.s3.connection import OrdinaryCallingFormat
+
+from freezegun import freeze_time
+import requests
+
+import sure # noqa
+
+from moto import mock_s3bucket_path
+
+
+def create_connection(key=None, secret=None):
+ return boto.connect_s3(key, secret, calling_format=OrdinaryCallingFormat())
+
+
+class MyModel(object):
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
+
+ def save(self):
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.get_bucket('mybucket')
+ k = Key(bucket)
+ k.key = self.name
+ k.set_contents_from_string(self.value)
+
+
+@mock_s3bucket_path
+def test_my_model_save():
+ # Create Bucket so that test can run
+ conn = create_connection('the_key', 'the_secret')
+ conn.create_bucket('mybucket')
+ ####################################
+
+ model_instance = MyModel('steve', 'is awesome')
+ model_instance.save()
+
+ conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')
+
+
+@mock_s3bucket_path
+def test_missing_key():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ bucket.get_key("the-key").should.equal(None)
+
+
+@mock_s3bucket_path
+def test_missing_key_urllib2():
+ conn = create_connection('the_key', 'the_secret')
+ conn.create_bucket("foobar")
+
+ urllib2.urlopen.when.called_with("http://s3.amazonaws.com/foobar/the-key").should.throw(urllib2.HTTPError)
+
+
+@mock_s3bucket_path
+def test_empty_key():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("")
+
+ bucket.get_key("the-key").get_contents_as_string().should.equal('')
+
+
+@mock_s3bucket_path
+def test_empty_key_set_on_existing_key():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("foobar")
+
+ bucket.get_key("the-key").get_contents_as_string().should.equal('foobar')
+
+ key.set_contents_from_string("")
+ bucket.get_key("the-key").get_contents_as_string().should.equal('')
+
+
+@mock_s3bucket_path
+def test_large_key_save():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("foobar" * 100000)
+
+ bucket.get_key("the-key").get_contents_as_string().should.equal('foobar' * 100000)
+
+
+@mock_s3bucket_path
+def test_copy_key():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("some value")
+
+ bucket.copy_key('new-key', 'foobar', 'the-key')
+
+ bucket.get_key("the-key").get_contents_as_string().should.equal("some value")
+ bucket.get_key("new-key").get_contents_as_string().should.equal("some value")
+
+
+@mock_s3bucket_path
+def test_set_metadata():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = 'the-key'
+ key.set_metadata('md', 'Metadatastring')
+ key.set_contents_from_string("Testval")
+
+ bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring')
+
+
+@freeze_time("2012-01-01 12:00:00")
+@mock_s3bucket_path
+def test_last_modified():
+ # See https://github.com/boto/boto/issues/466
+ conn = create_connection()
+ bucket = conn.create_bucket("foobar")
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("some value")
+
+ rs = bucket.get_all_keys()
+ rs[0].last_modified.should.equal('2012-01-01T12:00:00Z')
+
+ bucket.get_key("the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT')
+
+
+@mock_s3bucket_path
+def test_missing_bucket():
+ conn = create_connection('the_key', 'the_secret')
+ conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError)
+
+
+@mock_s3bucket_path
+def test_bucket_with_dash():
+ conn = create_connection('the_key', 'the_secret')
+ conn.get_bucket.when.called_with('mybucket-test').should.throw(S3ResponseError)
+
+
+@mock_s3bucket_path
+def test_bucket_deletion():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+
+ key = Key(bucket)
+ key.key = "the-key"
+ key.set_contents_from_string("some value")
+
+ # Try to delete a bucket that still has keys
+ conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)
+
+ bucket.delete_key("the-key")
+ conn.delete_bucket("foobar")
+
+ # Get non-existing bucket
+ conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError)
+
+ # Delete non-existant bucket
+ conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)
+
+
+@mock_s3bucket_path
+def test_get_all_buckets():
+ conn = create_connection('the_key', 'the_secret')
+ conn.create_bucket("foobar")
+ conn.create_bucket("foobar2")
+ buckets = conn.get_all_buckets()
+
+ buckets.should.have.length_of(2)
+
+
+@mock_s3bucket_path
+def test_post_to_bucket():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+
+ requests.post("https://s3.amazonaws.com/foobar", {
+ 'key': 'the-key',
+ 'file': 'nothing'
+ })
+
+ bucket.get_key('the-key').get_contents_as_string().should.equal('nothing')
+
+
+@mock_s3bucket_path
+def test_post_with_metadata_to_bucket():
+ conn = create_connection('the_key', 'the_secret')
+ bucket = conn.create_bucket("foobar")
+
+ requests.post("https://s3.amazonaws.com/foobar", {
+ 'key': 'the-key',
+ 'file': 'nothing',
+ 'x-amz-meta-test': 'metadata'
+ })
+
+ bucket.get_key('the-key').get_metadata('test').should.equal('metadata')
+
+
+@mock_s3bucket_path
+def test_bucket_method_not_implemented():
+ requests.patch.when.called_with("https://s3.amazonaws.com/foobar").should.throw(NotImplementedError)
+
+
+@mock_s3bucket_path
+def test_key_method_not_implemented():
+ requests.post.when.called_with("https://s3.amazonaws.com/foobar/foo").should.throw(NotImplementedError)
+
+
+@mock_s3bucket_path
+def test_bucket_name_with_dot():
+ conn = create_connection()
+ bucket = conn.create_bucket('firstname.lastname')
+
+ k = Key(bucket, 'somekey')
+ k.set_contents_from_string('somedata')
+
+
+@mock_s3bucket_path
+def test_key_with_special_characters():
+ conn = create_connection()
+ bucket = conn.create_bucket('test_bucket_name')
+
+ key = Key(bucket, 'test_list_keys_2/x?y')
+ key.set_contents_from_string('value1')
+
+ key_list = bucket.list('test_list_keys_2/', '/')
+ keys = [x for x in key_list]
+ keys[0].name.should.equal("test_list_keys_2/x?y")
+
+
+@mock_s3bucket_path
+def test_bucket_key_listing_order():
+ conn = create_connection()
+ bucket = conn.create_bucket('test_bucket')
+ prefix = 'toplevel/'
+
+ def store(name):
+ k = Key(bucket, prefix + name)
+ k.set_contents_from_string('somedata')
+
+ names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key']
+
+ for name in names:
+ store(name)
+
+ delimiter = None
+ keys = [x.name for x in bucket.list(prefix, delimiter)]
+ keys.should.equal([
+ 'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key',
+ 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3'
+ ])
+
+ delimiter = '/'
+ keys = [x.name for x in bucket.list(prefix, delimiter)]
+ keys.should.equal([
+ 'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/'
+ ])
+
+ # Test delimiter with no prefix
+ delimiter = '/'
+ keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)]
+ keys.should.equal(['toplevel'])
+
+ delimiter = None
+ keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
+ keys.should.equal([u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key'])
+
+ delimiter = '/'
+ keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
+ keys.should.equal([u'toplevel/x/'])
diff --git a/tests/test_s3bucket_path/test_s3bucket_path_utils.py b/tests/test_s3bucket_path/test_s3bucket_path_utils.py
new file mode 100644
index 000000000..4b9ff30b1
--- /dev/null
+++ b/tests/test_s3bucket_path/test_s3bucket_path_utils.py
@@ -0,0 +1,14 @@
+from sure import expect
+from moto.s3bucket_path.utils import bucket_name_from_url
+
+
+def test_base_url():
+ expect(bucket_name_from_url('https://s3.amazonaws.com/')).should.equal(None)
+
+
+def test_localhost_bucket():
+ expect(bucket_name_from_url('https://localhost:5000/wfoobar/abc')).should.equal("wfoobar")
+
+
+def test_localhost_without_bucket():
+ expect(bucket_name_from_url('https://www.localhost:5000')).should.equal(None)