Merge branch 'master' of https://github.com/spulec/moto into sg_vpc_support
commit 4fc7317804
@@ -8,3 +8,7 @@ Moto is written by Steve Pulec with contributions from:
* [Lincoln de Sousa](https://github.com/clarete)
* [mhock](https://github.com/mhock)
* [Ilya Sukhanov](https://github.com/IlyaSukhanov)
+* [Lucian Branescu Mihaila](https://github.com/lucian1900)
+* [Konstantinos Koukopoulos](https://github.com/kouk)
+* [attili](https://github.com/attili)
+* [JJ Zeng](https://github.com/jjofseattle)
@@ -7,6 +7,8 @@ from .ec2 import mock_ec2
from .elb import mock_elb
from .emr import mock_emr
from .s3 import mock_s3
+from .s3bucket_path import mock_s3bucket_path
from .ses import mock_ses
from .sqs import mock_sqs
from .sts import mock_sts
+from .route53 import mock_route53
@@ -4,9 +4,11 @@ from moto.ec2 import ec2_backend
from moto.elb import elb_backend
from moto.emr import emr_backend
from moto.s3 import s3_backend
+from moto.s3bucket_path import s3bucket_path_backend
from moto.ses import ses_backend
from moto.sqs import sqs_backend
from moto.sts import sts_backend
+from moto.route53 import route53_backend

BACKENDS = {
    'autoscaling': autoscaling_backend,
@@ -15,7 +17,9 @@ BACKENDS = {
    'elb': elb_backend,
    'emr': emr_backend,
    's3': s3_backend,
+    's3bucket_path': s3bucket_path_backend,
    'ses': ses_backend,
    'sqs': sqs_backend,
    'sts': sts_backend,
+    'route53': route53_backend
}
@@ -9,6 +9,7 @@ from .utils import convert_regex_to_flask_path
class MockAWS(object):
    def __init__(self, backend):
        self.backend = backend
        HTTPretty.reset()

    def __call__(self, func):
        return self.decorate_callable(func)
new file: moto/route53/__init__.py (2 lines)
from .models import route53_backend
mock_route53 = route53_backend.decorator
new file: moto/route53/models.py (44 lines)
from moto.core import BaseBackend
from moto.core.utils import get_random_hex


class FakeZone:

    def __init__(self, name, id):
        self.name = name
        self.id = id
        self.rrsets = {}

    def add_rrset(self, name, rrset):
        self.rrsets[name] = rrset

    def delete_rrset(self, name):
        self.rrsets.pop(name, None)


class Route53Backend(BaseBackend):

    def __init__(self):
        self.zones = {}

    def create_hosted_zone(self, name):
        new_id = get_random_hex()
        new_zone = FakeZone(name, new_id)
        self.zones[new_id] = new_zone
        return new_zone

    def get_all_hosted_zones(self):
        return self.zones.values()

    def get_hosted_zone(self, id):
        return self.zones.get(id)

    def delete_hosted_zone(self, id):
        zone = self.zones.get(id)
        if zone:
            del self.zones[id]
            return zone
        return None


route53_backend = Route53Backend()
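A minimal usage sketch (not part of this commit) of driving the new Route53Backend directly rather than through the HTTP layer; the zone name and record set below are invented example data:

# Sketch only: exercise the module-level backend singleton added above.
from moto.route53.models import route53_backend

zone = route53_backend.create_hosted_zone("example.moto.com")
# rrsets are stored as plain dicts keyed by record name
zone.add_rrset("db.example.moto.com", {"Name": "db.example.moto.com", "Type": "A"})

assert route53_backend.get_hosted_zone(zone.id) is zone

route53_backend.delete_hosted_zone(zone.id)
assert route53_backend.get_hosted_zone(zone.id) is None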
new file: moto/route53/responses.py (124 lines)
from jinja2 import Template
from urlparse import parse_qs, urlparse
from .models import route53_backend
import xmltodict
import dicttoxml


def list_or_create_hostzone_response(request, full_url, headers):

    if request.method == "POST":
        elements = xmltodict.parse(request.body)
        new_zone = route53_backend.create_hosted_zone(elements["CreateHostedZoneRequest"]["Name"])
        template = Template(CREATE_HOSTED_ZONE_RESPONSE)
        return 201, headers, template.render(zone=new_zone)

    elif request.method == "GET":
        all_zones = route53_backend.get_all_hosted_zones()
        template = Template(LIST_HOSTED_ZONES_RESPONSE)
        return 200, headers, template.render(zones=all_zones)


def get_or_delete_hostzone_response(request, full_url, headers):
    parsed_url = urlparse(full_url)
    zoneid = parsed_url.path.rstrip('/').rsplit('/', 1)[1]
    the_zone = route53_backend.get_hosted_zone(zoneid)
    if not the_zone:
        return 404, headers, "Zone %s not Found" % zoneid

    if request.method == "GET":
        template = Template(GET_HOSTED_ZONE_RESPONSE)
        return 200, headers, template.render(zone=the_zone)
    elif request.method == "DELETE":
        route53_backend.delete_hosted_zone(zoneid)
        return 200, headers, DELETE_HOSTED_ZONE_RESPONSE


def rrset_response(request, full_url, headers):
    parsed_url = urlparse(full_url)
    method = request.method

    zoneid = parsed_url.path.rstrip('/').rsplit('/', 2)[1]
    the_zone = route53_backend.get_hosted_zone(zoneid)
    if not the_zone:
        return 404, headers, "Zone %s Not Found" % zoneid

    if method == "POST":
        elements = xmltodict.parse(request.body)
        for key, value in elements['ChangeResourceRecordSetsRequest']['ChangeBatch']['Changes'].items():
            action = value['Action']
            rrset = value['ResourceRecordSet']

            if action == 'CREATE':
                the_zone.add_rrset(rrset["Name"], rrset)
            elif action == "DELETE":
                the_zone.delete_rrset(rrset["Name"])

        return 200, headers, CHANGE_RRSET_RESPONSE

    elif method == "GET":
        querystring = parse_qs(parsed_url.query)
        template = Template(LIST_RRSET_REPONSE)
        rrset_list = []
        for key, value in the_zone.rrsets.items():
            if 'type' not in querystring or querystring["type"][0] == value["Type"]:
                rrset_list.append(dicttoxml.dicttoxml({"ResourceRecordSet": value}, root=False))

        return 200, headers, template.render(rrsets=rrset_list)


LIST_RRSET_REPONSE = """<ListResourceRecordSetsResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<ResourceRecordSets>
{% for rrset in rrsets %}
{{ rrset }}
{% endfor %}
</ResourceRecordSets>
</ListResourceRecordSetsResponse>"""

CHANGE_RRSET_RESPONSE = """<ChangeResourceRecordSetsResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<ChangeInfo>
<Status>PENDING</Status>
<SubmittedAt>2010-09-10T01:36:41.958Z</SubmittedAt>
</ChangeInfo>
</ChangeResourceRecordSetsResponse>"""

DELETE_HOSTED_ZONE_RESPONSE = """<DeleteHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<ChangeInfo>
</ChangeInfo>
</DeleteHostedZoneResponse>"""

GET_HOSTED_ZONE_RESPONSE = """<GetHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<HostedZone>
<Id>/hostedzone/{{ zone.id }}</Id>
<Name>{{ zone.name }}</Name>
<ResourceRecordSetCount>{{ zone.rrsets|count }}</ResourceRecordSetCount>
</HostedZone>
<DelegationSet>
<NameServer>moto.test.com</NameServer>
</DelegationSet>
</GetHostedZoneResponse>"""

CREATE_HOSTED_ZONE_RESPONSE = """<CreateHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<HostedZone>
<Id>/hostedzone/{{ zone.id }}</Id>
<Name>{{ zone.name }}</Name>
<ResourceRecordSetCount>0</ResourceRecordSetCount>
</HostedZone>
<DelegationSet>
<NameServers>
<NameServer>moto.test.com</NameServer>
</NameServers>
</DelegationSet>
</CreateHostedZoneResponse>"""

LIST_HOSTED_ZONES_RESPONSE = """<ListHostedZonesResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
<HostedZones>
{% for zone in zones %}
<HostedZone>
<Id>{{ zone.id }}</Id>
<Name>{{ zone.name }}</Name>
<ResourceRecordSetCount>{{ zone.rrsets|count }}</ResourceRecordSetCount>
</HostedZone>
{% endfor %}
</HostedZones>
</ListHostedZonesResponse>"""
new file: moto/route53/urls.py (11 lines)
import responses

url_bases = [
    "https://route53.amazonaws.com/201.-..-../hostedzone",
]

url_paths = {
    '{0}$': responses.list_or_create_hostzone_response,
    '{0}/.+$': responses.get_or_delete_hostzone_response,
    '{0}/.+/rrset$': responses.rrset_response,
}
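Illustration only (not part of this commit): the handler patterns above are plain regexes anchored on the base URL, and the "201.-..-.." fragment matches a dated API version segment such as 2012-12-12. A quick check with Python's re module (the zone id below is invented; moto's actual dispatch goes through its URL machinery, not raw re.match):

import re

base = "https://route53.amazonaws.com/201.-..-../hostedzone"
url = "https://route53.amazonaws.com/2012-12-12/hostedzone/ABCDEF123456/rrset"

# The rrset handler pattern matches the record-set URL...
assert re.match('{0}/.+/rrset$'.format(base), url)
# ...while the list/create pattern does not, because of its trailing anchor.
assert not re.match('{0}$'.format(base), url)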
@@ -1,3 +1,5 @@
+import os
+import base64
import datetime
import hashlib
@@ -5,6 +7,9 @@ from moto.core import BaseBackend
from moto.core.utils import iso_8601_datetime, rfc_1123_datetime
from .utils import clean_key_name

+UPLOAD_ID_BYTES=43
+UPLOAD_PART_MIN_SIZE=5242880


class FakeKey(object):
    def __init__(self, name, value):
@@ -23,7 +28,7 @@ class FakeKey(object):
    @property
    def etag(self):
        value_md5 = hashlib.md5()
-        value_md5.update(self.value)
+        value_md5.update(bytes(self.value))
        return '"{0}"'.format(value_md5.hexdigest())

    @property
@@ -52,10 +57,48 @@ class FakeKey(object):
        return len(self.value)


class FakeMultipart(object):
    def __init__(self, key_name):
        self.key_name = key_name
        self.parts = {}
        self.id = base64.b64encode(os.urandom(UPLOAD_ID_BYTES)).replace('=', '').replace('+', '')

    def complete(self):
        total = bytearray()
        last_part_name = len(self.list_parts())

        for part in self.list_parts():
            if part.name != last_part_name and len(part.value) < UPLOAD_PART_MIN_SIZE:
                return
            total.extend(part.value)

        return total

    def set_part(self, part_id, value):
        if part_id < 1:
            return

        key = FakeKey(part_id, value)
        self.parts[part_id] = key
        return key

    def list_parts(self):
        parts = []

        for part_id, index in enumerate(sorted(self.parts.keys()), start=1):
            # Make sure part ids are continuous
            if part_id != index:
                return
            parts.append(self.parts[part_id])

        return parts


class FakeBucket(object):
    def __init__(self, name):
        self.name = name
        self.keys = {}
+        self.multiparts = {}


class S3Backend(BaseBackend):
@@ -106,6 +149,36 @@ class S3Backend(BaseBackend):
        if bucket:
            return bucket.keys.get(key_name)

    def initiate_multipart(self, bucket_name, key_name):
        bucket = self.buckets[bucket_name]
        new_multipart = FakeMultipart(key_name)
        bucket.multiparts[new_multipart.id] = new_multipart

        return new_multipart

    def complete_multipart(self, bucket_name, multipart_id):
        bucket = self.buckets[bucket_name]
        multipart = bucket.multiparts[multipart_id]
        value = multipart.complete()
        if value is None:
            return
        del bucket.multiparts[multipart_id]

        return self.set_key(bucket_name, multipart.key_name, value)

    def cancel_multipart(self, bucket_name, multipart_id):
        bucket = self.buckets[bucket_name]
        del bucket.multiparts[multipart_id]

    def list_multipart(self, bucket_name, multipart_id):
        bucket = self.buckets[bucket_name]
        return bucket.multiparts[multipart_id].list_parts()

    def set_part(self, bucket_name, multipart_id, part_id, value):
        bucket = self.buckets[bucket_name]
        multipart = bucket.multiparts[multipart_id]
        return multipart.set_part(part_id, value)

    def prefix_query(self, bucket, prefix, delimiter):
        key_results = set()
        folder_results = set()
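The new multipart machinery above can be exercised directly against the backend. A minimal sketch (not part of this commit); the bucket and key names are invented, and only methods shown in the hunk above are used:

# Sketch only: drive the multipart API on the module-level backend.
from moto.s3.models import s3_backend, UPLOAD_PART_MIN_SIZE

s3_backend.create_bucket('demo-bucket')
multipart = s3_backend.initiate_multipart('demo-bucket', 'the-key')
s3_backend.set_part('demo-bucket', multipart.id, 1, '0' * UPLOAD_PART_MIN_SIZE)
s3_backend.set_part('demo-bucket', multipart.id, 2, '1')  # the last part may be under 5 MB
key = s3_backend.complete_multipart('demo-bucket', multipart.id)

# complete_multipart concatenates the parts and stores them as a single key.
assert len(key.value) == UPLOAD_PART_MIN_SIZE + 1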
@@ -7,15 +7,24 @@ from .models import s3_backend
from .utils import bucket_name_from_url


-def all_buckets():
+def parse_key_name(pth):
+    return pth.lstrip("/")
+
+
+class ResponseObject(object):
+    def __init__(self, backend, bucket_name_from_url, parse_key_name):
+        self.backend = backend
+        self.bucket_name_from_url = bucket_name_from_url
+        self.parse_key_name = parse_key_name
+
+    def all_buckets(self):
        # No bucket specified. Listing all buckets
-    all_buckets = s3_backend.get_all_buckets()
+        all_buckets = self.backend.get_all_buckets()
        template = Template(S3_ALL_BUCKETS)
        return template.render(buckets=all_buckets)

-def bucket_response(request, full_url, headers):
-    response = _bucket_response(request, full_url, headers)
+    def bucket_response(self, request, full_url, headers):
+        response = self._bucket_response(request, full_url, headers)
        if isinstance(response, basestring):
            return 200, headers, response

@@ -23,23 +32,22 @@ def bucket_response(request, full_url, headers):
            status_code, headers, response_content = response
            return status_code, headers, response_content

-def _bucket_response(request, full_url, headers):
+    def _bucket_response(self, request, full_url, headers):
        parsed_url = urlparse(full_url)
        querystring = parse_qs(parsed_url.query)
        method = request.method

-    bucket_name = bucket_name_from_url(full_url)
+        bucket_name = self.bucket_name_from_url(full_url)
        if not bucket_name:
            # If no bucket specified, list all buckets
-        return all_buckets()
+            return self.all_buckets()

        if method == 'GET':
-        bucket = s3_backend.get_bucket(bucket_name)
+            bucket = self.backend.get_bucket(bucket_name)
            if bucket:
                prefix = querystring.get('prefix', [None])[0]
                delimiter = querystring.get('delimiter', [None])[0]
-            result_keys, result_folders = s3_backend.prefix_query(bucket, prefix, delimiter)
+                result_keys, result_folders = self.backend.prefix_query(bucket, prefix, delimiter)
                template = Template(S3_BUCKET_GET_RESPONSE)
                return template.render(
                    bucket=bucket,
@@ -51,11 +59,11 @@ def _bucket_response(request, full_url, headers):
            else:
                return 404, headers, ""
        elif method == 'PUT':
-        new_bucket = s3_backend.create_bucket(bucket_name)
+            new_bucket = self.backend.create_bucket(bucket_name)
            template = Template(S3_BUCKET_CREATE_RESPONSE)
            return template.render(bucket=new_bucket)
        elif method == 'DELETE':
-        removed_bucket = s3_backend.delete_bucket(bucket_name)
+            removed_bucket = self.backend.delete_bucket(bucket_name)
            if removed_bucket is None:
                # Non-existant bucket
                template = Template(S3_DELETE_NON_EXISTING_BUCKET)
@@ -83,10 +91,11 @@ def _bucket_response(request, full_url, headers):
            key = form['key']
            f = form['file']

-        new_key = s3_backend.set_key(bucket_name, key, f)
+            new_key = self.backend.set_key(bucket_name, key, f)

            #Metadata
            meta_regex = re.compile('^x-amz-meta-([a-zA-Z0-9\-_]+)$', flags=re.IGNORECASE)

            for form_id in form:
                result = meta_regex.match(form_id)
                if result:
@@ -97,22 +106,23 @@ def _bucket_response(request, full_url, headers):
        else:
            raise NotImplementedError("Method {0} has not been impelemented in the S3 backend yet".format(method))

-def key_response(request, full_url, headers):
-    response = _key_response(request, full_url, headers)
+    def key_response(self, request, full_url, headers):
+        response = self._key_response(request, full_url, headers)
        if isinstance(response, basestring):
            return 200, headers, response
        else:
            status_code, headers, response_content = response
            return status_code, headers, response_content

-def _key_response(request, full_url, headers):
+    def _key_response(self, request, full_url, headers):
        parsed_url = urlparse(full_url)
        query = parse_qs(parsed_url.query)
        method = request.method

-    key_name = parsed_url.path.lstrip('/')
-    bucket_name = bucket_name_from_url(full_url)
+        key_name = self.parse_key_name(parsed_url.path)
+
+        bucket_name = self.bucket_name_from_url(full_url)

        if hasattr(request, 'body'):
            # Boto
            body = request.body
@@ -121,30 +131,49 @@ def _key_response(request, full_url, headers):
            body = request.data

        if method == 'GET':
-        key = s3_backend.get_key(bucket_name, key_name)
+            if 'uploadId' in query:
+                upload_id = query['uploadId'][0]
+                parts = self.backend.list_multipart(bucket_name, upload_id)
+                template = Template(S3_MULTIPART_LIST_RESPONSE)
+                return 200, headers, template.render(
+                    bucket_name=bucket_name,
+                    key_name=key_name,
+                    upload_id=upload_id,
+                    count=len(parts),
+                    parts=parts
+                )
+            key = self.backend.get_key(bucket_name, key_name)
            if key:
                headers.update(key.metadata)
                return 200, headers, key.value
            else:
                return 404, headers, ""
        if method == 'PUT':
+            if 'uploadId' in query and 'partNumber' in query and body:
+                upload_id = query['uploadId'][0]
+                part_number = int(query['partNumber'][0])
+                key = self.backend.set_part(bucket_name, upload_id, part_number, body)
+                template = Template(S3_MULTIPART_UPLOAD_RESPONSE)
+                headers.update(key.response_dict)
+                return 200, headers, template.render(part=key)
+
            if 'x-amz-copy-source' in request.headers:
                # Copy key
-            src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/",1)
-            s3_backend.copy_key(src_bucket, src_key, bucket_name, key_name)
+                src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/", 1)
+                self.backend.copy_key(src_bucket, src_key, bucket_name, key_name)
                template = Template(S3_OBJECT_COPY_RESPONSE)
                return template.render(key=src_key)
            streaming_request = hasattr(request, 'streaming') and request.streaming
            closing_connection = headers.get('connection') == 'close'
            if closing_connection and streaming_request:
                # Closing the connection of a streaming request. No more data
-            new_key = s3_backend.get_key(bucket_name, key_name)
+                new_key = self.backend.get_key(bucket_name, key_name)
            elif streaming_request:
                # Streaming request, more data
-            new_key = s3_backend.append_to_key(bucket_name, key_name, body)
+                new_key = self.backend.append_to_key(bucket_name, key_name, body)
            else:
                # Initial data
-            new_key = s3_backend.set_key(bucket_name, key_name, body)
+                new_key = self.backend.set_key(bucket_name, key_name, body)
                request.streaming = True

            #Metadata
@@ -160,20 +189,51 @@ def _key_response(request, full_url, headers):
            headers.update(new_key.response_dict)
            return 200, headers, template.render(key=new_key)
        elif method == 'HEAD':
-        key = s3_backend.get_key(bucket_name, key_name)
+            key = self.backend.get_key(bucket_name, key_name)
            if key:
                headers.update(key.metadata)
                headers.update(key.response_dict)
-            return 200, headers, key.value
+                return 200, headers, ""
            else:
                return 404, headers, ""
        elif method == 'DELETE':
-        removed_key = s3_backend.delete_key(bucket_name, key_name)
+            if 'uploadId' in query:
+                upload_id = query['uploadId'][0]
+                self.backend.cancel_multipart(bucket_name, upload_id)
+                return 204, headers, ""
+            removed_key = self.backend.delete_key(bucket_name, key_name)
            template = Template(S3_DELETE_OBJECT_SUCCESS)
            return 204, headers, template.render(bucket=removed_key)
        elif method == 'POST':
+            if body == '' and parsed_url.query == 'uploads':
+                multipart = self.backend.initiate_multipart(bucket_name, key_name)
+                template = Template(S3_MULTIPART_INITIATE_RESPONSE)
+                response = template.render(
+                    bucket_name=bucket_name,
+                    key_name=key_name,
+                    upload_id=multipart.id,
+                )
+                return 200, headers, response
+
+            if 'uploadId' in query:
+                upload_id = query['uploadId'][0]
+                key = self.backend.complete_multipart(bucket_name, upload_id)
+
+                if key is not None:
+                    template = Template(S3_MULTIPART_COMPLETE_RESPONSE)
+                    return template.render(
+                        bucket_name=bucket_name,
+                        key_name=key.name,
+                        etag=key.etag,
+                    )
+                template = Template(S3_MULTIPART_COMPLETE_TOO_SMALL_ERROR)
+                return 400, headers, template.render()
+            else:
+                raise NotImplementedError("Method POST had only been implemented for multipart uploads so far")
        else:
            raise NotImplementedError("Method {0} has not been impelemented in the S3 backend yet".format(method))

+S3ResponseInstance = ResponseObject(s3_backend, bucket_name_from_url, parse_key_name)

S3_ALL_BUCKETS = """<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
<Owner>
@@ -269,3 +329,62 @@ S3_OBJECT_COPY_RESPONSE = """<CopyObjectResponse xmlns="http://doc.s3.amazonaws.
<LastModified>{{ key.last_modified_ISO8601 }}</LastModified>
-</CopyObjectResponse>
+</CopyObjectResponse>"""

S3_MULTIPART_INITIATE_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<InitiateMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Bucket>{{ bucket_name }}</Bucket>
<Key>{{ key_name }}</Key>
<UploadId>{{ upload_id }}</UploadId>
</InitiateMultipartUploadResult>"""

S3_MULTIPART_UPLOAD_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<CopyPartResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LastModified>{{ part.last_modified_ISO8601 }}</LastModified>
<ETag>{{ part.etag }}</ETag>
</CopyPartResult>"""

S3_MULTIPART_LIST_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<ListPartsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Bucket>{{ bucket_name }}</Bucket>
<Key>{{ key_name }}</Key>
<UploadId>{{ upload_id }}</UploadId>
<StorageClass>STANDARD</StorageClass>
<Initiator>
<ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
<DisplayName>webfile</DisplayName>
</Initiator>
<Owner>
<ID>75aa57f09aa0c8caeab4f8c24e99d10f8e7faeebf76c078efc7c6caea54ba06a</ID>
<DisplayName>webfile</DisplayName>
</Owner>
<StorageClass>STANDARD</StorageClass>
<PartNumberMarker>1</PartNumberMarker>
<NextPartNumberMarker>{{ count }} </NextPartNumberMarker>
<MaxParts>{{ count }}</MaxParts>
<IsTruncated>false</IsTruncated>
{% for part in parts %}
<Part>
<PartNumber>{{ part.name }}</PartNumber>
<LastModified>{{ part.last_modified_ISO8601 }}</LastModified>
<ETag>{{ part.etag }}</ETag>
<Size>{{ part.size }}</Size>
</Part>
{% endfor %}
</ListPartsResult>"""

S3_MULTIPART_COMPLETE_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
<CompleteMultipartUploadResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Location>http://{{ bucket_name }}.s3.amazonaws.com/{{ key_name }}</Location>
<Bucket>{{ bucket_name }}</Bucket>
<Key>{{ key_name }}</Key>
<ETag>{{ etag }}</ETag>
</CompleteMultipartUploadResult>
"""

S3_MULTIPART_COMPLETE_TOO_SMALL_ERROR = """<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>EntityTooSmall</Code>
<Message>Your proposed upload is smaller than the minimum allowed object size.</Message>
<RequestId>asdfasdfsdafds</RequestId>
<HostId>sdfgdsfgdsfgdfsdsfgdfs</HostId>
</Error>"""
@@ -1,10 +1,10 @@
-from .responses import bucket_response, key_response
+from .responses import S3ResponseInstance

url_bases = [
    "https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]

url_paths = {
-    '{0}/$': bucket_response,
-    '{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': key_response,
+    '{0}/$': S3ResponseInstance.bucket_response,
+    '{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
new file: moto/s3bucket_path/__init__.py (2 lines)
from .models import s3bucket_path_backend
mock_s3bucket_path = s3bucket_path_backend.decorator
new file: moto/s3bucket_path/models.py (7 lines)
from moto.s3.models import S3Backend


class S3BucketPathBackend(S3Backend):
    True

s3bucket_path_backend = S3BucketPathBackend()
new file: moto/s3bucket_path/responses.py (15 lines)
from .models import s3bucket_path_backend

from .utils import bucket_name_from_url

from moto.s3.responses import ResponseObject


def parse_key_name(pth):
    return "/".join(pth.rstrip("/").split("/")[2:])

S3BucketPathResponseInstance = ResponseObject(
    s3bucket_path_backend,
    bucket_name_from_url,
    parse_key_name,
)
new file: moto/s3bucket_path/urls.py (20 lines)
from .responses import S3BucketPathResponseInstance as ro

url_bases = [
    "https?://s3.amazonaws.com"
]


def bucket_response2(*args):
    return ro.bucket_response(*args)


def bucket_response3(*args):
    return ro.bucket_response(*args)

url_paths = {
    '{0}/$': bucket_response3,
    '{0}/(?P<bucket_name>[a-zA-Z0-9\-_.]+)$': ro.bucket_response,
    '{0}/(?P<bucket_name>[a-zA-Z0-9\-_.]+)/$': bucket_response2,
    '{0}/(?P<bucket_name>[a-zA-Z0-9\-_./]+)/(?P<key_name>[a-zA-Z0-9\-_.?]+)': ro.key_response
}
new file: moto/s3bucket_path/utils.py (10 lines)
import urlparse


def bucket_name_from_url(url):
    pth = urlparse.urlparse(url).path.lstrip("/")

    l = pth.lstrip("/").split("/")
    if len(l) == 0 or l[0] == "":
        return None
    return l[0]
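Illustration only (not part of this commit): how the two path-style helpers above split a bucket-in-path URL; the URL below is invented example data:

from urlparse import urlparse

from moto.s3bucket_path.utils import bucket_name_from_url
from moto.s3bucket_path.responses import parse_key_name

url = "https://s3.amazonaws.com/mybucket/some/nested/key"

# The first path segment is the bucket name...
assert bucket_name_from_url(url) == "mybucket"
# ...and parse_key_name drops that segment, keeping only the object key.
assert parse_key_name(urlparse(url).path) == "some/nested/key"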
@@ -5,3 +5,5 @@ nose
https://github.com/spulec/python-coveralls/tarball/796d9dba34b759664e42ba39e6414209a0f319ad
requests
sure
+xmltodict
+dicttoxml
setup.py
@@ -17,7 +17,7 @@ if sys.version_info < (2, 7):

setup(
    name='moto',
-    version='0.2.9',
+    version='0.2.11',
    description='A library that allows your python tests to easily'
                ' mock out the boto library',
    author='Steve Pulec',
new file: tests/test_route53/test_route53.py (64 lines)
import urllib2

import boto
from boto.exception import S3ResponseError
from boto.s3.key import Key
from boto.route53.record import ResourceRecordSets
from freezegun import freeze_time
import requests

import sure  # noqa

from moto import mock_route53


@mock_route53
def test_hosted_zone():
    conn = boto.connect_route53('the_key', 'the_secret')
    firstzone = conn.create_hosted_zone("testdns.aws.com")
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)

    secondzone = conn.create_hosted_zone("testdns1.aws.com")
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(2)

    id1 = firstzone["CreateHostedZoneResponse"]["HostedZone"]["Id"]
    zone = conn.get_hosted_zone(id1)
    zone["GetHostedZoneResponse"]["HostedZone"]["Name"].should.equal("testdns.aws.com")

    conn.delete_hosted_zone(id1)
    zones = conn.get_all_hosted_zones()
    len(zones["ListHostedZonesResponse"]["HostedZones"]).should.equal(1)

    conn.get_hosted_zone.when.called_with("abcd").should.throw(boto.route53.exception.DNSServerError, "404 Not Found")


@mock_route53
def test_rrset():
    conn = boto.connect_route53('the_key', 'the_secret')

    conn.get_all_rrsets.when.called_with("abcd", type="A").\
        should.throw(boto.route53.exception.DNSServerError, "404 Not Found")

    zone = conn.create_hosted_zone("testdns.aws.com")
    zoneid = zone["CreateHostedZoneResponse"]["HostedZone"]["Id"]

    changes = ResourceRecordSets(conn, zoneid)
    change = changes.add_change("CREATE", "foo.bar.testdns.aws.com", "A")
    change.add_value("1.2.3.4")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid, type="A")
    rrsets.should.have.length_of(1)
    rrsets[0].resource_records[0].should.equal('1.2.3.4')

    rrsets = conn.get_all_rrsets(zoneid, type="CNAME")
    rrsets.should.have.length_of(0)

    changes = ResourceRecordSets(conn, zoneid)
    changes.add_change("DELETE", "foo.bar.testdns.aws.com", "A")
    changes.commit()

    rrsets = conn.get_all_rrsets(zoneid)
    rrsets.should.have.length_of(0)
@@ -1,4 +1,5 @@
import urllib2
+from io import BytesIO

import boto
from boto.exception import S3ResponseError
@@ -37,6 +38,34 @@ def test_my_model_save():
    conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')


+@mock_s3
+def test_multipart_upload_too_small():
+    conn = boto.connect_s3('the_key', 'the_secret')
+    bucket = conn.create_bucket("foobar")
+
+    multipart = bucket.initiate_multipart_upload("the-key")
+    multipart.upload_part_from_file(BytesIO('hello'), 1)
+    multipart.upload_part_from_file(BytesIO('world'), 2)
+    # Multipart with total size under 5MB is refused
+    multipart.complete_upload.should.throw(S3ResponseError)
+
+
+@mock_s3
+def test_multipart_upload():
+    conn = boto.connect_s3('the_key', 'the_secret')
+    bucket = conn.create_bucket("foobar")
+
+    multipart = bucket.initiate_multipart_upload("the-key")
+    part1 = '0' * 5242880
+    multipart.upload_part_from_file(BytesIO(part1), 1)
+    # last part, can be less than 5 MB
+    part2 = '1'
+    multipart.upload_part_from_file(BytesIO(part2), 2)
+    multipart.complete_upload()
+    # we should get both parts as the key contents
+    bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + part2)
+
+
@mock_s3
def test_missing_key():
    conn = boto.connect_s3('the_key', 'the_secret')
tests/test_s3bucket_path/test_bucket_path_server.py
Normal file
50
tests/test_s3bucket_path/test_bucket_path_server.py
Normal file
@ -0,0 +1,50 @@
|
||||
import sure # noqa
|
||||
|
||||
import moto.server as server
|
||||
|
||||
'''
|
||||
Test the different server responses
|
||||
'''
|
||||
server.configure_urls("s3bucket_path")
|
||||
|
||||
|
||||
def test_s3_server_get():
|
||||
test_client = server.app.test_client()
|
||||
res = test_client.get('/')
|
||||
|
||||
res.data.should.contain('ListAllMyBucketsResult')
|
||||
|
||||
|
||||
def test_s3_server_bucket_create():
|
||||
test_client = server.app.test_client()
|
||||
res = test_client.put('/foobar', 'http://localhost:5000')
|
||||
res.status_code.should.equal(200)
|
||||
|
||||
res = test_client.get('/')
|
||||
res.data.should.contain('<Name>foobar</Name>')
|
||||
|
||||
res = test_client.get('/foobar', 'http://localhost:5000')
|
||||
res.status_code.should.equal(200)
|
||||
res.data.should.contain("ListBucketResult")
|
||||
|
||||
res = test_client.put('/foobar/bar', 'http://localhost:5000', data='test value')
|
||||
res.status_code.should.equal(200)
|
||||
|
||||
res = test_client.get('/foobar/bar', 'http://localhost:5000')
|
||||
res.status_code.should.equal(200)
|
||||
res.data.should.equal("test value")
|
||||
|
||||
|
||||
def test_s3_server_post_to_bucket():
|
||||
test_client = server.app.test_client()
|
||||
res = test_client.put('/foobar', 'http://localhost:5000/')
|
||||
res.status_code.should.equal(200)
|
||||
|
||||
test_client.post('/foobar', "https://localhost:5000/", data={
|
||||
'key': 'the-key',
|
||||
'file': 'nothing'
|
||||
})
|
||||
|
||||
res = test_client.get('/foobar/the-key', 'http://localhost:5000/')
|
||||
res.status_code.should.equal(200)
|
||||
res.data.should.equal("nothing")
|
new file: tests/test_s3bucket_path/test_s3bucket_path.py (281 lines)
import urllib2

import boto
from boto.exception import S3ResponseError
from boto.s3.key import Key
from boto.s3.connection import OrdinaryCallingFormat

from freezegun import freeze_time
import requests

import sure  # noqa

from moto import mock_s3bucket_path


def create_connection(key=None, secret=None):
    return boto.connect_s3(key, secret, calling_format=OrdinaryCallingFormat())


class MyModel(object):
    def __init__(self, name, value):
        self.name = name
        self.value = value

    def save(self):
        conn = create_connection('the_key', 'the_secret')
        bucket = conn.get_bucket('mybucket')
        k = Key(bucket)
        k.key = self.name
        k.set_contents_from_string(self.value)


@mock_s3bucket_path
def test_my_model_save():
    # Create Bucket so that test can run
    conn = create_connection('the_key', 'the_secret')
    conn.create_bucket('mybucket')
    ####################################

    model_instance = MyModel('steve', 'is awesome')
    model_instance.save()

    conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')


@mock_s3bucket_path
def test_missing_key():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    bucket.get_key("the-key").should.equal(None)


@mock_s3bucket_path
def test_missing_key_urllib2():
    conn = create_connection('the_key', 'the_secret')
    conn.create_bucket("foobar")

    urllib2.urlopen.when.called_with("http://s3.amazonaws.com/foobar/the-key").should.throw(urllib2.HTTPError)


@mock_s3bucket_path
def test_empty_key():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("")

    bucket.get_key("the-key").get_contents_as_string().should.equal('')


@mock_s3bucket_path
def test_empty_key_set_on_existing_key():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("foobar")

    bucket.get_key("the-key").get_contents_as_string().should.equal('foobar')

    key.set_contents_from_string("")
    bucket.get_key("the-key").get_contents_as_string().should.equal('')


@mock_s3bucket_path
def test_large_key_save():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("foobar" * 100000)

    bucket.get_key("the-key").get_contents_as_string().should.equal('foobar' * 100000)


@mock_s3bucket_path
def test_copy_key():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    bucket.copy_key('new-key', 'foobar', 'the-key')

    bucket.get_key("the-key").get_contents_as_string().should.equal("some value")
    bucket.get_key("new-key").get_contents_as_string().should.equal("some value")


@mock_s3bucket_path
def test_set_metadata():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = 'the-key'
    key.set_metadata('md', 'Metadatastring')
    key.set_contents_from_string("Testval")

    bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring')


@freeze_time("2012-01-01 12:00:00")
@mock_s3bucket_path
def test_last_modified():
    # See https://github.com/boto/boto/issues/466
    conn = create_connection()
    bucket = conn.create_bucket("foobar")
    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    rs = bucket.get_all_keys()
    rs[0].last_modified.should.equal('2012-01-01T12:00:00Z')

    bucket.get_key("the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT')


@mock_s3bucket_path
def test_missing_bucket():
    conn = create_connection('the_key', 'the_secret')
    conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError)


@mock_s3bucket_path
def test_bucket_with_dash():
    conn = create_connection('the_key', 'the_secret')
    conn.get_bucket.when.called_with('mybucket-test').should.throw(S3ResponseError)


@mock_s3bucket_path
def test_bucket_deletion():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    key = Key(bucket)
    key.key = "the-key"
    key.set_contents_from_string("some value")

    # Try to delete a bucket that still has keys
    conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)

    bucket.delete_key("the-key")
    conn.delete_bucket("foobar")

    # Get non-existing bucket
    conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError)

    # Delete non-existant bucket
    conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)


@mock_s3bucket_path
def test_get_all_buckets():
    conn = create_connection('the_key', 'the_secret')
    conn.create_bucket("foobar")
    conn.create_bucket("foobar2")
    buckets = conn.get_all_buckets()

    buckets.should.have.length_of(2)


@mock_s3bucket_path
def test_post_to_bucket():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    requests.post("https://s3.amazonaws.com/foobar", {
        'key': 'the-key',
        'file': 'nothing'
    })

    bucket.get_key('the-key').get_contents_as_string().should.equal('nothing')


@mock_s3bucket_path
def test_post_with_metadata_to_bucket():
    conn = create_connection('the_key', 'the_secret')
    bucket = conn.create_bucket("foobar")

    requests.post("https://s3.amazonaws.com/foobar", {
        'key': 'the-key',
        'file': 'nothing',
        'x-amz-meta-test': 'metadata'
    })

    bucket.get_key('the-key').get_metadata('test').should.equal('metadata')


@mock_s3bucket_path
def test_bucket_method_not_implemented():
    requests.patch.when.called_with("https://s3.amazonaws.com/foobar").should.throw(NotImplementedError)


@mock_s3bucket_path
def test_key_method_not_implemented():
    requests.post.when.called_with("https://s3.amazonaws.com/foobar/foo").should.throw(NotImplementedError)


@mock_s3bucket_path
def test_bucket_name_with_dot():
    conn = create_connection()
    bucket = conn.create_bucket('firstname.lastname')

    k = Key(bucket, 'somekey')
    k.set_contents_from_string('somedata')


@mock_s3bucket_path
def test_key_with_special_characters():
    conn = create_connection()
    bucket = conn.create_bucket('test_bucket_name')

    key = Key(bucket, 'test_list_keys_2/x?y')
    key.set_contents_from_string('value1')

    key_list = bucket.list('test_list_keys_2/', '/')
    keys = [x for x in key_list]
    keys[0].name.should.equal("test_list_keys_2/x?y")


@mock_s3bucket_path
def test_bucket_key_listing_order():
    conn = create_connection()
    bucket = conn.create_bucket('test_bucket')
    prefix = 'toplevel/'

    def store(name):
        k = Key(bucket, prefix + name)
        k.set_contents_from_string('somedata')

    names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key']

    for name in names:
        store(name)

    delimiter = None
    keys = [x.name for x in bucket.list(prefix, delimiter)]
    keys.should.equal([
        'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key',
        'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3'
    ])

    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix, delimiter)]
    keys.should.equal([
        'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/'
    ])

    # Test delimiter with no prefix
    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)]
    keys.should.equal(['toplevel'])

    delimiter = None
    keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
    keys.should.equal([u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key'])

    delimiter = '/'
    keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
    keys.should.equal([u'toplevel/x/'])
new file: tests/test_s3bucket_path/test_s3bucket_path_utils.py (14 lines)
from sure import expect
from moto.s3bucket_path.utils import bucket_name_from_url


def test_base_url():
    expect(bucket_name_from_url('https://s3.amazonaws.com/')).should.equal(None)


def test_localhost_bucket():
    expect(bucket_name_from_url('https://localhost:5000/wfoobar/abc')).should.equal("wfoobar")


def test_localhost_without_bucket():
    expect(bucket_name_from_url('https://www.localhost:5000')).should.equal(None)