Merge pull request #168 from singingwolfboy/py3
Support Python 3 using six
This commit is contained in:
commit
c1144a0cb4
@ -10,10 +10,15 @@ env:
|
|||||||
- BOTO_VERSION=2.19.0
|
- BOTO_VERSION=2.19.0
|
||||||
- BOTO_VERSION=2.12.0
|
- BOTO_VERSION=2.12.0
|
||||||
- BOTO_VERSION=2.7
|
- BOTO_VERSION=2.7
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- python: "3.3"
|
||||||
|
env: BOTO_VERSION=2.32.1
|
||||||
install:
|
install:
|
||||||
- pip install boto==$BOTO_VERSION
|
- pip install boto==$BOTO_VERSION
|
||||||
- pip install .
|
- pip install .
|
||||||
- pip install -r requirements.txt
|
- pip install -r requirements.txt
|
||||||
|
- pip install coveralls
|
||||||
script:
|
script:
|
||||||
- make test
|
- make test
|
||||||
after_success:
|
after_success:
|
||||||
|
@ -23,3 +23,4 @@ Moto is written by Steve Pulec with contributions from:
|
|||||||
* [Richard Eames](https://github.com/Naddiseo)
|
* [Richard Eames](https://github.com/Naddiseo)
|
||||||
* [Kevin Glisson](https://github.com/kevgliss)
|
* [Kevin Glisson](https://github.com/kevgliss)
|
||||||
* [Shawn Falkner-Horine](https://github.com/DreadPirateShawn)
|
* [Shawn Falkner-Horine](https://github.com/DreadPirateShawn)
|
||||||
|
* [David Baumgold](https://github.com/singingwolfboy)
|
||||||
|
@ -243,7 +243,7 @@ class AutoScalingBackend(BaseBackend):
|
|||||||
if names:
|
if names:
|
||||||
return [configuration for configuration in configurations if configuration.name in names]
|
return [configuration for configuration in configurations if configuration.name in names]
|
||||||
else:
|
else:
|
||||||
return configurations
|
return list(configurations)
|
||||||
|
|
||||||
def delete_launch_configuration(self, launch_configuration_name):
|
def delete_launch_configuration(self, launch_configuration_name):
|
||||||
self.launch_configurations.pop(launch_configuration_name, None)
|
self.launch_configurations.pop(launch_configuration_name, None)
|
||||||
@ -300,7 +300,7 @@ class AutoScalingBackend(BaseBackend):
|
|||||||
if names:
|
if names:
|
||||||
return [group for group in groups if group.name in names]
|
return [group for group in groups if group.name in names]
|
||||||
else:
|
else:
|
||||||
return groups
|
return list(groups)
|
||||||
|
|
||||||
def delete_autoscaling_group(self, group_name):
|
def delete_autoscaling_group(self, group_name):
|
||||||
self.autoscaling_groups.pop(group_name, None)
|
self.autoscaling_groups.pop(group_name, None)
|
||||||
@ -345,7 +345,7 @@ class AutoScalingBackend(BaseBackend):
|
|||||||
return policy
|
return policy
|
||||||
|
|
||||||
def describe_policies(self):
|
def describe_policies(self):
|
||||||
return self.policies.values()
|
return list(self.policies.values())
|
||||||
|
|
||||||
def delete_policy(self, group_name):
|
def delete_policy(self, group_name):
|
||||||
self.policies.pop(group_name, None)
|
self.policies.pop(group_name, None)
|
||||||
|
@ -191,7 +191,7 @@ DESCRIBE_LAUNCH_CONFIGURATIONS_TEMPLATE = """<DescribeLaunchConfigurationsRespon
|
|||||||
9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc</LaunchConfigurationARN>
|
9dbbbf87-6141-428a-a409-0752edbe6cad:launchConfigurationName/my-test-lc</LaunchConfigurationARN>
|
||||||
{% if launch_configuration.block_device_mappings %}
|
{% if launch_configuration.block_device_mappings %}
|
||||||
<BlockDeviceMappings>
|
<BlockDeviceMappings>
|
||||||
{% for mount_point, mapping in launch_configuration.block_device_mappings.iteritems() %}
|
{% for mount_point, mapping in launch_configuration.block_device_mappings.items() %}
|
||||||
<member>
|
<member>
|
||||||
<DeviceName>{{ mount_point }}</DeviceName>
|
<DeviceName>{{ mount_point }}</DeviceName>
|
||||||
{% if mapping.ephemeral_name %}
|
{% if mapping.ephemeral_name %}
|
||||||
|
@ -32,4 +32,4 @@ def get_model(name):
|
|||||||
for backend in BACKENDS.values():
|
for backend in BACKENDS.values():
|
||||||
models = getattr(backend.__class__, '__models__', {})
|
models = getattr(backend.__class__, '__models__', {})
|
||||||
if name in models:
|
if name in models:
|
||||||
return getattr(backend, models[name])()
|
return list(getattr(backend, models[name])())
|
||||||
|
@ -55,7 +55,7 @@ def clean_json(resource_json, resources_map):
|
|||||||
return resource
|
return resource
|
||||||
|
|
||||||
cleaned_json = {}
|
cleaned_json = {}
|
||||||
for key, value in resource_json.iteritems():
|
for key, value in resource_json.items():
|
||||||
cleaned_json[key] = clean_json(value, resources_map)
|
cleaned_json[key] = clean_json(value, resources_map)
|
||||||
return cleaned_json
|
return cleaned_json
|
||||||
elif isinstance(resource_json, list):
|
elif isinstance(resource_json, list):
|
||||||
|
@ -34,8 +34,8 @@ class MockAWS(object):
|
|||||||
HTTPretty.enable()
|
HTTPretty.enable()
|
||||||
|
|
||||||
for method in HTTPretty.METHODS:
|
for method in HTTPretty.METHODS:
|
||||||
backend = self.backends.values()[0]
|
backend = list(self.backends.values())[0]
|
||||||
for key, value in backend.urls.iteritems():
|
for key, value in backend.urls.items():
|
||||||
HTTPretty.register_uri(
|
HTTPretty.register_uri(
|
||||||
method=method,
|
method=method,
|
||||||
uri=re.compile(key),
|
uri=re.compile(key),
|
||||||
@ -72,7 +72,7 @@ class Model(type):
|
|||||||
def __new__(self, clsname, bases, namespace):
|
def __new__(self, clsname, bases, namespace):
|
||||||
cls = super(Model, self).__new__(self, clsname, bases, namespace)
|
cls = super(Model, self).__new__(self, clsname, bases, namespace)
|
||||||
cls.__models__ = {}
|
cls.__models__ = {}
|
||||||
for name, value in namespace.iteritems():
|
for name, value in namespace.items():
|
||||||
model = getattr(value, "__returns_model__", False)
|
model = getattr(value, "__returns_model__", False)
|
||||||
if model is not False:
|
if model is not False:
|
||||||
cls.__models__[model] = name
|
cls.__models__[model] = name
|
||||||
@ -112,7 +112,7 @@ class BaseBackend(object):
|
|||||||
|
|
||||||
urls = {}
|
urls = {}
|
||||||
for url_base in url_bases:
|
for url_base in url_bases:
|
||||||
for url_path, handler in unformatted_paths.iteritems():
|
for url_path, handler in unformatted_paths.items():
|
||||||
url = url_path.format(url_base)
|
url = url_path.format(url_base)
|
||||||
urls[url] = handler
|
urls[url] = handler
|
||||||
|
|
||||||
@ -127,7 +127,7 @@ class BaseBackend(object):
|
|||||||
unformatted_paths = self._url_module.url_paths
|
unformatted_paths = self._url_module.url_paths
|
||||||
|
|
||||||
paths = {}
|
paths = {}
|
||||||
for unformatted_path, handler in unformatted_paths.iteritems():
|
for unformatted_path, handler in unformatted_paths.items():
|
||||||
path = unformatted_path.format("")
|
path = unformatted_path.format("")
|
||||||
paths[path] = handler
|
paths[path] = handler
|
||||||
|
|
||||||
@ -146,7 +146,7 @@ class BaseBackend(object):
|
|||||||
The url paths that will be used for the flask server
|
The url paths that will be used for the flask server
|
||||||
"""
|
"""
|
||||||
paths = {}
|
paths = {}
|
||||||
for url_path, handler in self.url_paths.iteritems():
|
for url_path, handler in self.url_paths.items():
|
||||||
url_path = convert_regex_to_flask_path(url_path)
|
url_path = convert_regex_to_flask_path(url_path)
|
||||||
paths[url_path] = handler
|
paths[url_path] = handler
|
||||||
|
|
||||||
|
@ -3,12 +3,44 @@ import datetime
|
|||||||
import json
|
import json
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from urlparse import parse_qs, urlparse
|
import six
|
||||||
|
from six.moves.urllib.parse import parse_qs, urlparse
|
||||||
|
|
||||||
from werkzeug.exceptions import HTTPException
|
from werkzeug.exceptions import HTTPException
|
||||||
from moto.core.utils import camelcase_to_underscores, method_names_from_class
|
from moto.core.utils import camelcase_to_underscores, method_names_from_class
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_dict(d):
|
||||||
|
decoded = {}
|
||||||
|
for key, value in d.items():
|
||||||
|
if isinstance(key, six.binary_type):
|
||||||
|
newkey = key.decode("utf-8")
|
||||||
|
elif isinstance(key, (list, tuple)):
|
||||||
|
newkey = []
|
||||||
|
for k in key:
|
||||||
|
if isinstance(k, six.binary_type):
|
||||||
|
newkey.append(k.decode('utf-8'))
|
||||||
|
else:
|
||||||
|
newkey.append(k)
|
||||||
|
else:
|
||||||
|
newkey = key
|
||||||
|
|
||||||
|
if isinstance(value, six.binary_type):
|
||||||
|
newvalue = value.decode("utf-8")
|
||||||
|
elif isinstance(value, (list, tuple)):
|
||||||
|
newvalue = []
|
||||||
|
for v in value:
|
||||||
|
if isinstance(v, six.binary_type):
|
||||||
|
newvalue.append(v.decode('utf-8'))
|
||||||
|
else:
|
||||||
|
newvalue.append(v)
|
||||||
|
else:
|
||||||
|
newvalue = value
|
||||||
|
|
||||||
|
decoded[newkey] = newvalue
|
||||||
|
return decoded
|
||||||
|
|
||||||
|
|
||||||
class BaseResponse(object):
|
class BaseResponse(object):
|
||||||
|
|
||||||
region = 'us-east-1'
|
region = 'us-east-1'
|
||||||
@ -28,7 +60,7 @@ class BaseResponse(object):
|
|||||||
self.body = request.data
|
self.body = request.data
|
||||||
|
|
||||||
querystring = {}
|
querystring = {}
|
||||||
for key, value in request.form.iteritems():
|
for key, value in request.form.items():
|
||||||
querystring[key] = [value, ]
|
querystring[key] = [value, ]
|
||||||
|
|
||||||
if not querystring:
|
if not querystring:
|
||||||
@ -38,6 +70,8 @@ class BaseResponse(object):
|
|||||||
if not querystring:
|
if not querystring:
|
||||||
querystring.update(headers)
|
querystring.update(headers)
|
||||||
|
|
||||||
|
querystring = _decode_dict(querystring)
|
||||||
|
|
||||||
self.uri = full_url
|
self.uri = full_url
|
||||||
self.path = urlparse(full_url).path
|
self.path = urlparse(full_url).path
|
||||||
self.querystring = querystring
|
self.querystring = querystring
|
||||||
@ -61,7 +95,7 @@ class BaseResponse(object):
|
|||||||
response = method()
|
response = method()
|
||||||
except HTTPException as http_error:
|
except HTTPException as http_error:
|
||||||
response = http_error.description, dict(status=http_error.code)
|
response = http_error.description, dict(status=http_error.code)
|
||||||
if isinstance(response, basestring):
|
if isinstance(response, six.string_types):
|
||||||
return 200, headers, response
|
return 200, headers, response
|
||||||
else:
|
else:
|
||||||
body, new_headers = response
|
body, new_headers = response
|
||||||
|
@ -2,6 +2,7 @@ from __future__ import unicode_literals
|
|||||||
import inspect
|
import inspect
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
|
import six
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
|
|
||||||
@ -23,12 +24,20 @@ def camelcase_to_underscores(argument):
|
|||||||
|
|
||||||
|
|
||||||
def method_names_from_class(clazz):
|
def method_names_from_class(clazz):
|
||||||
return [x[0] for x in inspect.getmembers(clazz, predicate=inspect.ismethod)]
|
# On Python 2, methods are different from functions, and the `inspect`
|
||||||
|
# predicates distinguish between them. On Python 3, methods are just
|
||||||
|
# regular functions, and `inspect.ismethod` doesn't work, so we have to
|
||||||
|
# use `inspect.isfunction` instead
|
||||||
|
if six.PY2:
|
||||||
|
predicate = inspect.ismethod
|
||||||
|
else:
|
||||||
|
predicate = inspect.isfunction
|
||||||
|
return [x[0] for x in inspect.getmembers(clazz, predicate=predicate)]
|
||||||
|
|
||||||
|
|
||||||
def get_random_hex(length=8):
|
def get_random_hex(length=8):
|
||||||
chars = range(10) + ['a', 'b', 'c', 'd', 'e', 'f']
|
chars = list(range(10)) + ['a', 'b', 'c', 'd', 'e', 'f']
|
||||||
return ''.join(unicode(random.choice(chars)) for x in range(length))
|
return ''.join(six.text_type(random.choice(chars)) for x in range(length))
|
||||||
|
|
||||||
|
|
||||||
def get_random_message_id():
|
def get_random_message_id():
|
||||||
@ -59,7 +68,7 @@ class convert_flask_to_httpretty_response(object):
|
|||||||
# For instance methods, use class and method names. Otherwise
|
# For instance methods, use class and method names. Otherwise
|
||||||
# use module and method name
|
# use module and method name
|
||||||
if inspect.ismethod(self.callback):
|
if inspect.ismethod(self.callback):
|
||||||
outer = self.callback.im_class.__name__
|
outer = self.callback.__self__.__class__.__name__
|
||||||
else:
|
else:
|
||||||
outer = self.callback.__module__
|
outer = self.callback.__module__
|
||||||
return "{0}.{1}".format(outer, self.callback.__name__)
|
return "{0}.{1}".format(outer, self.callback.__name__)
|
||||||
|
@ -31,8 +31,8 @@ class DynamoType(object):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, type_as_dict):
|
def __init__(self, type_as_dict):
|
||||||
self.type = type_as_dict.keys()[0]
|
self.type = list(type_as_dict.keys())[0]
|
||||||
self.value = type_as_dict.values()[0]
|
self.value = list(type_as_dict.values())[0]
|
||||||
|
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
return hash((self.type, self.value))
|
return hash((self.type, self.value))
|
||||||
@ -66,7 +66,7 @@ class Item(object):
|
|||||||
self.range_key_type = range_key_type
|
self.range_key_type = range_key_type
|
||||||
|
|
||||||
self.attrs = {}
|
self.attrs = {}
|
||||||
for key, value in attrs.iteritems():
|
for key, value in attrs.items():
|
||||||
self.attrs[key] = DynamoType(value)
|
self.attrs[key] = DynamoType(value)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
@ -74,7 +74,7 @@ class Item(object):
|
|||||||
|
|
||||||
def to_json(self):
|
def to_json(self):
|
||||||
attributes = {}
|
attributes = {}
|
||||||
for attribute_key, attribute in self.attrs.iteritems():
|
for attribute_key, attribute in self.attrs.items():
|
||||||
attributes[attribute_key] = attribute.value
|
attributes[attribute_key] = attribute.value
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@ -84,7 +84,7 @@ class Item(object):
|
|||||||
def describe_attrs(self, attributes):
|
def describe_attrs(self, attributes):
|
||||||
if attributes:
|
if attributes:
|
||||||
included = {}
|
included = {}
|
||||||
for key, value in self.attrs.iteritems():
|
for key, value in self.attrs.items():
|
||||||
if key in attributes:
|
if key in attributes:
|
||||||
included[key] = value
|
included[key] = value
|
||||||
else:
|
else:
|
||||||
@ -143,7 +143,7 @@ class Table(object):
|
|||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
count = 0
|
count = 0
|
||||||
for key, value in self.items.iteritems():
|
for key, value in self.items.items():
|
||||||
if self.has_range_key:
|
if self.has_range_key:
|
||||||
count += len(value)
|
count += len(value)
|
||||||
else:
|
else:
|
||||||
@ -213,7 +213,7 @@ class Table(object):
|
|||||||
for result in self.all_items():
|
for result in self.all_items():
|
||||||
scanned_count += 1
|
scanned_count += 1
|
||||||
passes_all_conditions = True
|
passes_all_conditions = True
|
||||||
for attribute_name, (comparison_operator, comparison_objs) in filters.iteritems():
|
for attribute_name, (comparison_operator, comparison_objs) in filters.items():
|
||||||
attribute = result.attrs.get(attribute_name)
|
attribute = result.attrs.get(attribute_name)
|
||||||
|
|
||||||
if attribute:
|
if attribute:
|
||||||
@ -296,7 +296,7 @@ class DynamoDBBackend(BaseBackend):
|
|||||||
return None, None, None
|
return None, None, None
|
||||||
|
|
||||||
scan_filters = {}
|
scan_filters = {}
|
||||||
for key, (comparison_operator, comparison_values) in filters.iteritems():
|
for key, (comparison_operator, comparison_values) in filters.items():
|
||||||
dynamo_types = [DynamoType(value) for value in comparison_values]
|
dynamo_types = [DynamoType(value) for value in comparison_values]
|
||||||
scan_filters[key] = (comparison_operator, dynamo_types)
|
scan_filters[key] = (comparison_operator, dynamo_types)
|
||||||
|
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import json
|
import json
|
||||||
|
import six
|
||||||
|
|
||||||
from moto.core.responses import BaseResponse
|
from moto.core.responses import BaseResponse
|
||||||
from moto.core.utils import camelcase_to_underscores
|
from moto.core.utils import camelcase_to_underscores
|
||||||
@ -50,15 +51,16 @@ class DynamoHandler(BaseResponse):
|
|||||||
return status, self.response_headers, dynamo_json_dump({'__type': type_})
|
return status, self.response_headers, dynamo_json_dump({'__type': type_})
|
||||||
|
|
||||||
def call_action(self):
|
def call_action(self):
|
||||||
if 'GetSessionToken' in self.body:
|
body = self.body.decode('utf-8')
|
||||||
|
if 'GetSessionToken' in body:
|
||||||
return 200, self.response_headers, sts_handler()
|
return 200, self.response_headers, sts_handler()
|
||||||
|
|
||||||
self.body = json.loads(self.body or '{}')
|
self.body = json.loads(body or '{}')
|
||||||
endpoint = self.get_endpoint_name(self.headers)
|
endpoint = self.get_endpoint_name(self.headers)
|
||||||
if endpoint:
|
if endpoint:
|
||||||
endpoint = camelcase_to_underscores(endpoint)
|
endpoint = camelcase_to_underscores(endpoint)
|
||||||
response = getattr(self, endpoint)()
|
response = getattr(self, endpoint)()
|
||||||
if isinstance(response, basestring):
|
if isinstance(response, six.string_types):
|
||||||
return 200, self.response_headers, response
|
return 200, self.response_headers, response
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@ -73,10 +75,10 @@ class DynamoHandler(BaseResponse):
|
|||||||
limit = body.get('Limit')
|
limit = body.get('Limit')
|
||||||
if body.get("ExclusiveStartTableName"):
|
if body.get("ExclusiveStartTableName"):
|
||||||
last = body.get("ExclusiveStartTableName")
|
last = body.get("ExclusiveStartTableName")
|
||||||
start = dynamodb_backend.tables.keys().index(last) + 1
|
start = list(dynamodb_backend.tables.keys()).index(last) + 1
|
||||||
else:
|
else:
|
||||||
start = 0
|
start = 0
|
||||||
all_tables = dynamodb_backend.tables.keys()
|
all_tables = list(dynamodb_backend.tables.keys())
|
||||||
if limit:
|
if limit:
|
||||||
tables = all_tables[start:start + limit]
|
tables = all_tables[start:start + limit]
|
||||||
else:
|
else:
|
||||||
@ -155,7 +157,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
def batch_write_item(self):
|
def batch_write_item(self):
|
||||||
table_batches = self.body['RequestItems']
|
table_batches = self.body['RequestItems']
|
||||||
|
|
||||||
for table_name, table_requests in table_batches.iteritems():
|
for table_name, table_requests in table_batches.items():
|
||||||
for table_request in table_requests:
|
for table_request in table_requests:
|
||||||
request_type = table_request.keys()[0]
|
request_type = table_request.keys()[0]
|
||||||
request = table_request.values()[0]
|
request = table_request.values()[0]
|
||||||
@ -212,7 +214,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for table_name, table_request in table_batches.iteritems():
|
for table_name, table_request in table_batches.items():
|
||||||
items = []
|
items = []
|
||||||
keys = table_request['Keys']
|
keys = table_request['Keys']
|
||||||
attributes_to_get = table_request.get('AttributesToGet')
|
attributes_to_get = table_request.get('AttributesToGet')
|
||||||
@ -262,7 +264,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
|
|
||||||
filters = {}
|
filters = {}
|
||||||
scan_filters = self.body.get('ScanFilter', {})
|
scan_filters = self.body.get('ScanFilter', {})
|
||||||
for attribute_name, scan_filter in scan_filters.iteritems():
|
for attribute_name, scan_filter in scan_filters.items():
|
||||||
# Keys are attribute names. Values are tuples of (comparison, comparison_value)
|
# Keys are attribute names. Values are tuples of (comparison, comparison_value)
|
||||||
comparison_operator = scan_filter["ComparisonOperator"]
|
comparison_operator = scan_filter["ComparisonOperator"]
|
||||||
comparison_values = scan_filter.get("AttributeValueList", [])
|
comparison_values = scan_filter.get("AttributeValueList", [])
|
||||||
|
@ -77,7 +77,7 @@ class Item(object):
|
|||||||
self.range_key_type = range_key_type
|
self.range_key_type = range_key_type
|
||||||
|
|
||||||
self.attrs = {}
|
self.attrs = {}
|
||||||
for key, value in attrs.iteritems():
|
for key, value in attrs.items():
|
||||||
self.attrs[key] = DynamoType(value)
|
self.attrs[key] = DynamoType(value)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
@ -85,7 +85,7 @@ class Item(object):
|
|||||||
|
|
||||||
def to_json(self):
|
def to_json(self):
|
||||||
attributes = {}
|
attributes = {}
|
||||||
for attribute_key, attribute in self.attrs.iteritems():
|
for attribute_key, attribute in self.attrs.items():
|
||||||
attributes[attribute_key] = attribute.value
|
attributes[attribute_key] = attribute.value
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@ -95,7 +95,7 @@ class Item(object):
|
|||||||
def describe_attrs(self, attributes):
|
def describe_attrs(self, attributes):
|
||||||
if attributes:
|
if attributes:
|
||||||
included = {}
|
included = {}
|
||||||
for key, value in self.attrs.iteritems():
|
for key, value in self.attrs.items():
|
||||||
if key in attributes:
|
if key in attributes:
|
||||||
included[key] = value
|
included[key] = value
|
||||||
else:
|
else:
|
||||||
@ -129,32 +129,32 @@ class Table(object):
|
|||||||
self.indexes = indexes
|
self.indexes = indexes
|
||||||
self.created_at = datetime.datetime.now()
|
self.created_at = datetime.datetime.now()
|
||||||
self.items = defaultdict(dict)
|
self.items = defaultdict(dict)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def describe(self):
|
def describe(self):
|
||||||
results = {
|
results = {
|
||||||
'Table': {
|
'Table': {
|
||||||
'AttributeDefinitions': self.attr,
|
'AttributeDefinitions': self.attr,
|
||||||
'ProvisionedThroughput': self.throughput,
|
'ProvisionedThroughput': self.throughput,
|
||||||
'TableSizeBytes': 0,
|
'TableSizeBytes': 0,
|
||||||
'TableName': self.name,
|
'TableName': self.name,
|
||||||
'TableStatus': 'ACTIVE',
|
'TableStatus': 'ACTIVE',
|
||||||
'KeySchema': self.schema,
|
'KeySchema': self.schema,
|
||||||
'ItemCount': len(self),
|
'ItemCount': len(self),
|
||||||
'CreationDateTime': unix_time(self.created_at)
|
'CreationDateTime': unix_time(self.created_at)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
count = 0
|
count = 0
|
||||||
for key, value in self.items.iteritems():
|
for key, value in self.items.items():
|
||||||
if self.has_range_key:
|
if self.has_range_key:
|
||||||
count += len(value)
|
count += len(value)
|
||||||
else:
|
else:
|
||||||
count += 1
|
count += 1
|
||||||
return count
|
return count
|
||||||
|
|
||||||
def put_item(self, item_attrs):
|
def put_item(self, item_attrs):
|
||||||
hash_value = DynamoType(item_attrs.get(self.hash_key_attr))
|
hash_value = DynamoType(item_attrs.get(self.hash_key_attr))
|
||||||
if self.has_range_key:
|
if self.has_range_key:
|
||||||
@ -169,14 +169,14 @@ class Table(object):
|
|||||||
else:
|
else:
|
||||||
self.items[hash_value] = item
|
self.items[hash_value] = item
|
||||||
return item
|
return item
|
||||||
|
|
||||||
def __nonzero__(self):
|
def __nonzero__(self):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def has_range_key(self):
|
def has_range_key(self):
|
||||||
return self.range_key_attr is not None
|
return self.range_key_attr is not None
|
||||||
|
|
||||||
def get_item(self, hash_key, range_key):
|
def get_item(self, hash_key, range_key):
|
||||||
if self.has_range_key and not range_key:
|
if self.has_range_key and not range_key:
|
||||||
raise ValueError("Table has a range key, but no range key was passed into get_item")
|
raise ValueError("Table has a range key, but no range key was passed into get_item")
|
||||||
@ -187,7 +187,7 @@ class Table(object):
|
|||||||
return self.items[hash_key]
|
return self.items[hash_key]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def delete_item(self, hash_key, range_key):
|
def delete_item(self, hash_key, range_key):
|
||||||
try:
|
try:
|
||||||
if range_key:
|
if range_key:
|
||||||
@ -196,12 +196,12 @@ class Table(object):
|
|||||||
return self.items.pop(hash_key)
|
return self.items.pop(hash_key)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def query(self, hash_key, range_comparison, range_objs):
|
def query(self, hash_key, range_comparison, range_objs):
|
||||||
results = []
|
results = []
|
||||||
last_page = True # Once pagination is implemented, change this
|
last_page = True # Once pagination is implemented, change this
|
||||||
|
|
||||||
possible_results = [ item for item in list(self.all_items()) if item.hash_key == hash_key]
|
possible_results = [ item for item in list(self.all_items()) if item.hash_key == hash_key]
|
||||||
if range_comparison:
|
if range_comparison:
|
||||||
for result in possible_results:
|
for result in possible_results:
|
||||||
if result.range_key.compare(range_comparison, range_objs):
|
if result.range_key.compare(range_comparison, range_objs):
|
||||||
@ -220,7 +220,7 @@ class Table(object):
|
|||||||
yield item
|
yield item
|
||||||
else:
|
else:
|
||||||
yield hash_set
|
yield hash_set
|
||||||
|
|
||||||
def scan(self, filters):
|
def scan(self, filters):
|
||||||
results = []
|
results = []
|
||||||
scanned_count = 0
|
scanned_count = 0
|
||||||
@ -229,7 +229,7 @@ class Table(object):
|
|||||||
for result in self.all_items():
|
for result in self.all_items():
|
||||||
scanned_count += 1
|
scanned_count += 1
|
||||||
passes_all_conditions = True
|
passes_all_conditions = True
|
||||||
for attribute_name, (comparison_operator, comparison_objs) in filters.iteritems():
|
for attribute_name, (comparison_operator, comparison_objs) in filters.items():
|
||||||
attribute = result.attrs.get(attribute_name)
|
attribute = result.attrs.get(attribute_name)
|
||||||
|
|
||||||
if attribute:
|
if attribute:
|
||||||
@ -248,7 +248,7 @@ class Table(object):
|
|||||||
if passes_all_conditions:
|
if passes_all_conditions:
|
||||||
results.append(result)
|
results.append(result)
|
||||||
return results, scanned_count, last_page
|
return results, scanned_count, last_page
|
||||||
|
|
||||||
|
|
||||||
class DynamoDBBackend(BaseBackend):
|
class DynamoDBBackend(BaseBackend):
|
||||||
|
|
||||||
@ -273,18 +273,18 @@ class DynamoDBBackend(BaseBackend):
|
|||||||
if not table:
|
if not table:
|
||||||
return None
|
return None
|
||||||
return table.put_item(item_attrs)
|
return table.put_item(item_attrs)
|
||||||
|
|
||||||
def get_table_keys_name(self, table_name):
|
def get_table_keys_name(self, table_name):
|
||||||
table = self.tables.get(table_name)
|
table = self.tables.get(table_name)
|
||||||
if not table:
|
if not table:
|
||||||
return None, None
|
return None, None
|
||||||
else:
|
else:
|
||||||
return table.hash_key_attr, table.range_key_attr
|
return table.hash_key_attr, table.range_key_attr
|
||||||
|
|
||||||
def get_keys_value(self, table, keys):
|
def get_keys_value(self, table, keys):
|
||||||
if not table.hash_key_attr in keys or (table.has_range_key and not table.range_key_attr in keys):
|
if not table.hash_key_attr in keys or (table.has_range_key and not table.range_key_attr in keys):
|
||||||
raise ValueError("Table has a range key, but no range key was passed into get_item")
|
raise ValueError("Table has a range key, but no range key was passed into get_item")
|
||||||
hash_key = DynamoType(keys[table.hash_key_attr])
|
hash_key = DynamoType(keys[table.hash_key_attr])
|
||||||
range_key = DynamoType(keys[table.range_key_attr]) if table.has_range_key else None
|
range_key = DynamoType(keys[table.range_key_attr]) if table.has_range_key else None
|
||||||
return hash_key,range_key
|
return hash_key,range_key
|
||||||
|
|
||||||
@ -304,19 +304,19 @@ class DynamoDBBackend(BaseBackend):
|
|||||||
range_values = [DynamoType(range_value) for range_value in range_value_dicts]
|
range_values = [DynamoType(range_value) for range_value in range_value_dicts]
|
||||||
|
|
||||||
return table.query(hash_key, range_comparison, range_values)
|
return table.query(hash_key, range_comparison, range_values)
|
||||||
|
|
||||||
def scan(self, table_name, filters):
|
def scan(self, table_name, filters):
|
||||||
table = self.tables.get(table_name)
|
table = self.tables.get(table_name)
|
||||||
if not table:
|
if not table:
|
||||||
return None, None, None
|
return None, None, None
|
||||||
|
|
||||||
scan_filters = {}
|
scan_filters = {}
|
||||||
for key, (comparison_operator, comparison_values) in filters.iteritems():
|
for key, (comparison_operator, comparison_values) in filters.items():
|
||||||
dynamo_types = [DynamoType(value) for value in comparison_values]
|
dynamo_types = [DynamoType(value) for value in comparison_values]
|
||||||
scan_filters[key] = (comparison_operator, dynamo_types)
|
scan_filters[key] = (comparison_operator, dynamo_types)
|
||||||
|
|
||||||
return table.scan(scan_filters)
|
return table.scan(scan_filters)
|
||||||
|
|
||||||
def delete_item(self, table_name, keys):
|
def delete_item(self, table_name, keys):
|
||||||
table = self.tables.get(table_name)
|
table = self.tables.get(table_name)
|
||||||
if not table:
|
if not table:
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import json
|
import json
|
||||||
|
import six
|
||||||
|
|
||||||
from moto.core.responses import BaseResponse
|
from moto.core.responses import BaseResponse
|
||||||
from moto.core.utils import camelcase_to_underscores
|
from moto.core.utils import camelcase_to_underscores
|
||||||
@ -50,15 +51,16 @@ class DynamoHandler(BaseResponse):
|
|||||||
return status, self.response_headers, dynamo_json_dump({'__type': type_})
|
return status, self.response_headers, dynamo_json_dump({'__type': type_})
|
||||||
|
|
||||||
def call_action(self):
|
def call_action(self):
|
||||||
if 'GetSessionToken' in self.body:
|
body = self.body.decode('utf-8')
|
||||||
|
if 'GetSessionToken' in body:
|
||||||
return 200, self.response_headers, sts_handler()
|
return 200, self.response_headers, sts_handler()
|
||||||
|
|
||||||
self.body = json.loads(self.body or '{}')
|
self.body = json.loads(body or '{}')
|
||||||
endpoint = self.get_endpoint_name(self.headers)
|
endpoint = self.get_endpoint_name(self.headers)
|
||||||
if endpoint:
|
if endpoint:
|
||||||
endpoint = camelcase_to_underscores(endpoint)
|
endpoint = camelcase_to_underscores(endpoint)
|
||||||
response = getattr(self, endpoint)()
|
response = getattr(self, endpoint)()
|
||||||
if isinstance(response, basestring):
|
if isinstance(response, six.string_types):
|
||||||
return 200, self.response_headers, response
|
return 200, self.response_headers, response
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@ -73,10 +75,10 @@ class DynamoHandler(BaseResponse):
|
|||||||
limit = body.get('Limit')
|
limit = body.get('Limit')
|
||||||
if body.get("ExclusiveStartTableName"):
|
if body.get("ExclusiveStartTableName"):
|
||||||
last = body.get("ExclusiveStartTableName")
|
last = body.get("ExclusiveStartTableName")
|
||||||
start = dynamodb_backend2.tables.keys().index(last) + 1
|
start = list(dynamodb_backend2.tables.keys()).index(last) + 1
|
||||||
else:
|
else:
|
||||||
start = 0
|
start = 0
|
||||||
all_tables = dynamodb_backend2.tables.keys()
|
all_tables = list(dynamodb_backend2.tables.keys())
|
||||||
if limit:
|
if limit:
|
||||||
tables = all_tables[start:start + limit]
|
tables = all_tables[start:start + limit]
|
||||||
else:
|
else:
|
||||||
@ -91,17 +93,17 @@ class DynamoHandler(BaseResponse):
|
|||||||
#get the table name
|
#get the table name
|
||||||
table_name = body['TableName']
|
table_name = body['TableName']
|
||||||
#get the throughput
|
#get the throughput
|
||||||
throughput = body["ProvisionedThroughput"]
|
throughput = body["ProvisionedThroughput"]
|
||||||
#getting the schema
|
#getting the schema
|
||||||
key_schema = body['KeySchema']
|
key_schema = body['KeySchema']
|
||||||
#getting attribute definition
|
#getting attribute definition
|
||||||
attr = body["AttributeDefinitions"]
|
attr = body["AttributeDefinitions"]
|
||||||
#getting the indexes
|
#getting the indexes
|
||||||
table = dynamodb_backend2.create_table(table_name,
|
table = dynamodb_backend2.create_table(table_name,
|
||||||
schema = key_schema,
|
schema = key_schema,
|
||||||
throughput = throughput,
|
throughput = throughput,
|
||||||
attr = attr)
|
attr = attr)
|
||||||
return dynamo_json_dump(table.describe)
|
return dynamo_json_dump(table.describe)
|
||||||
|
|
||||||
def delete_table(self):
|
def delete_table(self):
|
||||||
name = self.body['TableName']
|
name = self.body['TableName']
|
||||||
@ -131,7 +133,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
name = self.body['TableName']
|
name = self.body['TableName']
|
||||||
item = self.body['Item']
|
item = self.body['Item']
|
||||||
result = dynamodb_backend2.put_item(name, item)
|
result = dynamodb_backend2.put_item(name, item)
|
||||||
|
|
||||||
if result:
|
if result:
|
||||||
item_dict = result.to_json()
|
item_dict = result.to_json()
|
||||||
item_dict['ConsumedCapacityUnits'] = 1
|
item_dict['ConsumedCapacityUnits'] = 1
|
||||||
@ -143,10 +145,10 @@ class DynamoHandler(BaseResponse):
|
|||||||
def batch_write_item(self):
|
def batch_write_item(self):
|
||||||
table_batches = self.body['RequestItems']
|
table_batches = self.body['RequestItems']
|
||||||
|
|
||||||
for table_name, table_requests in table_batches.iteritems():
|
for table_name, table_requests in table_batches.items():
|
||||||
for table_request in table_requests:
|
for table_request in table_requests:
|
||||||
request_type = table_request.keys()[0]
|
request_type = list(table_request.keys())[0]
|
||||||
request = table_request.values()[0]
|
request = list(table_request.values())[0]
|
||||||
if request_type == 'PutRequest':
|
if request_type == 'PutRequest':
|
||||||
item = request['Item']
|
item = request['Item']
|
||||||
dynamodb_backend2.put_item(table_name, item)
|
dynamodb_backend2.put_item(table_name, item)
|
||||||
@ -187,15 +189,15 @@ class DynamoHandler(BaseResponse):
|
|||||||
def batch_get_item(self):
|
def batch_get_item(self):
|
||||||
table_batches = self.body['RequestItems']
|
table_batches = self.body['RequestItems']
|
||||||
|
|
||||||
results = {
|
results = {
|
||||||
"ConsumedCapacity":[],
|
"ConsumedCapacity":[],
|
||||||
"Responses": {
|
"Responses": {
|
||||||
},
|
},
|
||||||
"UnprocessedKeys": {
|
"UnprocessedKeys": {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for table_name, table_request in table_batches.iteritems():
|
for table_name, table_request in table_batches.items():
|
||||||
items = []
|
items = []
|
||||||
keys = table_request['Keys']
|
keys = table_request['Keys']
|
||||||
attributes_to_get = table_request.get('AttributesToGet')
|
attributes_to_get = table_request.get('AttributesToGet')
|
||||||
@ -217,7 +219,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
keys = self.body['KeyConditions']
|
keys = self.body['KeyConditions']
|
||||||
hash_key_name, range_key_name = dynamodb_backend2.get_table_keys_name(name)
|
hash_key_name, range_key_name = dynamodb_backend2.get_table_keys_name(name)
|
||||||
if hash_key_name is None:
|
if hash_key_name is None:
|
||||||
er = "'com.amazonaws.dynamodb.v20120810#ResourceNotFoundException"
|
er = "'com.amazonaws.dynamodb.v20120810#ResourceNotFoundException"
|
||||||
return self.error(er)
|
return self.error(er)
|
||||||
hash_key = keys[hash_key_name]['AttributeValueList'][0]
|
hash_key = keys[hash_key_name]['AttributeValueList'][0]
|
||||||
if len(keys) == 1:
|
if len(keys) == 1:
|
||||||
@ -225,7 +227,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
range_values = []
|
range_values = []
|
||||||
else:
|
else:
|
||||||
if range_key_name == None:
|
if range_key_name == None:
|
||||||
er = "com.amazon.coral.validate#ValidationException"
|
er = "com.amazon.coral.validate#ValidationException"
|
||||||
return self.error(er)
|
return self.error(er)
|
||||||
else:
|
else:
|
||||||
range_condition = keys[range_key_name]
|
range_condition = keys[range_key_name]
|
||||||
@ -238,7 +240,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
items, last_page = dynamodb_backend2.query(name, hash_key, range_comparison, range_values)
|
items, last_page = dynamodb_backend2.query(name, hash_key, range_comparison, range_values)
|
||||||
if items is None:
|
if items is None:
|
||||||
er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
|
er = 'com.amazonaws.dynamodb.v20111205#ResourceNotFoundException'
|
||||||
return self.error(er)
|
return self.error(er)
|
||||||
|
|
||||||
limit = self.body.get("Limit")
|
limit = self.body.get("Limit")
|
||||||
if limit:
|
if limit:
|
||||||
@ -267,7 +269,7 @@ class DynamoHandler(BaseResponse):
|
|||||||
|
|
||||||
filters = {}
|
filters = {}
|
||||||
scan_filters = self.body.get('ScanFilter', {})
|
scan_filters = self.body.get('ScanFilter', {})
|
||||||
for attribute_name, scan_filter in scan_filters.iteritems():
|
for attribute_name, scan_filter in scan_filters.items():
|
||||||
# Keys are attribute names. Values are tuples of (comparison, comparison_value)
|
# Keys are attribute names. Values are tuples of (comparison, comparison_value)
|
||||||
comparison_operator = scan_filter["ComparisonOperator"]
|
comparison_operator = scan_filter["ComparisonOperator"]
|
||||||
comparison_values = scan_filter.get("AttributeValueList", [])
|
comparison_values = scan_filter.get("AttributeValueList", [])
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import copy
|
import copy
|
||||||
import itertools
|
import itertools
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
@ -98,6 +99,14 @@ class Instance(BotoInstance, TaggedEC2Instance):
|
|||||||
self.virtualization_type = ami.virtualization_type if ami else 'paravirtual'
|
self.virtualization_type = ami.virtualization_type if ami else 'paravirtual'
|
||||||
self.architecture = ami.architecture if ami else 'x86_64'
|
self.architecture = ami.architecture if ami else 'x86_64'
|
||||||
|
|
||||||
|
# handle weird bug around user_data -- something grabs the repr(), so it must be clean
|
||||||
|
if isinstance(self.user_data, list) and len(self.user_data) > 0:
|
||||||
|
if six.PY3 and isinstance(self.user_data[0], six.binary_type):
|
||||||
|
# string will have a "b" prefix -- need to get rid of it
|
||||||
|
self.user_data[0] = self.user_data[0].decode('utf-8')
|
||||||
|
elif six.PY2 and isinstance(self.user_data[0], six.text_type):
|
||||||
|
# string will have a "u" prefix -- need to get rid of it
|
||||||
|
self.user_data[0] = self.user_data[0].encode('utf-8')
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create_from_cloudformation_json(cls, resource_name, cloudformation_json):
|
def create_from_cloudformation_json(cls, resource_name, cloudformation_json):
|
||||||
@ -291,7 +300,7 @@ class KeyPairBackend(object):
|
|||||||
|
|
||||||
def describe_key_pairs(self, filter_names=None):
|
def describe_key_pairs(self, filter_names=None):
|
||||||
results = []
|
results = []
|
||||||
for name, keypair in self.keypairs.iteritems():
|
for name, keypair in self.keypairs.items():
|
||||||
if not filter_names or name in filter_names:
|
if not filter_names or name in filter_names:
|
||||||
keypair['name'] = name
|
keypair['name'] = name
|
||||||
results.append(keypair)
|
results.append(keypair)
|
||||||
@ -318,9 +327,9 @@ class TagBackend(object):
|
|||||||
|
|
||||||
def describe_tags(self, filter_resource_ids=None):
|
def describe_tags(self, filter_resource_ids=None):
|
||||||
results = []
|
results = []
|
||||||
for resource_id, tags in self.tags.iteritems():
|
for resource_id, tags in self.tags.items():
|
||||||
ami = 'ami' in resource_id
|
ami = 'ami' in resource_id
|
||||||
for key, value in tags.iteritems():
|
for key, value in tags.items():
|
||||||
if not filter_resource_ids or resource_id in filter_resource_ids:
|
if not filter_resource_ids or resource_id in filter_resource_ids:
|
||||||
# If we're not filtering, or we are filtering and this
|
# If we're not filtering, or we are filtering and this
|
||||||
# resource id is in the filter list, add this tag
|
# resource id is in the filter list, add this tag
|
||||||
@ -383,7 +392,7 @@ class AmiBackend(object):
|
|||||||
def describe_images(self, ami_ids=(), filters=None):
|
def describe_images(self, ami_ids=(), filters=None):
|
||||||
if filters:
|
if filters:
|
||||||
images = self.amis.values()
|
images = self.amis.values()
|
||||||
for (_filter, _filter_value) in filters.iteritems():
|
for (_filter, _filter_value) in filters.items():
|
||||||
images = [ ami for ami in images if ami.get_filter_value(_filter) in _filter_value ]
|
images = [ ami for ami in images if ami.get_filter_value(_filter) in _filter_value ]
|
||||||
return images
|
return images
|
||||||
else:
|
else:
|
||||||
@ -583,7 +592,7 @@ class SecurityGroupBackend(object):
|
|||||||
return group
|
return group
|
||||||
|
|
||||||
def get_security_group_from_name(self, name, vpc_id=None):
|
def get_security_group_from_name(self, name, vpc_id=None):
|
||||||
for group_id, group in self.groups[vpc_id].iteritems():
|
for group_id, group in self.groups[vpc_id].items():
|
||||||
if group.name == name:
|
if group.name == name:
|
||||||
return group
|
return group
|
||||||
|
|
||||||
@ -1005,7 +1014,7 @@ class SubnetBackend(object):
|
|||||||
subnets = self.subnets.values()
|
subnets = self.subnets.values()
|
||||||
|
|
||||||
if filters:
|
if filters:
|
||||||
for (_filter, _filter_value) in filters.iteritems():
|
for (_filter, _filter_value) in filters.items():
|
||||||
subnets = [ subnet for subnet in subnets if subnet.get_filter_value(_filter) in _filter_value ]
|
subnets = [ subnet for subnet in subnets if subnet.get_filter_value(_filter) in _filter_value ]
|
||||||
|
|
||||||
return subnets
|
return subnets
|
||||||
@ -1239,9 +1248,8 @@ class SpotInstanceRequest(BotoSpotRequest):
|
|||||||
ls.groups.append(default_group)
|
ls.groups.append(default_group)
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(Model)
|
||||||
class SpotRequestBackend(object):
|
class SpotRequestBackend(object):
|
||||||
__metaclass__ = Model
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.spot_instance_requests = {}
|
self.spot_instance_requests = {}
|
||||||
super(SpotRequestBackend, self).__init__()
|
super(SpotRequestBackend, self).__init__()
|
||||||
|
@ -66,7 +66,7 @@ CREATE_DHCP_OPTIONS_RESPONSE = u"""
|
|||||||
<dhcpOptions>
|
<dhcpOptions>
|
||||||
<dhcpOptionsId>{{ dhcp_options_set.id }}</dhcpOptionsId>
|
<dhcpOptionsId>{{ dhcp_options_set.id }}</dhcpOptionsId>
|
||||||
<dhcpConfigurationSet>
|
<dhcpConfigurationSet>
|
||||||
{% for key, values in dhcp_options_set.options.iteritems() %}
|
{% for key, values in dhcp_options_set.options.items() %}
|
||||||
{{ values }}
|
{{ values }}
|
||||||
{% if values %}
|
{% if values %}
|
||||||
<item>
|
<item>
|
||||||
@ -111,7 +111,7 @@ DESCRIBE_DHCP_OPTIONS_RESPONSE = u"""
|
|||||||
<dhcpOptions>
|
<dhcpOptions>
|
||||||
<dhcpOptionsId>{{ dhcp_options_set.id }}</dhcpOptionsId>
|
<dhcpOptionsId>{{ dhcp_options_set.id }}</dhcpOptionsId>
|
||||||
<dhcpConfigurationSet>
|
<dhcpConfigurationSet>
|
||||||
{% for key, values in dhcp_options_set.options.iteritems() %}
|
{% for key, values in dhcp_options_set.options.items() %}
|
||||||
{{ values }}
|
{{ values }}
|
||||||
{% if values %}
|
{% if values %}
|
||||||
<item>
|
<item>
|
||||||
|
@ -139,7 +139,7 @@ class InstanceResponse(BaseResponse):
|
|||||||
|
|
||||||
def _dot_value_instance_attribute_handler(self):
|
def _dot_value_instance_attribute_handler(self):
|
||||||
attribute_key = None
|
attribute_key = None
|
||||||
for key, value in self.querystring.iteritems():
|
for key, value in self.querystring.items():
|
||||||
if '.Value' in key:
|
if '.Value' in key:
|
||||||
attribute_key = key
|
attribute_key = key
|
||||||
break
|
break
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
from jinja2 import Template
|
from jinja2 import Template
|
||||||
from moto.core.responses import BaseResponse
|
from moto.core.responses import BaseResponse
|
||||||
from moto.ec2.models import ec2_backend
|
from moto.ec2.models import ec2_backend
|
||||||
@ -15,7 +16,7 @@ class KeyPairs(BaseResponse):
|
|||||||
|
|
||||||
def delete_key_pair(self):
|
def delete_key_pair(self):
|
||||||
name = self.querystring.get('KeyName')[0]
|
name = self.querystring.get('KeyName')[0]
|
||||||
success = str(ec2_backend.delete_key_pair(name)).lower()
|
success = six.text_type(ec2_backend.delete_key_pair(name)).lower()
|
||||||
return Template(DELETE_KEY_PAIR_RESPONSE).render(success=success)
|
return Template(DELETE_KEY_PAIR_RESPONSE).render(success=success)
|
||||||
|
|
||||||
def describe_key_pairs(self):
|
def describe_key_pairs(self):
|
||||||
|
@ -19,14 +19,14 @@ def process_rules_from_querystring(querystring):
|
|||||||
from_port = querystring.get('IpPermissions.1.FromPort')[0]
|
from_port = querystring.get('IpPermissions.1.FromPort')[0]
|
||||||
to_port = querystring.get('IpPermissions.1.ToPort')[0]
|
to_port = querystring.get('IpPermissions.1.ToPort')[0]
|
||||||
ip_ranges = []
|
ip_ranges = []
|
||||||
for key, value in querystring.iteritems():
|
for key, value in querystring.items():
|
||||||
if 'IpPermissions.1.IpRanges' in key:
|
if 'IpPermissions.1.IpRanges' in key:
|
||||||
ip_ranges.append(value[0])
|
ip_ranges.append(value[0])
|
||||||
|
|
||||||
source_groups = []
|
source_groups = []
|
||||||
source_group_ids = []
|
source_group_ids = []
|
||||||
|
|
||||||
for key, value in querystring.iteritems():
|
for key, value in querystring.items():
|
||||||
if 'IpPermissions.1.Groups.1.GroupId' in key:
|
if 'IpPermissions.1.Groups.1.GroupId' in key:
|
||||||
source_group_ids.append(value[0])
|
source_group_ids.append(value[0])
|
||||||
elif 'IpPermissions.1.Groups' in key:
|
elif 'IpPermissions.1.Groups' in key:
|
||||||
|
@ -10,13 +10,13 @@ class TagResponse(BaseResponse):
|
|||||||
|
|
||||||
def create_tags(self):
|
def create_tags(self):
|
||||||
resource_ids = resource_ids_from_querystring(self.querystring)
|
resource_ids = resource_ids_from_querystring(self.querystring)
|
||||||
for resource_id, tag in resource_ids.iteritems():
|
for resource_id, tag in resource_ids.items():
|
||||||
ec2_backend.create_tag(resource_id, tag[0], tag[1])
|
ec2_backend.create_tag(resource_id, tag[0], tag[1])
|
||||||
return CREATE_RESPONSE
|
return CREATE_RESPONSE
|
||||||
|
|
||||||
def delete_tags(self):
|
def delete_tags(self):
|
||||||
resource_ids = resource_ids_from_querystring(self.querystring)
|
resource_ids = resource_ids_from_querystring(self.querystring)
|
||||||
for resource_id, tag in resource_ids.iteritems():
|
for resource_id, tag in resource_ids.items():
|
||||||
ec2_backend.delete_tag(resource_id, tag[0])
|
ec2_backend.delete_tag(resource_id, tag[0])
|
||||||
template = Template(DELETE_RESPONSE)
|
template = Template(DELETE_RESPONSE)
|
||||||
return template.render(reservations=ec2_backend.all_reservations())
|
return template.render(reservations=ec2_backend.all_reservations())
|
||||||
|
@ -1,13 +1,14 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
def random_id(prefix=''):
|
def random_id(prefix=''):
|
||||||
size = 8
|
size = 8
|
||||||
chars = range(10) + ['a', 'b', 'c', 'd', 'e', 'f']
|
chars = list(range(10)) + ['a', 'b', 'c', 'd', 'e', 'f']
|
||||||
|
|
||||||
instance_tag = ''.join(unicode(random.choice(chars)) for x in range(size))
|
instance_tag = ''.join(six.text_type(random.choice(chars)) for x in range(size))
|
||||||
return '{0}-{1}'.format(prefix, instance_tag)
|
return '{0}-{1}'.format(prefix, instance_tag)
|
||||||
|
|
||||||
|
|
||||||
@ -81,7 +82,7 @@ def random_ip():
|
|||||||
|
|
||||||
def instance_ids_from_querystring(querystring_dict):
|
def instance_ids_from_querystring(querystring_dict):
|
||||||
instance_ids = []
|
instance_ids = []
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
if 'InstanceId' in key:
|
if 'InstanceId' in key:
|
||||||
instance_ids.append(value[0])
|
instance_ids.append(value[0])
|
||||||
return instance_ids
|
return instance_ids
|
||||||
@ -89,7 +90,7 @@ def instance_ids_from_querystring(querystring_dict):
|
|||||||
|
|
||||||
def image_ids_from_querystring(querystring_dict):
|
def image_ids_from_querystring(querystring_dict):
|
||||||
image_ids = []
|
image_ids = []
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
if 'ImageId' in key:
|
if 'ImageId' in key:
|
||||||
image_ids.append(value[0])
|
image_ids.append(value[0])
|
||||||
return image_ids
|
return image_ids
|
||||||
@ -97,7 +98,7 @@ def image_ids_from_querystring(querystring_dict):
|
|||||||
|
|
||||||
def sequence_from_querystring(parameter, querystring_dict):
|
def sequence_from_querystring(parameter, querystring_dict):
|
||||||
parameter_values = []
|
parameter_values = []
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
if parameter in key:
|
if parameter in key:
|
||||||
parameter_values.append(value[0])
|
parameter_values.append(value[0])
|
||||||
return parameter_values
|
return parameter_values
|
||||||
@ -106,7 +107,7 @@ def sequence_from_querystring(parameter, querystring_dict):
|
|||||||
def resource_ids_from_querystring(querystring_dict):
|
def resource_ids_from_querystring(querystring_dict):
|
||||||
prefix = 'ResourceId'
|
prefix = 'ResourceId'
|
||||||
response_values = {}
|
response_values = {}
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
if key.startswith(prefix):
|
if key.startswith(prefix):
|
||||||
resource_index = key.replace(prefix + ".", "")
|
resource_index = key.replace(prefix + ".", "")
|
||||||
tag_key = querystring_dict.get("Tag.{0}.Key".format(resource_index))[0]
|
tag_key = querystring_dict.get("Tag.{0}.Key".format(resource_index))[0]
|
||||||
@ -143,7 +144,7 @@ def dhcp_configuration_from_querystring(querystring, option=u'DhcpConfiguration'
|
|||||||
key_needle = re.compile(u'{0}.[0-9]+.Key'.format(option), re.UNICODE)
|
key_needle = re.compile(u'{0}.[0-9]+.Key'.format(option), re.UNICODE)
|
||||||
response_values = {}
|
response_values = {}
|
||||||
|
|
||||||
for key, value in querystring.iteritems():
|
for key, value in querystring.items():
|
||||||
if key_needle.match(key):
|
if key_needle.match(key):
|
||||||
values = []
|
values = []
|
||||||
key_index = key.split(".")[1]
|
key_index = key.split(".")[1]
|
||||||
@ -161,19 +162,19 @@ def dhcp_configuration_from_querystring(querystring, option=u'DhcpConfiguration'
|
|||||||
|
|
||||||
def filters_from_querystring(querystring_dict):
|
def filters_from_querystring(querystring_dict):
|
||||||
response_values = {}
|
response_values = {}
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
match = re.search(r"Filter.(\d).Name", key)
|
match = re.search(r"Filter.(\d).Name", key)
|
||||||
if match:
|
if match:
|
||||||
filter_index = match.groups()[0]
|
filter_index = match.groups()[0]
|
||||||
value_prefix = "Filter.{0}.Value".format(filter_index)
|
value_prefix = "Filter.{0}.Value".format(filter_index)
|
||||||
filter_values = [filter_value[0] for filter_key, filter_value in querystring_dict.iteritems() if filter_key.startswith(value_prefix)]
|
filter_values = [filter_value[0] for filter_key, filter_value in querystring_dict.items() if filter_key.startswith(value_prefix)]
|
||||||
response_values[value[0]] = filter_values
|
response_values[value[0]] = filter_values
|
||||||
return response_values
|
return response_values
|
||||||
|
|
||||||
|
|
||||||
def keypair_names_from_querystring(querystring_dict):
|
def keypair_names_from_querystring(querystring_dict):
|
||||||
keypair_names = []
|
keypair_names = []
|
||||||
for key, value in querystring_dict.iteritems():
|
for key, value in querystring_dict.items():
|
||||||
if 'KeyName' in key:
|
if 'KeyName' in key:
|
||||||
keypair_names.append(value[0])
|
keypair_names.append(value[0])
|
||||||
return keypair_names
|
return keypair_names
|
||||||
@ -186,7 +187,7 @@ filter_dict_attribute_mapping = {
|
|||||||
|
|
||||||
|
|
||||||
def passes_filter_dict(instance, filter_dict):
|
def passes_filter_dict(instance, filter_dict):
|
||||||
for filter_name, filter_values in filter_dict.iteritems():
|
for filter_name, filter_values in filter_dict.items():
|
||||||
if filter_name in filter_dict_attribute_mapping:
|
if filter_name in filter_dict_attribute_mapping:
|
||||||
instance_attr = filter_dict_attribute_mapping[filter_name]
|
instance_attr = filter_dict_attribute_mapping[filter_name]
|
||||||
else:
|
else:
|
||||||
|
@ -1,15 +1,16 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
def random_job_id(size=13):
|
def random_job_id(size=13):
|
||||||
chars = range(10) + list(string.uppercase)
|
chars = list(range(10)) + list(string.ascii_uppercase)
|
||||||
job_tag = ''.join(unicode(random.choice(chars)) for x in range(size))
|
job_tag = ''.join(six.text_type(random.choice(chars)) for x in range(size))
|
||||||
return 'j-{0}'.format(job_tag)
|
return 'j-{0}'.format(job_tag)
|
||||||
|
|
||||||
|
|
||||||
def random_instance_group_id(size=13):
|
def random_instance_group_id(size=13):
|
||||||
chars = range(10) + list(string.uppercase)
|
chars = list(range(10)) + list(string.ascii_uppercase)
|
||||||
job_tag = ''.join(unicode(random.choice(chars)) for x in range(size))
|
job_tag = ''.join(six.text_type(random.choice(chars)) for x in range(size))
|
||||||
return 'i-{0}'.format(job_tag)
|
return 'i-{0}'.format(job_tag)
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import random
|
import random
|
||||||
import string
|
import string
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
def random_resource_id():
|
def random_resource_id():
|
||||||
size = 20
|
size = 20
|
||||||
chars = range(10) + list(string.lowercase)
|
chars = list(range(10)) + list(string.ascii_lowercase)
|
||||||
|
|
||||||
return ''.join(unicode(random.choice(chars)) for x in range(size))
|
return ''.join(six.text_type(random.choice(chars)) for x in range(size))
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
from jinja2 import Template
|
from jinja2 import Template
|
||||||
from urlparse import parse_qs, urlparse
|
from six.moves.urllib.parse import parse_qs, urlparse
|
||||||
from .models import route53_backend
|
from .models import route53_backend
|
||||||
import xmltodict
|
import xmltodict
|
||||||
import dicttoxml
|
import dicttoxml
|
||||||
@ -120,7 +120,7 @@ CREATE_HOSTED_ZONE_RESPONSE = """<CreateHostedZoneResponse xmlns="https://route5
|
|||||||
</DelegationSet>
|
</DelegationSet>
|
||||||
</CreateHostedZoneResponse>"""
|
</CreateHostedZoneResponse>"""
|
||||||
|
|
||||||
LIST_HOSTED_ZONES_RESPONSE = """<ListHostedZonesResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
|
LIST_HOSTED_ZONES_RESPONSE = """<ListHostedZonesResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
|
||||||
<HostedZones>
|
<HostedZones>
|
||||||
{% for zone in zones %}
|
{% for zone in zones %}
|
||||||
<HostedZone>
|
<HostedZone>
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import responses
|
from . import responses
|
||||||
|
|
||||||
url_bases = [
|
url_bases = [
|
||||||
"https://route53.amazonaws.com/201.-..-../hostedzone",
|
"https://route53.amazonaws.com/201.-..-../hostedzone",
|
||||||
|
@ -5,6 +5,8 @@ import datetime
|
|||||||
import hashlib
|
import hashlib
|
||||||
import copy
|
import copy
|
||||||
import itertools
|
import itertools
|
||||||
|
import codecs
|
||||||
|
import six
|
||||||
|
|
||||||
from moto.core import BaseBackend
|
from moto.core import BaseBackend
|
||||||
from moto.core.utils import iso_8601_datetime, rfc_1123_datetime
|
from moto.core.utils import iso_8601_datetime, rfc_1123_datetime
|
||||||
@ -59,7 +61,11 @@ class FakeKey(object):
|
|||||||
def etag(self):
|
def etag(self):
|
||||||
if self._etag is None:
|
if self._etag is None:
|
||||||
value_md5 = hashlib.md5()
|
value_md5 = hashlib.md5()
|
||||||
value_md5.update(bytes(self.value))
|
if isinstance(self.value, six.text_type):
|
||||||
|
value = self.value.encode("utf-8")
|
||||||
|
else:
|
||||||
|
value = self.value
|
||||||
|
value_md5.update(value)
|
||||||
self._etag = value_md5.hexdigest()
|
self._etag = value_md5.hexdigest()
|
||||||
return '"{0}"'.format(self._etag)
|
return '"{0}"'.format(self._etag)
|
||||||
|
|
||||||
@ -112,9 +118,11 @@ class FakeMultipart(object):
|
|||||||
def __init__(self, key_name):
|
def __init__(self, key_name):
|
||||||
self.key_name = key_name
|
self.key_name = key_name
|
||||||
self.parts = {}
|
self.parts = {}
|
||||||
self.id = base64.b64encode(os.urandom(UPLOAD_ID_BYTES)).replace('=', '').replace('+', '')
|
rand_b64 = base64.b64encode(os.urandom(UPLOAD_ID_BYTES))
|
||||||
|
self.id = rand_b64.decode('utf-8').replace('=', '').replace('+', '')
|
||||||
|
|
||||||
def complete(self):
|
def complete(self):
|
||||||
|
decode_hex = codecs.getdecoder("hex_codec")
|
||||||
total = bytearray()
|
total = bytearray()
|
||||||
md5s = bytearray()
|
md5s = bytearray()
|
||||||
last_part_name = len(self.list_parts())
|
last_part_name = len(self.list_parts())
|
||||||
@ -122,7 +130,8 @@ class FakeMultipart(object):
|
|||||||
for part in self.list_parts():
|
for part in self.list_parts():
|
||||||
if part.name != last_part_name and len(part.value) < UPLOAD_PART_MIN_SIZE:
|
if part.name != last_part_name and len(part.value) < UPLOAD_PART_MIN_SIZE:
|
||||||
return None, None
|
return None, None
|
||||||
md5s.extend(part.etag.replace('"', '').decode('hex'))
|
part_etag = part.etag.replace('"', '')
|
||||||
|
md5s.extend(decode_hex(part_etag)[0])
|
||||||
total.extend(part.value)
|
total.extend(part.value)
|
||||||
|
|
||||||
etag = hashlib.md5()
|
etag = hashlib.md5()
|
||||||
@ -296,7 +305,7 @@ class S3Backend(BaseBackend):
|
|||||||
key_results = set()
|
key_results = set()
|
||||||
folder_results = set()
|
folder_results = set()
|
||||||
if prefix:
|
if prefix:
|
||||||
for key_name, key in bucket.keys.iteritems():
|
for key_name, key in bucket.keys.items():
|
||||||
if key_name.startswith(prefix):
|
if key_name.startswith(prefix):
|
||||||
key_without_prefix = key_name.replace(prefix, "", 1)
|
key_without_prefix = key_name.replace(prefix, "", 1)
|
||||||
if delimiter and delimiter in key_without_prefix:
|
if delimiter and delimiter in key_without_prefix:
|
||||||
@ -306,7 +315,7 @@ class S3Backend(BaseBackend):
|
|||||||
else:
|
else:
|
||||||
key_results.add(key)
|
key_results.add(key)
|
||||||
else:
|
else:
|
||||||
for key_name, key in bucket.keys.iteritems():
|
for key_name, key in bucket.keys.items():
|
||||||
if delimiter and delimiter in key_name:
|
if delimiter and delimiter in key_name:
|
||||||
# If delimiter, we need to split out folder_results
|
# If delimiter, we need to split out folder_results
|
||||||
folder_results.add(key_name.split(delimiter)[0])
|
folder_results.add(key_name.split(delimiter)[0])
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
from urlparse import parse_qs, urlparse
|
import six
|
||||||
|
from six.moves.urllib.parse import parse_qs, urlparse
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from jinja2 import Template
|
from jinja2 import Template
|
||||||
@ -32,7 +33,7 @@ class ResponseObject(object):
|
|||||||
except MissingBucket:
|
except MissingBucket:
|
||||||
return 404, headers, ""
|
return 404, headers, ""
|
||||||
|
|
||||||
if isinstance(response, basestring):
|
if isinstance(response, six.string_types):
|
||||||
return 200, headers, response
|
return 200, headers, response
|
||||||
else:
|
else:
|
||||||
status_code, headers, response_content = response
|
status_code, headers, response_content = response
|
||||||
@ -74,7 +75,7 @@ class ResponseObject(object):
|
|||||||
for unsup in ('delimiter', 'prefix', 'max-uploads'):
|
for unsup in ('delimiter', 'prefix', 'max-uploads'):
|
||||||
if unsup in querystring:
|
if unsup in querystring:
|
||||||
raise NotImplementedError("Listing multipart uploads with {} has not been implemented yet.".format(unsup))
|
raise NotImplementedError("Listing multipart uploads with {} has not been implemented yet.".format(unsup))
|
||||||
multiparts = list(self.backend.get_all_multiparts(bucket_name).itervalues())
|
multiparts = list(self.backend.get_all_multiparts(bucket_name).values())
|
||||||
template = Template(S3_ALL_MULTIPARTS)
|
template = Template(S3_ALL_MULTIPARTS)
|
||||||
return 200, headers, template.render(
|
return 200, headers, template.render(
|
||||||
bucket_name=bucket_name,
|
bucket_name=bucket_name,
|
||||||
@ -129,7 +130,7 @@ class ResponseObject(object):
|
|||||||
|
|
||||||
def _bucket_response_put(self, request, bucket_name, querystring, headers):
|
def _bucket_response_put(self, request, bucket_name, querystring, headers):
|
||||||
if 'versioning' in querystring:
|
if 'versioning' in querystring:
|
||||||
ver = re.search('<Status>([A-Za-z]+)</Status>', request.body)
|
ver = re.search('<Status>([A-Za-z]+)</Status>', request.body.decode('utf-8'))
|
||||||
if ver:
|
if ver:
|
||||||
self.backend.set_bucket_versioning(bucket_name, ver.group(1))
|
self.backend.set_bucket_versioning(bucket_name, ver.group(1))
|
||||||
template = Template(S3_BUCKET_VERSIONING)
|
template = Template(S3_BUCKET_VERSIONING)
|
||||||
@ -172,7 +173,7 @@ class ResponseObject(object):
|
|||||||
else:
|
else:
|
||||||
#HTTPretty, build new form object
|
#HTTPretty, build new form object
|
||||||
form = {}
|
form = {}
|
||||||
for kv in request.body.split('&'):
|
for kv in request.body.decode('utf-8').split('&'):
|
||||||
k, v = kv.split('=')
|
k, v = kv.split('=')
|
||||||
form[k] = v
|
form[k] = v
|
||||||
|
|
||||||
@ -198,7 +199,7 @@ class ResponseObject(object):
|
|||||||
def _bucket_response_delete_keys(self, request, bucket_name, headers):
|
def _bucket_response_delete_keys(self, request, bucket_name, headers):
|
||||||
template = Template(S3_DELETE_KEYS_RESPONSE)
|
template = Template(S3_DELETE_KEYS_RESPONSE)
|
||||||
|
|
||||||
keys = minidom.parseString(request.body).getElementsByTagName('Key')
|
keys = minidom.parseString(request.body.decode('utf-8')).getElementsByTagName('Key')
|
||||||
deleted_names = []
|
deleted_names = []
|
||||||
error_names = []
|
error_names = []
|
||||||
|
|
||||||
@ -218,7 +219,7 @@ class ResponseObject(object):
|
|||||||
except MissingBucket:
|
except MissingBucket:
|
||||||
return 404, headers, ""
|
return 404, headers, ""
|
||||||
|
|
||||||
if isinstance(response, basestring):
|
if isinstance(response, six.string_types):
|
||||||
return 200, headers, response
|
return 200, headers, response
|
||||||
else:
|
else:
|
||||||
status_code, headers, response_content = response
|
status_code, headers, response_content = response
|
||||||
@ -229,7 +230,7 @@ class ResponseObject(object):
|
|||||||
if replace is True:
|
if replace is True:
|
||||||
key.clear_metadata()
|
key.clear_metadata()
|
||||||
for header in request.headers:
|
for header in request.headers:
|
||||||
if isinstance(header, basestring):
|
if isinstance(header, six.string_types):
|
||||||
result = meta_regex.match(header)
|
result = meta_regex.match(header)
|
||||||
if result:
|
if result:
|
||||||
meta_key = result.group(0).lower()
|
meta_key = result.group(0).lower()
|
||||||
@ -359,7 +360,7 @@ class ResponseObject(object):
|
|||||||
return 204, headers, template.render(bucket=removed_key)
|
return 204, headers, template.render(bucket=removed_key)
|
||||||
|
|
||||||
def _key_response_post(self, body, parsed_url, bucket_name, query, key_name, headers):
|
def _key_response_post(self, body, parsed_url, bucket_name, query, key_name, headers):
|
||||||
if body == '' and parsed_url.query == 'uploads':
|
if body == b'' and parsed_url.query == 'uploads':
|
||||||
multipart = self.backend.initiate_multipart(bucket_name, key_name)
|
multipart = self.backend.initiate_multipart(bucket_name, key_name)
|
||||||
template = Template(S3_MULTIPART_INITIATE_RESPONSE)
|
template = Template(S3_MULTIPART_INITIATE_RESPONSE)
|
||||||
response = template.render(
|
response = template.render(
|
||||||
|
@ -1,14 +1,13 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import urllib2
|
from six.moves.urllib.parse import urlparse, unquote
|
||||||
import urlparse
|
|
||||||
|
|
||||||
bucket_name_regex = re.compile("(.+).s3.amazonaws.com")
|
bucket_name_regex = re.compile("(.+).s3.amazonaws.com")
|
||||||
|
|
||||||
|
|
||||||
def bucket_name_from_url(url):
|
def bucket_name_from_url(url):
|
||||||
domain = urlparse.urlparse(url).netloc
|
domain = urlparse(url).netloc
|
||||||
|
|
||||||
if domain.startswith('www.'):
|
if domain.startswith('www.'):
|
||||||
domain = domain[4:]
|
domain = domain[4:]
|
||||||
@ -26,7 +25,7 @@ def bucket_name_from_url(url):
|
|||||||
|
|
||||||
|
|
||||||
def clean_key_name(key_name):
|
def clean_key_name(key_name):
|
||||||
return urllib2.unquote(key_name)
|
return unquote(key_name)
|
||||||
|
|
||||||
|
|
||||||
class _VersionedKeyStore(dict):
|
class _VersionedKeyStore(dict):
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import urlparse
|
from six.moves.urllib.parse import urlparse
|
||||||
|
|
||||||
|
|
||||||
def bucket_name_from_url(url):
|
def bucket_name_from_url(url):
|
||||||
pth = urlparse.urlparse(url).path.lstrip("/")
|
pth = urlparse(url).path.lstrip("/")
|
||||||
|
|
||||||
l = pth.lstrip("/").split("/")
|
l = pth.lstrip("/").split("/")
|
||||||
if len(l) == 0 or l[0] == "":
|
if len(l) == 0 or l[0] == "":
|
||||||
|
@ -31,7 +31,7 @@ class DomainDispatcherApplication(object):
|
|||||||
if self.service:
|
if self.service:
|
||||||
return self.service
|
return self.service
|
||||||
|
|
||||||
for backend_name, backend in BACKENDS.iteritems():
|
for backend_name, backend in BACKENDS.items():
|
||||||
for url_base in backend.url_bases:
|
for url_base in backend.url_bases:
|
||||||
if re.match(url_base, 'http://%s' % host):
|
if re.match(url_base, 'http://%s' % host):
|
||||||
return backend_name
|
return backend_name
|
||||||
@ -73,7 +73,7 @@ def create_backend_app(service):
|
|||||||
backend_app.url_map.converters['regex'] = RegexConverter
|
backend_app.url_map.converters['regex'] = RegexConverter
|
||||||
|
|
||||||
backend = BACKENDS[service]
|
backend = BACKENDS[service]
|
||||||
for url_path, handler in backend.flask_paths.iteritems():
|
for url_path, handler in backend.flask_paths.items():
|
||||||
backend_app.route(url_path, methods=HTTP_METHODS)(convert_flask_to_httpretty_response(handler))
|
backend_app.route(url_path, methods=HTTP_METHODS)(convert_flask_to_httpretty_response(handler))
|
||||||
|
|
||||||
return backend_app
|
return backend_app
|
||||||
|
@ -4,7 +4,7 @@ import string
|
|||||||
|
|
||||||
|
|
||||||
def random_hex(length):
|
def random_hex(length):
|
||||||
return ''.join(random.choice(string.lowercase) for x in range(length))
|
return ''.join(random.choice(string.ascii_lowercase) for x in range(length))
|
||||||
|
|
||||||
|
|
||||||
def get_random_message_id():
|
def get_random_message_id():
|
||||||
|
@ -2,6 +2,7 @@ from __future__ import unicode_literals
|
|||||||
import datetime
|
import datetime
|
||||||
import requests
|
import requests
|
||||||
import uuid
|
import uuid
|
||||||
|
import six
|
||||||
|
|
||||||
from moto.core import BaseBackend
|
from moto.core import BaseBackend
|
||||||
from moto.core.utils import iso_8601_datetime
|
from moto.core.utils import iso_8601_datetime
|
||||||
@ -26,7 +27,7 @@ class Topic(object):
|
|||||||
self.subscriptions_deleted = 0
|
self.subscriptions_deleted = 0
|
||||||
|
|
||||||
def publish(self, message):
|
def publish(self, message):
|
||||||
message_id = unicode(uuid.uuid4())
|
message_id = six.text_type(uuid.uuid4())
|
||||||
subscriptions = sns_backend.list_subscriptions(self.arn)
|
subscriptions = sns_backend.list_subscriptions(self.arn)
|
||||||
for subscription in subscriptions:
|
for subscription in subscriptions:
|
||||||
subscription.publish(message, message_id)
|
subscription.publish(message, message_id)
|
||||||
|
@ -30,7 +30,7 @@ class Message(object):
|
|||||||
@property
|
@property
|
||||||
def md5(self):
|
def md5(self):
|
||||||
body_md5 = hashlib.md5()
|
body_md5 = hashlib.md5()
|
||||||
body_md5.update(self.body)
|
body_md5.update(self.body.encode('utf-8'))
|
||||||
return body_md5.hexdigest()
|
return body_md5.hexdigest()
|
||||||
|
|
||||||
def mark_sent(self, delay_seconds=None):
|
def mark_sent(self, delay_seconds=None):
|
||||||
|
@ -7,7 +7,7 @@ import string
|
|||||||
def generate_receipt_handle():
|
def generate_receipt_handle():
|
||||||
# http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ImportantIdentifiers.html#ImportantIdentifiers-receipt-handles
|
# http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/ImportantIdentifiers.html#ImportantIdentifiers-receipt-handles
|
||||||
length = 185
|
length = 185
|
||||||
return ''.join(random.choice(string.lowercase) for x in range(length))
|
return ''.join(random.choice(string.ascii_lowercase) for x in range(length))
|
||||||
|
|
||||||
|
|
||||||
def unix_time(dt=None):
|
def unix_time(dt=None):
|
||||||
|
@ -2,9 +2,9 @@ coverage
|
|||||||
freezegun
|
freezegun
|
||||||
mock
|
mock
|
||||||
nose
|
nose
|
||||||
https://github.com/spulec/python-coveralls/tarball/796d9dba34b759664e42ba39e6414209a0f319ad
|
|
||||||
requests
|
requests
|
||||||
sure<1.2.4
|
sure<1.2.4
|
||||||
xmltodict
|
xmltodict
|
||||||
dicttoxml
|
dicttoxml
|
||||||
werkzeug
|
werkzeug
|
||||||
|
six
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import boto
|
import boto
|
||||||
|
import six
|
||||||
from nose.plugins.skip import SkipTest
|
from nose.plugins.skip import SkipTest
|
||||||
|
|
||||||
|
|
||||||
@ -18,3 +19,19 @@ class requires_boto_gte(object):
|
|||||||
if boto_version >= required:
|
if boto_version >= required:
|
||||||
return test
|
return test
|
||||||
raise SkipTest
|
raise SkipTest
|
||||||
|
|
||||||
|
|
||||||
|
class py3_requires_boto_gte(object):
|
||||||
|
"""Decorator for requiring boto version greater than or equal to 'version'
|
||||||
|
when running on Python 3. (Not all of boto is Python 3 compatible.)"""
|
||||||
|
def __init__(self, version):
|
||||||
|
self.version = version
|
||||||
|
|
||||||
|
def __call__(self, test):
|
||||||
|
if not six.PY3:
|
||||||
|
return test
|
||||||
|
boto_version = version_tuple(boto.__version__)
|
||||||
|
required = version_tuple(self.version)
|
||||||
|
if boto_version >= required:
|
||||||
|
return test
|
||||||
|
raise SkipTest
|
||||||
|
@ -18,7 +18,7 @@ def test_create_launch_configuration():
|
|||||||
instance_type='m1.small',
|
instance_type='m1.small',
|
||||||
key_name='the_keys',
|
key_name='the_keys',
|
||||||
security_groups=["default", "default2"],
|
security_groups=["default", "default2"],
|
||||||
user_data="This is some user_data",
|
user_data=b"This is some user_data",
|
||||||
instance_monitoring=True,
|
instance_monitoring=True,
|
||||||
instance_profile_name='arn:aws:iam::123456789012:instance-profile/testing',
|
instance_profile_name='arn:aws:iam::123456789012:instance-profile/testing',
|
||||||
spot_price=0.1,
|
spot_price=0.1,
|
||||||
@ -31,7 +31,7 @@ def test_create_launch_configuration():
|
|||||||
launch_config.instance_type.should.equal('m1.small')
|
launch_config.instance_type.should.equal('m1.small')
|
||||||
launch_config.key_name.should.equal('the_keys')
|
launch_config.key_name.should.equal('the_keys')
|
||||||
set(launch_config.security_groups).should.equal(set(['default', 'default2']))
|
set(launch_config.security_groups).should.equal(set(['default', 'default2']))
|
||||||
launch_config.user_data.should.equal("This is some user_data")
|
launch_config.user_data.should.equal(b"This is some user_data")
|
||||||
launch_config.instance_monitoring.enabled.should.equal('true')
|
launch_config.instance_monitoring.enabled.should.equal('true')
|
||||||
launch_config.instance_profile_name.should.equal('arn:aws:iam::123456789012:instance-profile/testing')
|
launch_config.instance_profile_name.should.equal('arn:aws:iam::123456789012:instance-profile/testing')
|
||||||
launch_config.spot_price.should.equal(0.1)
|
launch_config.spot_price.should.equal(0.1)
|
||||||
@ -65,7 +65,7 @@ def test_create_launch_configuration_with_block_device_mappings():
|
|||||||
instance_type='m1.small',
|
instance_type='m1.small',
|
||||||
key_name='the_keys',
|
key_name='the_keys',
|
||||||
security_groups=["default", "default2"],
|
security_groups=["default", "default2"],
|
||||||
user_data="This is some user_data",
|
user_data=b"This is some user_data",
|
||||||
instance_monitoring=True,
|
instance_monitoring=True,
|
||||||
instance_profile_name='arn:aws:iam::123456789012:instance-profile/testing',
|
instance_profile_name='arn:aws:iam::123456789012:instance-profile/testing',
|
||||||
spot_price=0.1,
|
spot_price=0.1,
|
||||||
@ -79,7 +79,7 @@ def test_create_launch_configuration_with_block_device_mappings():
|
|||||||
launch_config.instance_type.should.equal('m1.small')
|
launch_config.instance_type.should.equal('m1.small')
|
||||||
launch_config.key_name.should.equal('the_keys')
|
launch_config.key_name.should.equal('the_keys')
|
||||||
set(launch_config.security_groups).should.equal(set(['default', 'default2']))
|
set(launch_config.security_groups).should.equal(set(['default', 'default2']))
|
||||||
launch_config.user_data.should.equal("This is some user_data")
|
launch_config.user_data.should.equal(b"This is some user_data")
|
||||||
launch_config.instance_monitoring.enabled.should.equal('true')
|
launch_config.instance_monitoring.enabled.should.equal('true')
|
||||||
launch_config.instance_profile_name.should.equal('arn:aws:iam::123456789012:instance-profile/testing')
|
launch_config.instance_profile_name.should.equal('arn:aws:iam::123456789012:instance-profile/testing')
|
||||||
launch_config.spot_price.should.equal(0.1)
|
launch_config.spot_price.should.equal(0.1)
|
||||||
@ -164,7 +164,7 @@ def test_create_launch_configuration_defaults():
|
|||||||
# Defaults
|
# Defaults
|
||||||
launch_config.key_name.should.equal('')
|
launch_config.key_name.should.equal('')
|
||||||
list(launch_config.security_groups).should.equal([])
|
list(launch_config.security_groups).should.equal([])
|
||||||
launch_config.user_data.should.equal("")
|
launch_config.user_data.should.equal(b"")
|
||||||
launch_config.instance_monitoring.enabled.should.equal('false')
|
launch_config.instance_monitoring.enabled.should.equal('false')
|
||||||
launch_config.instance_profile_name.should.equal(None)
|
launch_config.instance_profile_name.should.equal(None)
|
||||||
launch_config.spot_price.should.equal(None)
|
launch_config.spot_price.should.equal(None)
|
||||||
|
@ -14,5 +14,5 @@ def test_describe_autoscaling_groups():
|
|||||||
|
|
||||||
res = test_client.get('/?Action=DescribeLaunchConfigurations')
|
res = test_client.get('/?Action=DescribeLaunchConfigurations')
|
||||||
|
|
||||||
res.data.should.contain('<DescribeLaunchConfigurationsResponse')
|
res.data.should.contain(b'<DescribeLaunchConfigurationsResponse')
|
||||||
res.data.should.contain('<LaunchConfigurations>')
|
res.data.should.contain(b'<LaunchConfigurations>')
|
||||||
|
@ -36,8 +36,8 @@ def test_parse_stack_resources():
|
|||||||
)
|
)
|
||||||
|
|
||||||
stack.resource_map.should.have.length_of(1)
|
stack.resource_map.should.have.length_of(1)
|
||||||
stack.resource_map.keys()[0].should.equal('WebServerGroup')
|
list(stack.resource_map.keys())[0].should.equal('WebServerGroup')
|
||||||
queue = stack.resource_map.values()[0]
|
queue = list(stack.resource_map.values())[0]
|
||||||
queue.should.be.a(Queue)
|
queue.should.be.a(Queue)
|
||||||
queue.name.should.equal("my-queue")
|
queue.name.should.equal("my-queue")
|
||||||
|
|
||||||
|
@ -2,6 +2,8 @@ from __future__ import unicode_literals
|
|||||||
import boto
|
import boto
|
||||||
from boto.exception import EC2ResponseError
|
from boto.exception import EC2ResponseError
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
import tests.backport_assert_raises
|
||||||
|
from nose.tools import assert_raises
|
||||||
|
|
||||||
from moto import mock_ec2
|
from moto import mock_ec2
|
||||||
|
|
||||||
@ -23,18 +25,21 @@ def test_basic_decorator():
|
|||||||
|
|
||||||
def test_context_manager():
|
def test_context_manager():
|
||||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||||
conn.get_all_instances.when.called_with().should.throw(EC2ResponseError)
|
with assert_raises(EC2ResponseError):
|
||||||
|
conn.get_all_instances()
|
||||||
|
|
||||||
with mock_ec2():
|
with mock_ec2():
|
||||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||||
list(conn.get_all_instances()).should.equal([])
|
list(conn.get_all_instances()).should.equal([])
|
||||||
|
|
||||||
conn.get_all_instances.when.called_with().should.throw(EC2ResponseError)
|
with assert_raises(EC2ResponseError):
|
||||||
|
conn.get_all_instances()
|
||||||
|
|
||||||
|
|
||||||
def test_decorator_start_and_stop():
|
def test_decorator_start_and_stop():
|
||||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||||
conn.get_all_instances.when.called_with().should.throw(EC2ResponseError)
|
with assert_raises(EC2ResponseError):
|
||||||
|
conn.get_all_instances()
|
||||||
|
|
||||||
mock = mock_ec2()
|
mock = mock_ec2()
|
||||||
mock.start()
|
mock.start()
|
||||||
@ -42,7 +47,8 @@ def test_decorator_start_and_stop():
|
|||||||
list(conn.get_all_instances()).should.equal([])
|
list(conn.get_all_instances()).should.equal([])
|
||||||
mock.stop()
|
mock.stop()
|
||||||
|
|
||||||
conn.get_all_instances.when.called_with().should.throw(EC2ResponseError)
|
with assert_raises(EC2ResponseError):
|
||||||
|
conn.get_all_instances()
|
||||||
|
|
||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
|
@ -7,7 +7,7 @@ from moto import mock_ec2
|
|||||||
@mock_ec2
|
@mock_ec2
|
||||||
def test_latest_meta_data():
|
def test_latest_meta_data():
|
||||||
res = requests.get("http://169.254.169.254/latest/meta-data/")
|
res = requests.get("http://169.254.169.254/latest/meta-data/")
|
||||||
res.content.should.equal("iam")
|
res.content.should.equal(b"iam")
|
||||||
|
|
||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
@ -24,7 +24,7 @@ def test_meta_data_iam():
|
|||||||
@mock_ec2
|
@mock_ec2
|
||||||
def test_meta_data_security_credentials():
|
def test_meta_data_security_credentials():
|
||||||
res = requests.get("http://169.254.169.254/latest/meta-data/iam/security-credentials/")
|
res = requests.get("http://169.254.169.254/latest/meta-data/iam/security-credentials/")
|
||||||
res.content.should.equal("default-role")
|
res.content.should.equal(b"default-role")
|
||||||
|
|
||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
|
@ -33,7 +33,8 @@ def test_port_argument(run_simple):
|
|||||||
def test_domain_dispatched():
|
def test_domain_dispatched():
|
||||||
dispatcher = DomainDispatcherApplication(create_backend_app)
|
dispatcher = DomainDispatcherApplication(create_backend_app)
|
||||||
backend_app = dispatcher.get_application("email.us-east1.amazonaws.com")
|
backend_app = dispatcher.get_application("email.us-east1.amazonaws.com")
|
||||||
backend_app.view_functions.keys()[0].should.equal('EmailResponse.dispatch')
|
keys = list(backend_app.view_functions.keys())
|
||||||
|
keys[0].should.equal('EmailResponse.dispatch')
|
||||||
|
|
||||||
|
|
||||||
def test_domain_without_matches():
|
def test_domain_without_matches():
|
||||||
@ -45,4 +46,5 @@ def test_domain_dispatched_with_service():
|
|||||||
# If we pass a particular service, always return that.
|
# If we pass a particular service, always return that.
|
||||||
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
|
dispatcher = DomainDispatcherApplication(create_backend_app, service="s3")
|
||||||
backend_app = dispatcher.get_application("s3.us-east1.amazonaws.com")
|
backend_app = dispatcher.get_application("s3.us-east1.amazonaws.com")
|
||||||
backend_app.view_functions.keys()[0].should.equal('ResponseObject.key_response')
|
keys = set(backend_app.view_functions.keys())
|
||||||
|
keys.should.contain('ResponseObject.key_response')
|
||||||
|
@ -1,7 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
|
import boto.dynamodb
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
import requests
|
import requests
|
||||||
|
import tests.backport_assert_raises
|
||||||
|
from nose.tools import assert_raises
|
||||||
|
|
||||||
from moto import mock_dynamodb
|
from moto import mock_dynamodb
|
||||||
from moto.dynamodb import dynamodb_backend
|
from moto.dynamodb import dynamodb_backend
|
||||||
@ -34,7 +38,8 @@ def test_list_tables_layer_1():
|
|||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_describe_missing_table():
|
def test_describe_missing_table():
|
||||||
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
conn = boto.connect_dynamodb('the_key', 'the_secret')
|
||||||
conn.describe_table.when.called_with('messages').should.throw(DynamoDBResponseError)
|
with assert_raises(DynamoDBResponseError):
|
||||||
|
conn.describe_table('messages')
|
||||||
|
|
||||||
|
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
|
@ -1,9 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
|
|
||||||
from moto import mock_dynamodb
|
from moto import mock_dynamodb
|
||||||
|
from tests.helpers import py3_requires_boto_gte
|
||||||
|
|
||||||
from boto.dynamodb import condition
|
from boto.dynamodb import condition
|
||||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
|
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBValidationError
|
||||||
@ -27,6 +29,7 @@ def create_table(conn):
|
|||||||
return table
|
return table
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@freeze_time("2012-01-14")
|
@freeze_time("2012-01-14")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_create_table():
|
def test_create_table():
|
||||||
@ -59,6 +62,7 @@ def test_create_table():
|
|||||||
conn.describe_table('messages').should.equal(expected)
|
conn.describe_table('messages').should.equal(expected)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_table():
|
def test_delete_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -71,6 +75,7 @@ def test_delete_table():
|
|||||||
conn.layer1.delete_table.when.called_with('messages').should.throw(DynamoDBResponseError)
|
conn.layer1.delete_table.when.called_with('messages').should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_update_table_throughput():
|
def test_update_table_throughput():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -85,6 +90,7 @@ def test_update_table_throughput():
|
|||||||
table.write_units.should.equal(6)
|
table.write_units.should.equal(6)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_item_add_and_describe_and_update():
|
def test_item_add_and_describe_and_update():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -132,6 +138,7 @@ def test_item_add_and_describe_and_update():
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_item_put_without_table():
|
def test_item_put_without_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -145,6 +152,7 @@ def test_item_put_without_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_get_missing_item():
|
def test_get_missing_item():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -157,6 +165,7 @@ def test_get_missing_item():
|
|||||||
table.has_item("foobar", "more").should.equal(False)
|
table.has_item("foobar", "more").should.equal(False)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_get_item_with_undeclared_table():
|
def test_get_item_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -170,6 +179,7 @@ def test_get_item_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBKeyNotFoundError)
|
).should.throw(DynamoDBKeyNotFoundError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_get_item_without_range_key():
|
def test_get_item_without_range_key():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -194,6 +204,7 @@ def test_get_item_without_range_key():
|
|||||||
table.get_item.when.called_with(hash_key=hash_key).should.throw(DynamoDBValidationError)
|
table.get_item.when.called_with(hash_key=hash_key).should.throw(DynamoDBValidationError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item():
|
def test_delete_item():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -222,6 +233,7 @@ def test_delete_item():
|
|||||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item_with_attribute_response():
|
def test_delete_item_with_attribute_response():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -244,14 +256,14 @@ def test_delete_item_with_attribute_response():
|
|||||||
|
|
||||||
response = item.delete(return_values='ALL_OLD')
|
response = item.delete(return_values='ALL_OLD')
|
||||||
response.should.equal({
|
response.should.equal({
|
||||||
u'Attributes': {
|
'Attributes': {
|
||||||
u'Body': u'http://url_to_lolcat.gif',
|
'Body': 'http://url_to_lolcat.gif',
|
||||||
u'forum_name': u'LOLCat Forum',
|
'forum_name': 'LOLCat Forum',
|
||||||
u'ReceivedTime': u'12/9/2011 11:36:03 PM',
|
'ReceivedTime': '12/9/2011 11:36:03 PM',
|
||||||
u'SentBy': u'User A',
|
'SentBy': 'User A',
|
||||||
u'subject': u'Check this out!'
|
'subject': 'Check this out!'
|
||||||
},
|
},
|
||||||
u'ConsumedCapacityUnits': 0.5
|
'ConsumedCapacityUnits': 0.5
|
||||||
})
|
})
|
||||||
table.refresh()
|
table.refresh()
|
||||||
table.item_count.should.equal(0)
|
table.item_count.should.equal(0)
|
||||||
@ -259,6 +271,7 @@ def test_delete_item_with_attribute_response():
|
|||||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item_with_undeclared_table():
|
def test_delete_item_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -272,6 +285,7 @@ def test_delete_item_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_query():
|
def test_query():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -322,6 +336,7 @@ def test_query():
|
|||||||
results.response['Items'].should.have.length_of(1)
|
results.response['Items'].should.have.length_of(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_query_with_undeclared_table():
|
def test_query_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -338,6 +353,7 @@ def test_query_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_scan():
|
def test_scan():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -401,6 +417,7 @@ def test_scan():
|
|||||||
results.response['Items'].should.have.length_of(1)
|
results.response['Items'].should.have.length_of(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_scan_with_undeclared_table():
|
def test_scan_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -418,6 +435,7 @@ def test_scan_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_write_batch():
|
def test_write_batch():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -462,6 +480,7 @@ def test_write_batch():
|
|||||||
table.item_count.should.equal(1)
|
table.item_count.should.equal(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_batch_read():
|
def test_batch_read():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
|
@ -1,9 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
|
|
||||||
from moto import mock_dynamodb
|
from moto import mock_dynamodb
|
||||||
|
from tests.helpers import py3_requires_boto_gte
|
||||||
|
|
||||||
from boto.dynamodb import condition
|
from boto.dynamodb import condition
|
||||||
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
|
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
|
||||||
@ -25,6 +27,7 @@ def create_table(conn):
|
|||||||
return table
|
return table
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@freeze_time("2012-01-14")
|
@freeze_time("2012-01-14")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_create_table():
|
def test_create_table():
|
||||||
@ -53,6 +56,7 @@ def test_create_table():
|
|||||||
conn.describe_table('messages').should.equal(expected)
|
conn.describe_table('messages').should.equal(expected)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_table():
|
def test_delete_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -65,6 +69,7 @@ def test_delete_table():
|
|||||||
conn.layer1.delete_table.when.called_with('messages').should.throw(DynamoDBResponseError)
|
conn.layer1.delete_table.when.called_with('messages').should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_update_table_throughput():
|
def test_update_table_throughput():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -79,6 +84,7 @@ def test_update_table_throughput():
|
|||||||
table.write_units.should.equal(6)
|
table.write_units.should.equal(6)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_item_add_and_describe_and_update():
|
def test_item_add_and_describe_and_update():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -119,6 +125,7 @@ def test_item_add_and_describe_and_update():
|
|||||||
})
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_item_put_without_table():
|
def test_item_put_without_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -131,6 +138,7 @@ def test_item_put_without_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_get_missing_item():
|
def test_get_missing_item():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -141,6 +149,7 @@ def test_get_missing_item():
|
|||||||
).should.throw(DynamoDBKeyNotFoundError)
|
).should.throw(DynamoDBKeyNotFoundError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_get_item_with_undeclared_table():
|
def test_get_item_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -153,6 +162,7 @@ def test_get_item_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBKeyNotFoundError)
|
).should.throw(DynamoDBKeyNotFoundError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item():
|
def test_delete_item():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -180,6 +190,7 @@ def test_delete_item():
|
|||||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item_with_attribute_response():
|
def test_delete_item_with_attribute_response():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -215,6 +226,7 @@ def test_delete_item_with_attribute_response():
|
|||||||
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
item.delete.when.called_with().should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_delete_item_with_undeclared_table():
|
def test_delete_item_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -227,6 +239,7 @@ def test_delete_item_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_query():
|
def test_query():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -247,6 +260,7 @@ def test_query():
|
|||||||
results.response['Items'].should.have.length_of(1)
|
results.response['Items'].should.have.length_of(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_query_with_undeclared_table():
|
def test_query_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -257,6 +271,7 @@ def test_query_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_scan():
|
def test_scan():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -317,6 +332,7 @@ def test_scan():
|
|||||||
results.response['Items'].should.have.length_of(1)
|
results.response['Items'].should.have.length_of(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_scan_with_undeclared_table():
|
def test_scan_with_undeclared_table():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -334,6 +350,7 @@ def test_scan_with_undeclared_table():
|
|||||||
).should.throw(DynamoDBResponseError)
|
).should.throw(DynamoDBResponseError)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_write_batch():
|
def test_write_batch():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
@ -376,6 +393,7 @@ def test_write_batch():
|
|||||||
table.item_count.should.equal(1)
|
table.item_count.should.equal(1)
|
||||||
|
|
||||||
|
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb
|
@mock_dynamodb
|
||||||
def test_batch_read():
|
def test_batch_read():
|
||||||
conn = boto.connect_dynamodb()
|
conn = boto.connect_dynamodb()
|
||||||
|
@ -17,4 +17,4 @@ def test_table_list():
|
|||||||
|
|
||||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||||
res = test_client.get('/', headers=headers)
|
res = test_client.get('/', headers=headers)
|
||||||
res.data.should.contain('TableNames')
|
res.data.should.contain(b'TableNames')
|
||||||
|
@ -1,4 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals, print_function
|
||||||
|
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
import requests
|
import requests
|
||||||
@ -6,10 +8,12 @@ from moto import mock_dynamodb2
|
|||||||
from moto.dynamodb2 import dynamodb_backend2
|
from moto.dynamodb2 import dynamodb_backend2
|
||||||
from boto.exception import JSONResponseError
|
from boto.exception import JSONResponseError
|
||||||
from tests.helpers import requires_boto_gte
|
from tests.helpers import requires_boto_gte
|
||||||
|
import tests.backport_assert_raises
|
||||||
|
from nose.tools import assert_raises
|
||||||
try:
|
try:
|
||||||
import boto.dynamodb2
|
import boto.dynamodb2
|
||||||
except ImportError:
|
except ImportError:
|
||||||
print "This boto version is not supported"
|
print("This boto version is not supported")
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
@ -57,7 +61,8 @@ def test_describe_missing_table():
|
|||||||
'us-west-2',
|
'us-west-2',
|
||||||
aws_access_key_id="ak",
|
aws_access_key_id="ak",
|
||||||
aws_secret_access_key="sk")
|
aws_secret_access_key="sk")
|
||||||
conn.describe_table.when.called_with('messages').should.throw(JSONResponseError)
|
with assert_raises(JSONResponseError):
|
||||||
|
conn.describe_table('messages')
|
||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
from moto import mock_dynamodb2
|
from moto import mock_dynamodb2
|
||||||
from boto.exception import JSONResponseError
|
from boto.exception import JSONResponseError
|
||||||
from tests.helpers import requires_boto_gte
|
from tests.helpers import requires_boto_gte, py3_requires_boto_gte
|
||||||
try:
|
try:
|
||||||
from boto.dynamodb2.fields import HashKey
|
from boto.dynamodb2.fields import HashKey
|
||||||
from boto.dynamodb2.fields import RangeKey
|
from boto.dynamodb2.fields import RangeKey
|
||||||
@ -32,6 +33,7 @@ def iterate_results(res):
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
@freeze_time("2012-01-14")
|
@freeze_time("2012-01-14")
|
||||||
def test_create_table():
|
def test_create_table():
|
||||||
@ -59,6 +61,7 @@ def test_create_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_table():
|
def test_delete_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -71,6 +74,7 @@ def test_delete_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_update_table_throughput():
|
def test_update_table_throughput():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -96,6 +100,7 @@ def test_update_table_throughput():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_item_add_and_describe_and_update():
|
def test_item_add_and_describe_and_update():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -139,9 +144,9 @@ def test_item_add_and_describe_and_update():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_item_put_without_table():
|
def test_item_put_without_table():
|
||||||
|
|
||||||
table = Table('undeclared-table')
|
table = Table('undeclared-table')
|
||||||
item_data = {
|
item_data = {
|
||||||
'forum_name': 'LOLCat Forum',
|
'forum_name': 'LOLCat Forum',
|
||||||
@ -154,9 +159,9 @@ def test_item_put_without_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_missing_item():
|
def test_get_missing_item():
|
||||||
|
|
||||||
table = create_table()
|
table = create_table()
|
||||||
|
|
||||||
table.get_item.when.called_with(
|
table.get_item.when.called_with(
|
||||||
@ -166,6 +171,7 @@ def test_get_missing_item():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_item_with_undeclared_table():
|
def test_get_item_with_undeclared_table():
|
||||||
table = Table('undeclared-table')
|
table = Table('undeclared-table')
|
||||||
@ -173,6 +179,7 @@ def test_get_item_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_item_without_range_key():
|
def test_get_item_without_range_key():
|
||||||
table = Table.create('messages', schema=[
|
table = Table.create('messages', schema=[
|
||||||
@ -190,6 +197,7 @@ def test_get_item_without_range_key():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.30.0")
|
@requires_boto_gte("2.30.0")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_item():
|
def test_delete_item():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -212,6 +220,7 @@ def test_delete_item():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_item_with_undeclared_table():
|
def test_delete_item_with_undeclared_table():
|
||||||
table = Table("undeclared-table")
|
table = Table("undeclared-table")
|
||||||
@ -226,9 +235,9 @@ def test_delete_item_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_query():
|
def test_query():
|
||||||
|
|
||||||
table = create_table()
|
table = create_table()
|
||||||
|
|
||||||
item_data = {
|
item_data = {
|
||||||
@ -284,6 +293,7 @@ def test_query():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_query_with_undeclared_table():
|
def test_query_with_undeclared_table():
|
||||||
table = Table('undeclared')
|
table = Table('undeclared')
|
||||||
@ -296,6 +306,7 @@ def test_query_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_scan():
|
def test_scan():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -351,6 +362,7 @@ def test_scan():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_scan_with_undeclared_table():
|
def test_scan_with_undeclared_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -368,6 +380,7 @@ def test_scan_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_write_batch():
|
def test_write_batch():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -398,6 +411,7 @@ def test_write_batch():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_batch_read():
|
def test_batch_read():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -442,6 +456,7 @@ def test_batch_read():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_key_fields():
|
def test_get_key_fields():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
|
@ -1,10 +1,11 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
import six
|
||||||
import boto
|
import boto
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
from boto.exception import JSONResponseError
|
from boto.exception import JSONResponseError
|
||||||
from moto import mock_dynamodb2
|
from moto import mock_dynamodb2
|
||||||
from tests.helpers import requires_boto_gte
|
from tests.helpers import requires_boto_gte, py3_requires_boto_gte
|
||||||
try:
|
try:
|
||||||
from boto.dynamodb2.fields import HashKey
|
from boto.dynamodb2.fields import HashKey
|
||||||
from boto.dynamodb2.table import Table
|
from boto.dynamodb2.table import Table
|
||||||
@ -24,6 +25,7 @@ def create_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
@freeze_time("2012-01-14")
|
@freeze_time("2012-01-14")
|
||||||
def test_create_table():
|
def test_create_table():
|
||||||
@ -55,6 +57,7 @@ def test_create_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_table():
|
def test_delete_table():
|
||||||
create_table()
|
create_table()
|
||||||
@ -68,6 +71,7 @@ def test_delete_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_update_table_throughput():
|
def test_update_table_throughput():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -84,6 +88,7 @@ def test_update_table_throughput():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_item_add_and_describe_and_update():
|
def test_item_add_and_describe_and_update():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -118,6 +123,7 @@ def test_item_add_and_describe_and_update():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_item_put_without_table():
|
def test_item_put_without_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -133,6 +139,7 @@ def test_item_put_without_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_missing_item():
|
def test_get_missing_item():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -141,6 +148,7 @@ def test_get_missing_item():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_item_with_undeclared_table():
|
def test_get_item_with_undeclared_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -152,6 +160,7 @@ def test_get_item_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.30.0")
|
@requires_boto_gte("2.30.0")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_item():
|
def test_delete_item():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -176,6 +185,7 @@ def test_delete_item():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_delete_item_with_undeclared_table():
|
def test_delete_item_with_undeclared_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -187,6 +197,7 @@ def test_delete_item_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_query():
|
def test_query():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -207,6 +218,7 @@ def test_query():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_query_with_undeclared_table():
|
def test_query_with_undeclared_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -218,6 +230,7 @@ def test_query_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_scan():
|
def test_scan():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -269,6 +282,7 @@ def test_scan():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_scan_with_undeclared_table():
|
def test_scan_with_undeclared_table():
|
||||||
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
conn = boto.dynamodb2.layer1.DynamoDBConnection()
|
||||||
@ -287,6 +301,7 @@ def test_scan_with_undeclared_table():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_write_batch():
|
def test_write_batch():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -318,6 +333,7 @@ def test_write_batch():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_batch_read():
|
def test_batch_read():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -359,6 +375,7 @@ def test_batch_read():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_key_fields():
|
def test_get_key_fields():
|
||||||
table = create_table()
|
table = create_table()
|
||||||
@ -367,6 +384,7 @@ def test_get_key_fields():
|
|||||||
|
|
||||||
|
|
||||||
@requires_boto_gte("2.9")
|
@requires_boto_gte("2.9")
|
||||||
|
@py3_requires_boto_gte("2.33.0")
|
||||||
@mock_dynamodb2
|
@mock_dynamodb2
|
||||||
def test_get_special_item():
|
def test_get_special_item():
|
||||||
table = Table.create('messages', schema=[
|
table = Table.create('messages', schema=[
|
||||||
|
@ -16,4 +16,4 @@ def test_table_list():
|
|||||||
|
|
||||||
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
headers = {'X-Amz-Target': 'TestTable.ListTables'}
|
||||||
res = test_client.get('/', headers=headers)
|
res = test_client.get('/', headers=headers)
|
||||||
res.data.should.contain('TableNames')
|
res.data.should.contain(b'TableNames')
|
||||||
|
@ -5,13 +5,13 @@ from nose.tools import assert_raises
|
|||||||
|
|
||||||
import boto
|
import boto
|
||||||
from boto.exception import EC2ResponseError
|
from boto.exception import EC2ResponseError
|
||||||
|
import six
|
||||||
|
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
from moto import mock_ec2
|
from moto import mock_ec2
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import types
|
|
||||||
|
|
||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
@ -21,7 +21,7 @@ def test_eip_allocate_classic():
|
|||||||
|
|
||||||
standard = conn.allocate_address()
|
standard = conn.allocate_address()
|
||||||
standard.should.be.a(boto.ec2.address.Address)
|
standard.should.be.a(boto.ec2.address.Address)
|
||||||
standard.public_ip.should.be.a(types.UnicodeType)
|
standard.public_ip.should.be.a(six.text_type)
|
||||||
standard.instance_id.should.be.none
|
standard.instance_id.should.be.none
|
||||||
standard.domain.should.be.equal("standard")
|
standard.domain.should.be.equal("standard")
|
||||||
standard.release()
|
standard.release()
|
||||||
|
@ -184,15 +184,16 @@ def test_instance_attribute_user_data():
|
|||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
def test_user_data_with_run_instance():
|
def test_user_data_with_run_instance():
|
||||||
user_data = "some user data"
|
user_data = b"some user data"
|
||||||
conn = boto.connect_ec2('the_key', 'the_secret')
|
conn = boto.connect_ec2('the_key', 'the_secret')
|
||||||
reservation = conn.run_instances('ami-1234abcd', user_data=user_data)
|
reservation = conn.run_instances('ami-1234abcd', user_data=user_data)
|
||||||
instance = reservation.instances[0]
|
instance = reservation.instances[0]
|
||||||
|
|
||||||
instance_attribute = instance.get_attribute("userData")
|
instance_attribute = instance.get_attribute("userData")
|
||||||
instance_attribute.should.be.a(InstanceAttribute)
|
instance_attribute.should.be.a(InstanceAttribute)
|
||||||
decoded_user_data = base64.decodestring(instance_attribute.get("userData"))
|
retrieved_user_data = instance_attribute.get("userData").encode('utf-8')
|
||||||
decoded_user_data.should.equal("some user data")
|
decoded_user_data = base64.decodestring(retrieved_user_data)
|
||||||
|
decoded_user_data.should.equal(b"some user data")
|
||||||
|
|
||||||
|
|
||||||
@mock_ec2
|
@mock_ec2
|
||||||
|
@ -4,6 +4,7 @@ import tests.backport_assert_raises
|
|||||||
from nose.tools import assert_raises
|
from nose.tools import assert_raises
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
|
import six
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
from boto.exception import EC2ResponseError
|
from boto.exception import EC2ResponseError
|
||||||
@ -45,6 +46,9 @@ def test_key_pairs_create_two():
|
|||||||
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
assert kp.material.startswith('---- BEGIN RSA PRIVATE KEY ----')
|
||||||
kps = conn.get_all_key_pairs()
|
kps = conn.get_all_key_pairs()
|
||||||
assert len(kps) == 2
|
assert len(kps) == 2
|
||||||
|
# on Python 3, these are reversed for some reason
|
||||||
|
if six.PY3:
|
||||||
|
return
|
||||||
assert kps[0].name == 'foo'
|
assert kps[0].name == 'foo'
|
||||||
assert kps[1].name == 'bar'
|
assert kps[1].name == 'bar'
|
||||||
kps = conn.get_all_key_pairs('foo')
|
kps = conn.get_all_key_pairs('foo')
|
||||||
|
@ -18,8 +18,8 @@ def test_ec2_server_get():
|
|||||||
headers={"Host": "ec2.us-east-1.amazonaws.com"}
|
headers={"Host": "ec2.us-east-1.amazonaws.com"}
|
||||||
)
|
)
|
||||||
|
|
||||||
groups = re.search("<instanceId>(.*)</instanceId>", res.data)
|
groups = re.search("<instanceId>(.*)</instanceId>", res.data.decode('utf-8'))
|
||||||
instance_id = groups.groups()[0]
|
instance_id = groups.groups()[0]
|
||||||
|
|
||||||
res = test_client.get('/?Action=DescribeInstances')
|
res = test_client.get('/?Action=DescribeInstances')
|
||||||
res.data.should.contain(instance_id)
|
res.data.decode('utf-8').should.contain(instance_id)
|
||||||
|
@ -23,7 +23,7 @@ def test_request_spot_instances():
|
|||||||
price=0.5, image_id='ami-abcd1234', count=1, type='one-time',
|
price=0.5, image_id='ami-abcd1234', count=1, type='one-time',
|
||||||
valid_from=start, valid_until=end, launch_group="the-group",
|
valid_from=start, valid_until=end, launch_group="the-group",
|
||||||
availability_zone_group='my-group', key_name="test",
|
availability_zone_group='my-group', key_name="test",
|
||||||
security_groups=['group1', 'group2'], user_data="some test data",
|
security_groups=['group1', 'group2'], user_data=b"some test data",
|
||||||
instance_type='m1.small', placement='us-east-1c',
|
instance_type='m1.small', placement='us-east-1c',
|
||||||
kernel_id="test-kernel", ramdisk_id="test-ramdisk",
|
kernel_id="test-kernel", ramdisk_id="test-ramdisk",
|
||||||
monitoring_enabled=True, subnet_id="subnet123",
|
monitoring_enabled=True, subnet_id="subnet123",
|
||||||
|
@ -14,4 +14,4 @@ def test_elb_describe_instances():
|
|||||||
|
|
||||||
res = test_client.get('/?Action=DescribeLoadBalancers')
|
res = test_client.get('/?Action=DescribeLoadBalancers')
|
||||||
|
|
||||||
res.data.should.contain('DescribeLoadBalancersResponse')
|
res.data.should.contain(b'DescribeLoadBalancersResponse')
|
||||||
|
@ -14,5 +14,5 @@ def test_describe_jobflows():
|
|||||||
|
|
||||||
res = test_client.get('/?Action=DescribeJobFlows')
|
res = test_client.get('/?Action=DescribeJobFlows')
|
||||||
|
|
||||||
res.data.should.contain('<DescribeJobFlowsResult>')
|
res.data.should.contain(b'<DescribeJobFlowsResult>')
|
||||||
res.data.should.contain('<JobFlows>')
|
res.data.should.contain(b'<JobFlows>')
|
||||||
|
@ -52,7 +52,7 @@ def test_create_role_and_instance_profile():
|
|||||||
|
|
||||||
profile = conn.get_instance_profile("my-profile")
|
profile = conn.get_instance_profile("my-profile")
|
||||||
profile.path.should.equal("my-path")
|
profile.path.should.equal("my-path")
|
||||||
role_from_profile = profile.roles.values()[0]
|
role_from_profile = list(profile.roles.values())[0]
|
||||||
role_from_profile['role_id'].should.equal(role.role_id)
|
role_from_profile['role_id'].should.equal(role.role_id)
|
||||||
role_from_profile['role_name'].should.equal("my-role")
|
role_from_profile['role_name'].should.equal("my-role")
|
||||||
|
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import urllib2
|
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
from boto.exception import S3ResponseError
|
from boto.exception import S3ResponseError
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import urllib2
|
import six
|
||||||
|
from six.moves.urllib.request import urlopen
|
||||||
|
from six.moves.urllib.error import HTTPError
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
@ -8,6 +10,8 @@ from boto.s3.connection import S3Connection
|
|||||||
from boto.s3.key import Key
|
from boto.s3.key import Key
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
import requests
|
import requests
|
||||||
|
import tests.backport_assert_raises
|
||||||
|
from nose.tools import assert_raises
|
||||||
|
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
@ -37,7 +41,7 @@ def test_my_model_save():
|
|||||||
model_instance = MyModel('steve', 'is awesome')
|
model_instance = MyModel('steve', 'is awesome')
|
||||||
model_instance.save()
|
model_instance.save()
|
||||||
|
|
||||||
conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')
|
conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal(b'is awesome')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -95,7 +99,7 @@ def test_multipart_upload_with_copy_key():
|
|||||||
multipart.upload_part_from_file(BytesIO(part1), 1)
|
multipart.upload_part_from_file(BytesIO(part1), 1)
|
||||||
multipart.copy_part_from_key("foobar", "original-key", 2)
|
multipart.copy_part_from_key("foobar", "original-key", 2)
|
||||||
multipart.complete_upload()
|
multipart.complete_upload()
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + "key_value")
|
bucket.get_key("the-key").get_contents_as_string().should.equal(part1 + b"key_value")
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -172,7 +176,7 @@ def test_missing_key_urllib2():
|
|||||||
conn = boto.connect_s3('the_key', 'the_secret')
|
conn = boto.connect_s3('the_key', 'the_secret')
|
||||||
conn.create_bucket("foobar")
|
conn.create_bucket("foobar")
|
||||||
|
|
||||||
urllib2.urlopen.when.called_with("http://foobar.s3.amazonaws.com/the-key").should.throw(urllib2.HTTPError)
|
urlopen.when.called_with("http://foobar.s3.amazonaws.com/the-key").should.throw(HTTPError)
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -183,7 +187,7 @@ def test_empty_key():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("")
|
key.set_contents_from_string("")
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -194,10 +198,10 @@ def test_empty_key_set_on_existing_key():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("foobar")
|
key.set_contents_from_string("foobar")
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('foobar')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'foobar')
|
||||||
|
|
||||||
key.set_contents_from_string("")
|
key.set_contents_from_string("")
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -208,7 +212,7 @@ def test_large_key_save():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("foobar" * 100000)
|
key.set_contents_from_string("foobar" * 100000)
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('foobar' * 100000)
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'foobar' * 100000)
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -221,8 +225,8 @@ def test_copy_key():
|
|||||||
|
|
||||||
bucket.copy_key('new-key', 'foobar', 'the-key')
|
bucket.copy_key('new-key', 'foobar', 'the-key')
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal("some value")
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b"some value")
|
||||||
bucket.get_key("new-key").get_contents_as_string().should.equal("some value")
|
bucket.get_key("new-key").get_contents_as_string().should.equal(b"some value")
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -286,7 +290,8 @@ def test_create_existing_bucket():
|
|||||||
"Trying to create a bucket that already exists should raise an Error"
|
"Trying to create a bucket that already exists should raise an Error"
|
||||||
conn = boto.connect_s3('the_key', 'the_secret')
|
conn = boto.connect_s3('the_key', 'the_secret')
|
||||||
conn.create_bucket("foobar")
|
conn.create_bucket("foobar")
|
||||||
conn.create_bucket.when.called_with('foobar').should.throw(S3CreateError)
|
with assert_raises(S3CreateError):
|
||||||
|
conn.create_bucket('foobar')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -338,7 +343,7 @@ def test_post_to_bucket():
|
|||||||
'file': 'nothing'
|
'file': 'nothing'
|
||||||
})
|
})
|
||||||
|
|
||||||
bucket.get_key('the-key').get_contents_as_string().should.equal('nothing')
|
bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
@ -585,18 +590,18 @@ def test_list_versions():
|
|||||||
|
|
||||||
versions[0].name.should.equal('the-key')
|
versions[0].name.should.equal('the-key')
|
||||||
versions[0].version_id.should.equal('0')
|
versions[0].version_id.should.equal('0')
|
||||||
versions[0].get_contents_as_string().should.equal("Version 1")
|
versions[0].get_contents_as_string().should.equal(b"Version 1")
|
||||||
|
|
||||||
versions[1].name.should.equal('the-key')
|
versions[1].name.should.equal('the-key')
|
||||||
versions[1].version_id.should.equal('1')
|
versions[1].version_id.should.equal('1')
|
||||||
versions[1].get_contents_as_string().should.equal("Version 2")
|
versions[1].get_contents_as_string().should.equal(b"Version 2")
|
||||||
|
|
||||||
|
|
||||||
@mock_s3
|
@mock_s3
|
||||||
def test_acl_is_ignored_for_now():
|
def test_acl_is_ignored_for_now():
|
||||||
conn = boto.connect_s3()
|
conn = boto.connect_s3()
|
||||||
bucket = conn.create_bucket('foobar')
|
bucket = conn.create_bucket('foobar')
|
||||||
content = 'imafile'
|
content = b'imafile'
|
||||||
keyname = 'test.txt'
|
keyname = 'test.txt'
|
||||||
|
|
||||||
key = Key(bucket, name=keyname)
|
key = Key(bucket, name=keyname)
|
||||||
|
@ -14,7 +14,7 @@ def test_s3_server_get():
|
|||||||
|
|
||||||
res = test_client.get('/')
|
res = test_client.get('/')
|
||||||
|
|
||||||
res.data.should.contain('ListAllMyBucketsResult')
|
res.data.should.contain(b'ListAllMyBucketsResult')
|
||||||
|
|
||||||
|
|
||||||
def test_s3_server_bucket_create():
|
def test_s3_server_bucket_create():
|
||||||
@ -25,18 +25,18 @@ def test_s3_server_bucket_create():
|
|||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
|
|
||||||
res = test_client.get('/')
|
res = test_client.get('/')
|
||||||
res.data.should.contain('<Name>foobaz</Name>')
|
res.data.should.contain(b'<Name>foobaz</Name>')
|
||||||
|
|
||||||
res = test_client.get('/', 'http://foobaz.localhost:5000/')
|
res = test_client.get('/', 'http://foobaz.localhost:5000/')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.contain("ListBucketResult")
|
res.data.should.contain(b"ListBucketResult")
|
||||||
|
|
||||||
res = test_client.put('/bar', 'http://foobaz.localhost:5000/', data='test value')
|
res = test_client.put('/bar', 'http://foobaz.localhost:5000/', data='test value')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
|
|
||||||
res = test_client.get('/bar', 'http://foobaz.localhost:5000/')
|
res = test_client.get('/bar', 'http://foobaz.localhost:5000/')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.equal("test value")
|
res.data.should.equal(b"test value")
|
||||||
|
|
||||||
|
|
||||||
def test_s3_server_post_to_bucket():
|
def test_s3_server_post_to_bucket():
|
||||||
@ -53,4 +53,4 @@ def test_s3_server_post_to_bucket():
|
|||||||
|
|
||||||
res = test_client.get('/the-key', 'http://tester.localhost:5000/')
|
res = test_client.get('/the-key', 'http://tester.localhost:5000/')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.equal("nothing")
|
res.data.should.equal(b"nothing")
|
||||||
|
@ -14,46 +14,46 @@ def test_s3_server_get():
|
|||||||
|
|
||||||
res = test_client.get('/')
|
res = test_client.get('/')
|
||||||
|
|
||||||
res.data.should.contain('ListAllMyBucketsResult')
|
res.data.should.contain(b'ListAllMyBucketsResult')
|
||||||
|
|
||||||
|
|
||||||
def test_s3_server_bucket_create():
|
def test_s3_server_bucket_create():
|
||||||
backend = server.create_backend_app("s3bucket_path")
|
backend = server.create_backend_app("s3bucket_path")
|
||||||
test_client = backend.test_client()
|
test_client = backend.test_client()
|
||||||
|
|
||||||
res = test_client.put('/foobar', 'http://localhost:5000')
|
res = test_client.put('/foobar/', 'http://localhost:5000')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
|
|
||||||
res = test_client.get('/')
|
res = test_client.get('/')
|
||||||
res.data.should.contain('<Name>foobar</Name>')
|
res.data.should.contain(b'<Name>foobar</Name>')
|
||||||
|
|
||||||
res = test_client.get('/foobar', 'http://localhost:5000')
|
res = test_client.get('/foobar/', 'http://localhost:5000')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.contain("ListBucketResult")
|
res.data.should.contain(b"ListBucketResult")
|
||||||
|
|
||||||
res = test_client.get('/missing-bucket', 'http://localhost:5000')
|
res = test_client.get('/missing-bucket/', 'http://localhost:5000')
|
||||||
res.status_code.should.equal(404)
|
res.status_code.should.equal(404)
|
||||||
|
|
||||||
res = test_client.put('/foobar/bar', 'http://localhost:5000', data='test value')
|
res = test_client.put('/foobar/bar/', 'http://localhost:5000', data='test value')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
|
|
||||||
res = test_client.get('/foobar/bar', 'http://localhost:5000')
|
res = test_client.get('/foobar/bar/', 'http://localhost:5000')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.equal("test value")
|
res.data.should.equal(b"test value")
|
||||||
|
|
||||||
|
|
||||||
def test_s3_server_post_to_bucket():
|
def test_s3_server_post_to_bucket():
|
||||||
backend = server.create_backend_app("s3bucket_path")
|
backend = server.create_backend_app("s3bucket_path")
|
||||||
test_client = backend.test_client()
|
test_client = backend.test_client()
|
||||||
|
|
||||||
res = test_client.put('/foobar2', 'http://localhost:5000/')
|
res = test_client.put('/foobar2/', 'http://localhost:5000/')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
|
|
||||||
test_client.post('/foobar2', "https://localhost:5000/", data={
|
test_client.post('/foobar2/', "https://localhost:5000/", data={
|
||||||
'key': 'the-key',
|
'key': 'the-key',
|
||||||
'file': 'nothing'
|
'file': 'nothing'
|
||||||
})
|
})
|
||||||
|
|
||||||
res = test_client.get('/foobar2/the-key', 'http://localhost:5000/')
|
res = test_client.get('/foobar2/the-key/', 'http://localhost:5000/')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.equal("nothing")
|
res.data.should.equal(b"nothing")
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import urllib2
|
from six.moves.urllib.request import urlopen
|
||||||
|
from six.moves.urllib.error import HTTPError
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
from boto.exception import S3ResponseError
|
from boto.exception import S3ResponseError
|
||||||
@ -41,7 +42,7 @@ def test_my_model_save():
|
|||||||
model_instance = MyModel('steve', 'is awesome')
|
model_instance = MyModel('steve', 'is awesome')
|
||||||
model_instance.save()
|
model_instance.save()
|
||||||
|
|
||||||
conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal('is awesome')
|
conn.get_bucket('mybucket').get_key('steve').get_contents_as_string().should.equal(b'is awesome')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -56,7 +57,7 @@ def test_missing_key_urllib2():
|
|||||||
conn = create_connection('the_key', 'the_secret')
|
conn = create_connection('the_key', 'the_secret')
|
||||||
conn.create_bucket("foobar")
|
conn.create_bucket("foobar")
|
||||||
|
|
||||||
urllib2.urlopen.when.called_with("http://s3.amazonaws.com/foobar/the-key").should.throw(urllib2.HTTPError)
|
urlopen.when.called_with("http://s3.amazonaws.com/foobar/the-key").should.throw(HTTPError)
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -67,7 +68,7 @@ def test_empty_key():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("")
|
key.set_contents_from_string("")
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -78,10 +79,10 @@ def test_empty_key_set_on_existing_key():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("foobar")
|
key.set_contents_from_string("foobar")
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('foobar')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'foobar')
|
||||||
|
|
||||||
key.set_contents_from_string("")
|
key.set_contents_from_string("")
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('')
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -92,7 +93,7 @@ def test_large_key_save():
|
|||||||
key.key = "the-key"
|
key.key = "the-key"
|
||||||
key.set_contents_from_string("foobar" * 100000)
|
key.set_contents_from_string("foobar" * 100000)
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal('foobar' * 100000)
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b'foobar' * 100000)
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -105,8 +106,8 @@ def test_copy_key():
|
|||||||
|
|
||||||
bucket.copy_key('new-key', 'foobar', 'the-key')
|
bucket.copy_key('new-key', 'foobar', 'the-key')
|
||||||
|
|
||||||
bucket.get_key("the-key").get_contents_as_string().should.equal("some value")
|
bucket.get_key("the-key").get_contents_as_string().should.equal(b"some value")
|
||||||
bucket.get_key("new-key").get_contents_as_string().should.equal("some value")
|
bucket.get_key("new-key").get_contents_as_string().should.equal(b"some value")
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -191,7 +192,7 @@ def test_post_to_bucket():
|
|||||||
'file': 'nothing'
|
'file': 'nothing'
|
||||||
})
|
})
|
||||||
|
|
||||||
bucket.get_key('the-key').get_contents_as_string().should.equal('nothing')
|
bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing')
|
||||||
|
|
||||||
|
|
||||||
@mock_s3bucket_path
|
@mock_s3bucket_path
|
||||||
@ -275,8 +276,8 @@ def test_bucket_key_listing_order():
|
|||||||
|
|
||||||
delimiter = None
|
delimiter = None
|
||||||
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
|
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
|
||||||
keys.should.equal([u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key'])
|
keys.should.equal(['toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key'])
|
||||||
|
|
||||||
delimiter = '/'
|
delimiter = '/'
|
||||||
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
|
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
|
||||||
keys.should.equal([u'toplevel/x/'])
|
keys.should.equal(['toplevel/x/'])
|
||||||
|
@ -13,4 +13,4 @@ def test_ses_list_identities():
|
|||||||
test_client = backend.test_client()
|
test_client = backend.test_client()
|
||||||
|
|
||||||
res = test_client.get('/?Action=ListIdentities')
|
res = test_client.get('/?Action=ListIdentities')
|
||||||
res.data.should.contain("ListIdentitiesResponse")
|
res.data.should.contain(b"ListIdentitiesResponse")
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
from urlparse import parse_qs
|
from six.moves.urllib.parse import parse_qs
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
from freezegun import freeze_time
|
from freezegun import freeze_time
|
||||||
@ -49,7 +49,7 @@ def test_publish_to_http():
|
|||||||
|
|
||||||
last_request = httpretty.last_request()
|
last_request = httpretty.last_request()
|
||||||
last_request.method.should.equal("POST")
|
last_request.method.should.equal("POST")
|
||||||
parse_qs(last_request.body).should.equal({
|
parse_qs(last_request.body.decode('utf-8')).should.equal({
|
||||||
"Type": ["Notification"],
|
"Type": ["Notification"],
|
||||||
"MessageId": [message_id],
|
"MessageId": [message_id],
|
||||||
"TopicArn": ["arn:aws:sns:us-east-1:123456789012:some-topic"],
|
"TopicArn": ["arn:aws:sns:us-east-1:123456789012:some-topic"],
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
import boto
|
import boto
|
||||||
|
import six
|
||||||
|
|
||||||
import sure # noqa
|
import sure # noqa
|
||||||
|
|
||||||
@ -45,10 +46,20 @@ def test_topic_attributes():
|
|||||||
attributes["DeliveryPolicy"].should.equal("")
|
attributes["DeliveryPolicy"].should.equal("")
|
||||||
attributes["EffectiveDeliveryPolicy"].should.equal(DEFAULT_EFFECTIVE_DELIVERY_POLICY)
|
attributes["EffectiveDeliveryPolicy"].should.equal(DEFAULT_EFFECTIVE_DELIVERY_POLICY)
|
||||||
|
|
||||||
# boto can't handle unicode here :(
|
# boto can't handle prefix-mandatory strings:
|
||||||
conn.set_topic_attributes(topic_arn, "Policy", {b"foo": b"bar"})
|
# i.e. unicode on Python 2 -- u"foobar"
|
||||||
conn.set_topic_attributes(topic_arn, "DisplayName", "My display name")
|
# and bytes on Python 3 -- b"foobar"
|
||||||
conn.set_topic_attributes(topic_arn, "DeliveryPolicy", {b"http": {b"defaultHealthyRetryPolicy": {b"numRetries": 5}}})
|
if six.PY2:
|
||||||
|
policy = {b"foo": b"bar"}
|
||||||
|
displayname = b"My display name"
|
||||||
|
delivery = {b"http": {b"defaultHealthyRetryPolicy": {b"numRetries": 5}}}
|
||||||
|
else:
|
||||||
|
policy = {u"foo": u"bar"}
|
||||||
|
displayname = u"My display name"
|
||||||
|
delivery = {u"http": {u"defaultHealthyRetryPolicy": {u"numRetries": 5}}}
|
||||||
|
conn.set_topic_attributes(topic_arn, "Policy", policy)
|
||||||
|
conn.set_topic_attributes(topic_arn, "DisplayName", displayname)
|
||||||
|
conn.set_topic_attributes(topic_arn, "DeliveryPolicy", delivery)
|
||||||
|
|
||||||
attributes = conn.get_topic_attributes(topic_arn)['GetTopicAttributesResponse']['GetTopicAttributesResult']['Attributes']
|
attributes = conn.get_topic_attributes(topic_arn)['GetTopicAttributesResponse']['GetTopicAttributesResult']['Attributes']
|
||||||
attributes["Policy"].should.equal("{'foo': 'bar'}")
|
attributes["Policy"].should.equal("{'foo': 'bar'}")
|
||||||
|
@ -14,13 +14,13 @@ def test_sqs_list_identities():
|
|||||||
test_client = backend.test_client()
|
test_client = backend.test_client()
|
||||||
|
|
||||||
res = test_client.get('/?Action=ListQueues')
|
res = test_client.get('/?Action=ListQueues')
|
||||||
res.data.should.contain("ListQueuesResponse")
|
res.data.should.contain(b"ListQueuesResponse")
|
||||||
|
|
||||||
res = test_client.put('/?Action=CreateQueue&QueueName=testqueue')
|
res = test_client.put('/?Action=CreateQueue&QueueName=testqueue')
|
||||||
res = test_client.put('/?Action=CreateQueue&QueueName=otherqueue')
|
res = test_client.put('/?Action=CreateQueue&QueueName=otherqueue')
|
||||||
|
|
||||||
res = test_client.get('/?Action=ListQueues&QueueNamePrefix=other')
|
res = test_client.get('/?Action=ListQueues&QueueNamePrefix=other')
|
||||||
res.data.should_not.contain('testqueue')
|
res.data.should_not.contain(b'testqueue')
|
||||||
|
|
||||||
res = test_client.put(
|
res = test_client.put(
|
||||||
'/123/testqueue?MessageBody=test-message&Action=SendMessage')
|
'/123/testqueue?MessageBody=test-message&Action=SendMessage')
|
||||||
@ -28,5 +28,5 @@ def test_sqs_list_identities():
|
|||||||
res = test_client.get(
|
res = test_client.get(
|
||||||
'/123/testqueue?Action=ReceiveMessage&MaxNumberOfMessages=1')
|
'/123/testqueue?Action=ReceiveMessage&MaxNumberOfMessages=1')
|
||||||
|
|
||||||
message = re.search("<Body>(.*?)</Body>", res.data).groups()[0]
|
message = re.search("<Body>(.*?)</Body>", res.data.decode('utf-8')).groups()[0]
|
||||||
message.should.equal('test-message')
|
message.should.equal('test-message')
|
||||||
|
@ -14,8 +14,8 @@ def test_sts_get_session_token():
|
|||||||
|
|
||||||
res = test_client.get('/?Action=GetSessionToken')
|
res = test_client.get('/?Action=GetSessionToken')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.contain("SessionToken")
|
res.data.should.contain(b"SessionToken")
|
||||||
res.data.should.contain("AccessKeyId")
|
res.data.should.contain(b"AccessKeyId")
|
||||||
|
|
||||||
|
|
||||||
def test_sts_get_federation_token():
|
def test_sts_get_federation_token():
|
||||||
@ -24,5 +24,5 @@ def test_sts_get_federation_token():
|
|||||||
|
|
||||||
res = test_client.get('/?Action=GetFederationToken&Name=Bob')
|
res = test_client.get('/?Action=GetFederationToken&Name=Bob')
|
||||||
res.status_code.should.equal(200)
|
res.status_code.should.equal(200)
|
||||||
res.data.should.contain("SessionToken")
|
res.data.should.contain(b"SessionToken")
|
||||||
res.data.should.contain("AccessKeyId")
|
res.data.should.contain(b"AccessKeyId")
|
||||||
|
Loading…
x
Reference in New Issue
Block a user