commit
dd0f04764c
@ -23,6 +23,8 @@ matrix:
|
||||
sudo: true
|
||||
before_install:
|
||||
- export BOTO_CONFIG=/dev/null
|
||||
- export AWS_SECRET_ACCESS_KEY=foobar_secret
|
||||
- export AWS_ACCESS_KEY_ID=foobar_key
|
||||
install:
|
||||
# We build moto first so the docker container doesn't try to compile it as well, also note we don't use
|
||||
# -d for docker run so the logs show up in travis
|
||||
@ -32,8 +34,6 @@ install:
|
||||
|
||||
if [ "$TEST_SERVER_MODE" = "true" ]; then
|
||||
docker run --rm -t --name motoserver -e TEST_SERVER_MODE=true -e AWS_SECRET_ACCESS_KEY=server_secret -e AWS_ACCESS_KEY_ID=server_key -v `pwd`:/moto -p 5000:5000 -v /var/run/docker.sock:/var/run/docker.sock python:${TRAVIS_PYTHON_VERSION}-stretch /moto/travis_moto_server.sh &
|
||||
export AWS_SECRET_ACCESS_KEY=foobar_secret
|
||||
export AWS_ACCESS_KEY_ID=foobar_key
|
||||
fi
|
||||
travis_retry pip install boto==2.45.0
|
||||
travis_retry pip install boto3
|
||||
|
@ -1,6 +1,11 @@
|
||||
Moto Changelog
|
||||
===================
|
||||
|
||||
1.3.7
|
||||
-----
|
||||
|
||||
* Switch from mocking requests to using before-send for AWS calls
|
||||
|
||||
1.3.6
|
||||
-----
|
||||
|
||||
|
@ -835,8 +835,8 @@
|
||||
- [ ] admin_delete_user
|
||||
- [ ] admin_delete_user_attributes
|
||||
- [ ] admin_disable_provider_for_user
|
||||
- [ ] admin_disable_user
|
||||
- [ ] admin_enable_user
|
||||
- [X] admin_disable_user
|
||||
- [X] admin_enable_user
|
||||
- [ ] admin_forget_device
|
||||
- [ ] admin_get_device
|
||||
- [ ] admin_get_user
|
||||
@ -3092,23 +3092,23 @@
|
||||
- [ ] update_server
|
||||
- [ ] update_server_engine_attributes
|
||||
|
||||
## organizations - 0% implemented
|
||||
## organizations - 30% implemented
|
||||
- [ ] accept_handshake
|
||||
- [ ] attach_policy
|
||||
- [ ] cancel_handshake
|
||||
- [ ] create_account
|
||||
- [ ] create_organization
|
||||
- [ ] create_organizational_unit
|
||||
- [X] create_account
|
||||
- [X] create_organization
|
||||
- [X] create_organizational_unit
|
||||
- [ ] create_policy
|
||||
- [ ] decline_handshake
|
||||
- [ ] delete_organization
|
||||
- [ ] delete_organizational_unit
|
||||
- [ ] delete_policy
|
||||
- [ ] describe_account
|
||||
- [X] describe_account
|
||||
- [ ] describe_create_account_status
|
||||
- [ ] describe_handshake
|
||||
- [ ] describe_organization
|
||||
- [ ] describe_organizational_unit
|
||||
- [X] describe_organization
|
||||
- [X] describe_organizational_unit
|
||||
- [ ] describe_policy
|
||||
- [ ] detach_policy
|
||||
- [ ] disable_aws_service_access
|
||||
@ -3118,20 +3118,20 @@
|
||||
- [ ] enable_policy_type
|
||||
- [ ] invite_account_to_organization
|
||||
- [ ] leave_organization
|
||||
- [ ] list_accounts
|
||||
- [ ] list_accounts_for_parent
|
||||
- [X] list_accounts
|
||||
- [X] list_accounts_for_parent
|
||||
- [ ] list_aws_service_access_for_organization
|
||||
- [ ] list_children
|
||||
- [X] list_children
|
||||
- [ ] list_create_account_status
|
||||
- [ ] list_handshakes_for_account
|
||||
- [ ] list_handshakes_for_organization
|
||||
- [ ] list_organizational_units_for_parent
|
||||
- [ ] list_parents
|
||||
- [X] list_organizational_units_for_parent
|
||||
- [X] list_parents
|
||||
- [ ] list_policies
|
||||
- [ ] list_policies_for_target
|
||||
- [ ] list_roots
|
||||
- [X] list_roots
|
||||
- [ ] list_targets_for_policy
|
||||
- [ ] move_account
|
||||
- [X] move_account
|
||||
- [ ] remove_account_from_organization
|
||||
- [ ] update_organizational_unit
|
||||
- [ ] update_policy
|
||||
|
@ -112,6 +112,8 @@ It gets even better! Moto isn't just for Python code and it isn't just for S3. L
|
||||
|------------------------------------------------------------------------------|
|
||||
| KMS | @mock_kms | basic endpoints done |
|
||||
|------------------------------------------------------------------------------|
|
||||
| Organizations | @mock_organizations | some core endpoints done |
|
||||
|------------------------------------------------------------------------------|
|
||||
| Polly | @mock_polly | all endpoints done |
|
||||
|------------------------------------------------------------------------------|
|
||||
| RDS | @mock_rds | core endpoints done |
|
||||
|
@ -34,11 +34,11 @@ Currently implemented Services:
|
||||
| - DynamoDB2 | - @mock_dynamodb2 | - core endpoints + partial indexes|
|
||||
+-----------------------+---------------------+-----------------------------------+
|
||||
| EC2 | @mock_ec2 | core endpoints done |
|
||||
| - AMI | | core endpoints done |
|
||||
| - EBS | | core endpoints done |
|
||||
| - Instances | | all endpoints done |
|
||||
| - Security Groups | | core endpoints done |
|
||||
| - Tags | | all endpoints done |
|
||||
| - AMI | | - core endpoints done |
|
||||
| - EBS | | - core endpoints done |
|
||||
| - Instances | | - all endpoints done |
|
||||
| - Security Groups | | - core endpoints done |
|
||||
| - Tags | | - all endpoints done |
|
||||
+-----------------------+---------------------+-----------------------------------+
|
||||
| ECS | @mock_ecs | basic endpoints done |
|
||||
+-----------------------+---------------------+-----------------------------------+
|
||||
|
@ -3,7 +3,7 @@ import logging
|
||||
# logging.getLogger('boto').setLevel(logging.CRITICAL)
|
||||
|
||||
__title__ = 'moto'
|
||||
__version__ = '1.3.6'
|
||||
__version__ = '1.3.7'
|
||||
|
||||
from .acm import mock_acm # flake8: noqa
|
||||
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
|
||||
@ -28,6 +28,7 @@ from .glue import mock_glue # flake8: noqa
|
||||
from .iam import mock_iam, mock_iam_deprecated # flake8: noqa
|
||||
from .kinesis import mock_kinesis, mock_kinesis_deprecated # flake8: noqa
|
||||
from .kms import mock_kms, mock_kms_deprecated # flake8: noqa
|
||||
from .organizations import mock_organizations # flake8: noqa
|
||||
from .opsworks import mock_opsworks, mock_opsworks_deprecated # flake8: noqa
|
||||
from .polly import mock_polly # flake8: noqa
|
||||
from .rds import mock_rds, mock_rds_deprecated # flake8: noqa
|
||||
|
@ -10,6 +10,7 @@ from boto3.session import Session
|
||||
import responses
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from .utils import create_id
|
||||
from moto.core.utils import path_url
|
||||
from .exceptions import StageNotFoundException, ApiKeyNotFoundException
|
||||
|
||||
STAGE_URL = "https://{api_id}.execute-api.{region_name}.amazonaws.com/{stage_name}"
|
||||
@ -372,7 +373,8 @@ class RestAPI(BaseModel):
|
||||
# TODO deal with no matching resource
|
||||
|
||||
def resource_callback(self, request):
|
||||
path_after_stage_name = '/'.join(request.path_url.split("/")[2:])
|
||||
path = path_url(request.url)
|
||||
path_after_stage_name = '/'.join(path.split("/")[2:])
|
||||
if not path_after_stage_name:
|
||||
path_after_stage_name = '/'
|
||||
|
||||
@ -606,8 +608,15 @@ class APIGatewayBackend(BaseBackend):
|
||||
self.usage_plans[plan['id']] = plan
|
||||
return plan
|
||||
|
||||
def get_usage_plans(self):
|
||||
return list(self.usage_plans.values())
|
||||
def get_usage_plans(self, api_key_id=None):
|
||||
plans = list(self.usage_plans.values())
|
||||
if api_key_id is not None:
|
||||
plans = [
|
||||
plan
|
||||
for plan in plans
|
||||
if self.usage_plan_keys.get(plan['id'], {}).get(api_key_id, False)
|
||||
]
|
||||
return plans
|
||||
|
||||
def get_usage_plan(self, usage_plan_id):
|
||||
return self.usage_plans[usage_plan_id]
|
||||
|
@ -255,7 +255,8 @@ class APIGatewayResponse(BaseResponse):
|
||||
if self.method == 'POST':
|
||||
usage_plan_response = self.backend.create_usage_plan(json.loads(self.body))
|
||||
elif self.method == 'GET':
|
||||
usage_plans_response = self.backend.get_usage_plans()
|
||||
api_key_id = self.querystring.get("keyId", [None])[0]
|
||||
usage_plans_response = self.backend.get_usage_plans(api_key_id=api_key_id)
|
||||
return 200, {}, json.dumps({"item": usage_plans_response})
|
||||
return 200, {}, json.dumps(usage_plan_response)
|
||||
|
||||
|
@ -7,7 +7,7 @@ try:
|
||||
except ImportError:
|
||||
from urllib.parse import unquote
|
||||
|
||||
from moto.core.utils import amz_crc32, amzn_request_id
|
||||
from moto.core.utils import amz_crc32, amzn_request_id, path_url
|
||||
from moto.core.responses import BaseResponse
|
||||
from .models import lambda_backends
|
||||
|
||||
@ -94,7 +94,7 @@ class LambdaResponse(BaseResponse):
|
||||
return self._add_policy(request, full_url, headers)
|
||||
|
||||
def _add_policy(self, request, full_url, headers):
|
||||
path = request.path if hasattr(request, 'path') else request.path_url
|
||||
path = request.path if hasattr(request, 'path') else path_url(request.url)
|
||||
function_name = path.split('/')[-2]
|
||||
if self.lambda_backend.get_function(function_name):
|
||||
policy = request.body.decode('utf8')
|
||||
@ -104,7 +104,7 @@ class LambdaResponse(BaseResponse):
|
||||
return 404, {}, "{}"
|
||||
|
||||
def _get_policy(self, request, full_url, headers):
|
||||
path = request.path if hasattr(request, 'path') else request.path_url
|
||||
path = request.path if hasattr(request, 'path') else path_url(request.url)
|
||||
function_name = path.split('/')[-2]
|
||||
if self.lambda_backend.get_function(function_name):
|
||||
lambda_function = self.lambda_backend.get_function(function_name)
|
||||
|
@ -27,6 +27,7 @@ from moto.kinesis import kinesis_backends
|
||||
from moto.kms import kms_backends
|
||||
from moto.logs import logs_backends
|
||||
from moto.opsworks import opsworks_backends
|
||||
from moto.organizations import organizations_backends
|
||||
from moto.polly import polly_backends
|
||||
from moto.rds2 import rds2_backends
|
||||
from moto.redshift import redshift_backends
|
||||
@ -74,6 +75,7 @@ BACKENDS = {
|
||||
'kinesis': kinesis_backends,
|
||||
'kms': kms_backends,
|
||||
'opsworks': opsworks_backends,
|
||||
'organizations': organizations_backends,
|
||||
'polly': polly_backends,
|
||||
'redshift': redshift_backends,
|
||||
'rds': rds2_backends,
|
||||
|
@ -3,6 +3,7 @@ from __future__ import unicode_literals
|
||||
from moto.core.responses import BaseResponse
|
||||
|
||||
from .models import cognitoidentity_backends
|
||||
from .utils import get_random_identity_id
|
||||
|
||||
|
||||
class CognitoIdentityResponse(BaseResponse):
|
||||
@ -31,4 +32,6 @@ class CognitoIdentityResponse(BaseResponse):
|
||||
return cognitoidentity_backends[self.region].get_credentials_for_identity(self._get_param('IdentityId'))
|
||||
|
||||
def get_open_id_token_for_developer_identity(self):
|
||||
return cognitoidentity_backends[self.region].get_open_id_token_for_developer_identity(self._get_param('IdentityId'))
|
||||
return cognitoidentity_backends[self.region].get_open_id_token_for_developer_identity(
|
||||
self._get_param('IdentityId') or get_random_identity_id(self.region)
|
||||
)
|
||||
|
@ -2,4 +2,4 @@ from moto.core.utils import get_random_hex
|
||||
|
||||
|
||||
def get_random_identity_id(region):
|
||||
return "{0}:{0}".format(region, get_random_hex(length=19))
|
||||
return "{0}:{1}".format(region, get_random_hex(length=19))
|
||||
|
@ -24,7 +24,7 @@ class CognitoIdpUserPool(BaseModel):
|
||||
|
||||
def __init__(self, region, name, extended_config):
|
||||
self.region = region
|
||||
self.id = str(uuid.uuid4())
|
||||
self.id = "{}_{}".format(self.region, str(uuid.uuid4().hex))
|
||||
self.name = name
|
||||
self.status = None
|
||||
self.extended_config = extended_config or {}
|
||||
@ -84,7 +84,11 @@ class CognitoIdpUserPool(BaseModel):
|
||||
return refresh_token
|
||||
|
||||
def create_access_token(self, client_id, username):
|
||||
access_token, expires_in = self.create_jwt(client_id, username)
|
||||
extra_data = self.get_user_extra_data_by_client_id(
|
||||
client_id, username
|
||||
)
|
||||
access_token, expires_in = self.create_jwt(client_id, username,
|
||||
extra_data=extra_data)
|
||||
self.access_tokens[access_token] = (client_id, username)
|
||||
return access_token, expires_in
|
||||
|
||||
@ -97,6 +101,21 @@ class CognitoIdpUserPool(BaseModel):
|
||||
id_token, _ = self.create_id_token(client_id, username)
|
||||
return access_token, id_token, expires_in
|
||||
|
||||
def get_user_extra_data_by_client_id(self, client_id, username):
|
||||
extra_data = {}
|
||||
current_client = self.clients.get(client_id, None)
|
||||
if current_client:
|
||||
for readable_field in current_client.get_readable_fields():
|
||||
attribute = list(filter(
|
||||
lambda f: f['Name'] == readable_field,
|
||||
self.users.get(username).attributes
|
||||
))
|
||||
if len(attribute) > 0:
|
||||
extra_data.update({
|
||||
attribute[0]['Name']: attribute[0]['Value']
|
||||
})
|
||||
return extra_data
|
||||
|
||||
|
||||
class CognitoIdpUserPoolDomain(BaseModel):
|
||||
|
||||
@ -138,6 +157,9 @@ class CognitoIdpUserPoolClient(BaseModel):
|
||||
|
||||
return user_pool_client_json
|
||||
|
||||
def get_readable_fields(self):
|
||||
return self.extended_config.get('ReadAttributes', [])
|
||||
|
||||
|
||||
class CognitoIdpIdentityProvider(BaseModel):
|
||||
|
||||
@ -361,7 +383,7 @@ class CognitoIdpBackend(BaseBackend):
|
||||
raise ResourceNotFoundError(user_pool_id)
|
||||
|
||||
if username not in user_pool.users:
|
||||
raise ResourceNotFoundError(username)
|
||||
raise UserNotFoundError(username)
|
||||
|
||||
return user_pool.users[username]
|
||||
|
||||
@ -372,13 +394,21 @@ class CognitoIdpBackend(BaseBackend):
|
||||
|
||||
return user_pool.users.values()
|
||||
|
||||
def admin_disable_user(self, user_pool_id, username):
|
||||
user = self.admin_get_user(user_pool_id, username)
|
||||
user.enabled = False
|
||||
|
||||
def admin_enable_user(self, user_pool_id, username):
|
||||
user = self.admin_get_user(user_pool_id, username)
|
||||
user.enabled = True
|
||||
|
||||
def admin_delete_user(self, user_pool_id, username):
|
||||
user_pool = self.user_pools.get(user_pool_id)
|
||||
if not user_pool:
|
||||
raise ResourceNotFoundError(user_pool_id)
|
||||
|
||||
if username not in user_pool.users:
|
||||
raise ResourceNotFoundError(username)
|
||||
raise UserNotFoundError(username)
|
||||
|
||||
del user_pool.users[username]
|
||||
|
||||
|
@ -160,6 +160,18 @@ class CognitoIdpResponse(BaseResponse):
|
||||
"Users": [user.to_json(extended=True) for user in users]
|
||||
})
|
||||
|
||||
def admin_disable_user(self):
|
||||
user_pool_id = self._get_param("UserPoolId")
|
||||
username = self._get_param("Username")
|
||||
cognitoidp_backends[self.region].admin_disable_user(user_pool_id, username)
|
||||
return ""
|
||||
|
||||
def admin_enable_user(self):
|
||||
user_pool_id = self._get_param("UserPoolId")
|
||||
username = self._get_param("Username")
|
||||
cognitoidp_backends[self.region].admin_enable_user(user_pool_id, username)
|
||||
return ""
|
||||
|
||||
def admin_delete_user(self):
|
||||
user_pool_id = self._get_param("UserPoolId")
|
||||
username = self._get_param("Username")
|
||||
|
@ -2,11 +2,14 @@
|
||||
from __future__ import unicode_literals
|
||||
from __future__ import absolute_import
|
||||
|
||||
from collections import defaultdict
|
||||
import functools
|
||||
import inspect
|
||||
import re
|
||||
import six
|
||||
from io import BytesIO
|
||||
from collections import defaultdict
|
||||
from botocore.handlers import BUILTIN_HANDLERS
|
||||
from botocore.awsrequest import AWSResponse
|
||||
|
||||
from moto import settings
|
||||
import responses
|
||||
@ -233,7 +236,111 @@ class ResponsesMockAWS(BaseMockAWS):
|
||||
pass
|
||||
|
||||
|
||||
MockAWS = ResponsesMockAWS
|
||||
BOTOCORE_HTTP_METHODS = [
|
||||
'GET', 'DELETE', 'HEAD', 'OPTIONS', 'PATCH', 'POST', 'PUT'
|
||||
]
|
||||
|
||||
|
||||
class MockRawResponse(BytesIO):
|
||||
def __init__(self, input):
|
||||
if isinstance(input, six.text_type):
|
||||
input = input.encode('utf-8')
|
||||
super(MockRawResponse, self).__init__(input)
|
||||
|
||||
def stream(self, **kwargs):
|
||||
contents = self.read()
|
||||
while contents:
|
||||
yield contents
|
||||
contents = self.read()
|
||||
|
||||
|
||||
class BotocoreStubber(object):
|
||||
def __init__(self):
|
||||
self.enabled = False
|
||||
self.methods = defaultdict(list)
|
||||
|
||||
def reset(self):
|
||||
self.methods.clear()
|
||||
|
||||
def register_response(self, method, pattern, response):
|
||||
matchers = self.methods[method]
|
||||
matchers.append((pattern, response))
|
||||
|
||||
def __call__(self, event_name, request, **kwargs):
|
||||
if not self.enabled:
|
||||
return None
|
||||
|
||||
response = None
|
||||
response_callback = None
|
||||
found_index = None
|
||||
matchers = self.methods.get(request.method)
|
||||
|
||||
base_url = request.url.split('?', 1)[0]
|
||||
for i, (pattern, callback) in enumerate(matchers):
|
||||
if pattern.match(base_url):
|
||||
if found_index is None:
|
||||
found_index = i
|
||||
response_callback = callback
|
||||
else:
|
||||
matchers.pop(found_index)
|
||||
break
|
||||
|
||||
if response_callback is not None:
|
||||
for header, value in request.headers.items():
|
||||
if isinstance(value, six.binary_type):
|
||||
request.headers[header] = value.decode('utf-8')
|
||||
status, headers, body = response_callback(request, request.url, request.headers)
|
||||
body = MockRawResponse(body)
|
||||
response = AWSResponse(request.url, status, headers, body)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
botocore_stubber = BotocoreStubber()
|
||||
BUILTIN_HANDLERS.append(('before-send', botocore_stubber))
|
||||
|
||||
|
||||
class BotocoreEventMockAWS(BaseMockAWS):
|
||||
def reset(self):
|
||||
botocore_stubber.reset()
|
||||
responses_mock.reset()
|
||||
|
||||
def enable_patching(self):
|
||||
botocore_stubber.enabled = True
|
||||
for method in BOTOCORE_HTTP_METHODS:
|
||||
for backend in self.backends_for_urls.values():
|
||||
for key, value in backend.urls.items():
|
||||
pattern = re.compile(key)
|
||||
botocore_stubber.register_response(method, pattern, value)
|
||||
|
||||
if not hasattr(responses_mock, '_patcher') or not hasattr(responses_mock._patcher, 'target'):
|
||||
responses_mock.start()
|
||||
|
||||
for method in RESPONSES_METHODS:
|
||||
# for backend in default_backends.values():
|
||||
for backend in self.backends_for_urls.values():
|
||||
for key, value in backend.urls.items():
|
||||
responses_mock.add(
|
||||
CallbackResponse(
|
||||
method=method,
|
||||
url=re.compile(key),
|
||||
callback=convert_flask_to_responses_response(value),
|
||||
stream=True,
|
||||
match_querystring=False,
|
||||
)
|
||||
)
|
||||
|
||||
def disable_patching(self):
|
||||
botocore_stubber.enabled = False
|
||||
self.reset()
|
||||
|
||||
try:
|
||||
responses_mock.stop()
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
||||
|
||||
MockAWS = BotocoreEventMockAWS
|
||||
|
||||
|
||||
class ServerModeMockAWS(BaseMockAWS):
|
||||
|
@ -8,6 +8,7 @@ import random
|
||||
import re
|
||||
import six
|
||||
import string
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
|
||||
REQUEST_ID_LONG = string.digits + string.ascii_uppercase
|
||||
@ -286,3 +287,13 @@ def amzn_request_id(f):
|
||||
return status, headers, body
|
||||
|
||||
return _wrapper
|
||||
|
||||
|
||||
def path_url(url):
|
||||
parsed_url = urlparse(url)
|
||||
path = parsed_url.path
|
||||
if not path:
|
||||
path = '/'
|
||||
if parsed_url.query:
|
||||
path = path + '?' + parsed_url.query
|
||||
return path
|
||||
|
@ -154,7 +154,7 @@ class Item(BaseModel):
|
||||
# If not exists, changes value to a default if needed, else its the same as it was
|
||||
if value.startswith('if_not_exists'):
|
||||
# Function signature
|
||||
match = re.match(r'.*if_not_exists\((?P<path>.+),\s*(?P<default>.+)\).*', value)
|
||||
match = re.match(r'.*if_not_exists\s*\((?P<path>.+),\s*(?P<default>.+)\).*', value)
|
||||
if not match:
|
||||
raise TypeError
|
||||
|
||||
@ -162,12 +162,13 @@ class Item(BaseModel):
|
||||
|
||||
# If it already exists, get its value so we dont overwrite it
|
||||
if path in self.attrs:
|
||||
value = self.attrs[path].cast_value
|
||||
value = self.attrs[path]
|
||||
|
||||
if value in expression_attribute_values:
|
||||
value = DynamoType(expression_attribute_values[value])
|
||||
else:
|
||||
value = DynamoType({"S": value})
|
||||
if type(value) != DynamoType:
|
||||
if value in expression_attribute_values:
|
||||
value = DynamoType(expression_attribute_values[value])
|
||||
else:
|
||||
value = DynamoType({"S": value})
|
||||
|
||||
if '.' not in key:
|
||||
self.attrs[key] = value
|
||||
@ -264,9 +265,9 @@ class Item(BaseModel):
|
||||
self.attrs[attribute_name] = DynamoType({"SS": new_value})
|
||||
elif isinstance(new_value, dict):
|
||||
self.attrs[attribute_name] = DynamoType({"M": new_value})
|
||||
elif update_action['Value'].keys() == ['N']:
|
||||
elif set(update_action['Value'].keys()) == set(['N']):
|
||||
self.attrs[attribute_name] = DynamoType({"N": new_value})
|
||||
elif update_action['Value'].keys() == ['NULL']:
|
||||
elif set(update_action['Value'].keys()) == set(['NULL']):
|
||||
if attribute_name in self.attrs:
|
||||
del self.attrs[attribute_name]
|
||||
else:
|
||||
|
@ -204,9 +204,9 @@ class DynamoHandler(BaseResponse):
|
||||
if cond_items:
|
||||
expected = {}
|
||||
overwrite = False
|
||||
exists_re = re.compile('^attribute_exists\((.*)\)$')
|
||||
exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
|
||||
not_exists_re = re.compile(
|
||||
'^attribute_not_exists\((.*)\)$')
|
||||
'^attribute_not_exists\s*\((.*)\)$')
|
||||
|
||||
for cond in cond_items:
|
||||
exists_m = exists_re.match(cond)
|
||||
@ -556,9 +556,9 @@ class DynamoHandler(BaseResponse):
|
||||
|
||||
if cond_items:
|
||||
expected = {}
|
||||
exists_re = re.compile('^attribute_exists\((.*)\)$')
|
||||
exists_re = re.compile('^attribute_exists\s*\((.*)\)$')
|
||||
not_exists_re = re.compile(
|
||||
'^attribute_not_exists\((.*)\)$')
|
||||
'^attribute_not_exists\s*\((.*)\)$')
|
||||
|
||||
for cond in cond_items:
|
||||
exists_m = exists_re.match(cond)
|
||||
|
@ -8,4 +8,6 @@ class ServiceNotFoundException(RESTError):
|
||||
def __init__(self, service_name):
|
||||
super(ServiceNotFoundException, self).__init__(
|
||||
error_type="ServiceNotFoundException",
|
||||
message="The service {0} does not exist".format(service_name))
|
||||
message="The service {0} does not exist".format(service_name),
|
||||
template='error_json',
|
||||
)
|
||||
|
@ -6,19 +6,56 @@ class GlueClientError(JsonRESTError):
|
||||
code = 400
|
||||
|
||||
|
||||
class DatabaseAlreadyExistsException(GlueClientError):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
super(DatabaseAlreadyExistsException, self).__init__(
|
||||
'DatabaseAlreadyExistsException',
|
||||
'Database already exists.'
|
||||
class AlreadyExistsException(GlueClientError):
|
||||
def __init__(self, typ):
|
||||
super(GlueClientError, self).__init__(
|
||||
'AlreadyExistsException',
|
||||
'%s already exists.' % (typ),
|
||||
)
|
||||
|
||||
|
||||
class TableAlreadyExistsException(GlueClientError):
|
||||
class DatabaseAlreadyExistsException(AlreadyExistsException):
|
||||
def __init__(self):
|
||||
self.code = 400
|
||||
super(TableAlreadyExistsException, self).__init__(
|
||||
'TableAlreadyExistsException',
|
||||
'Table already exists.'
|
||||
super(DatabaseAlreadyExistsException, self).__init__('Database')
|
||||
|
||||
|
||||
class TableAlreadyExistsException(AlreadyExistsException):
|
||||
def __init__(self):
|
||||
super(TableAlreadyExistsException, self).__init__('Table')
|
||||
|
||||
|
||||
class PartitionAlreadyExistsException(AlreadyExistsException):
|
||||
def __init__(self):
|
||||
super(PartitionAlreadyExistsException, self).__init__('Partition')
|
||||
|
||||
|
||||
class EntityNotFoundException(GlueClientError):
|
||||
def __init__(self, msg):
|
||||
super(GlueClientError, self).__init__(
|
||||
'EntityNotFoundException',
|
||||
msg,
|
||||
)
|
||||
|
||||
|
||||
class DatabaseNotFoundException(EntityNotFoundException):
|
||||
def __init__(self, db):
|
||||
super(DatabaseNotFoundException, self).__init__(
|
||||
'Database %s not found.' % db,
|
||||
)
|
||||
|
||||
|
||||
class TableNotFoundException(EntityNotFoundException):
|
||||
def __init__(self, tbl):
|
||||
super(TableNotFoundException, self).__init__(
|
||||
'Table %s not found.' % tbl,
|
||||
)
|
||||
|
||||
|
||||
class PartitionNotFoundException(EntityNotFoundException):
|
||||
def __init__(self):
|
||||
super(PartitionNotFoundException, self).__init__("Cannot find partition.")
|
||||
|
||||
|
||||
class VersionNotFoundException(EntityNotFoundException):
|
||||
def __init__(self):
|
||||
super(VersionNotFoundException, self).__init__("Version not found.")
|
||||
|
@ -1,8 +1,19 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import time
|
||||
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.compat import OrderedDict
|
||||
from.exceptions import DatabaseAlreadyExistsException, TableAlreadyExistsException
|
||||
from.exceptions import (
|
||||
JsonRESTError,
|
||||
DatabaseAlreadyExistsException,
|
||||
DatabaseNotFoundException,
|
||||
TableAlreadyExistsException,
|
||||
TableNotFoundException,
|
||||
PartitionAlreadyExistsException,
|
||||
PartitionNotFoundException,
|
||||
VersionNotFoundException,
|
||||
)
|
||||
|
||||
|
||||
class GlueBackend(BaseBackend):
|
||||
@ -19,7 +30,10 @@ class GlueBackend(BaseBackend):
|
||||
return database
|
||||
|
||||
def get_database(self, database_name):
|
||||
return self.databases[database_name]
|
||||
try:
|
||||
return self.databases[database_name]
|
||||
except KeyError:
|
||||
raise DatabaseNotFoundException(database_name)
|
||||
|
||||
def create_table(self, database_name, table_name, table_input):
|
||||
database = self.get_database(database_name)
|
||||
@ -33,7 +47,10 @@ class GlueBackend(BaseBackend):
|
||||
|
||||
def get_table(self, database_name, table_name):
|
||||
database = self.get_database(database_name)
|
||||
return database.tables[table_name]
|
||||
try:
|
||||
return database.tables[table_name]
|
||||
except KeyError:
|
||||
raise TableNotFoundException(table_name)
|
||||
|
||||
def get_tables(self, database_name):
|
||||
database = self.get_database(database_name)
|
||||
@ -52,9 +69,84 @@ class FakeTable(BaseModel):
|
||||
def __init__(self, database_name, table_name, table_input):
|
||||
self.database_name = database_name
|
||||
self.name = table_name
|
||||
self.table_input = table_input
|
||||
self.storage_descriptor = self.table_input.get('StorageDescriptor', {})
|
||||
self.partition_keys = self.table_input.get('PartitionKeys', [])
|
||||
self.partitions = OrderedDict()
|
||||
self.versions = []
|
||||
self.update(table_input)
|
||||
|
||||
def update(self, table_input):
|
||||
self.versions.append(table_input)
|
||||
|
||||
def get_version(self, ver):
|
||||
try:
|
||||
if not isinstance(ver, int):
|
||||
# "1" goes to [0]
|
||||
ver = int(ver) - 1
|
||||
except ValueError as e:
|
||||
raise JsonRESTError("InvalidInputException", str(e))
|
||||
|
||||
try:
|
||||
return self.versions[ver]
|
||||
except IndexError:
|
||||
raise VersionNotFoundException()
|
||||
|
||||
def as_dict(self, version=-1):
|
||||
obj = {
|
||||
'DatabaseName': self.database_name,
|
||||
'Name': self.name,
|
||||
}
|
||||
obj.update(self.get_version(version))
|
||||
return obj
|
||||
|
||||
def create_partition(self, partiton_input):
|
||||
partition = FakePartition(self.database_name, self.name, partiton_input)
|
||||
key = str(partition.values)
|
||||
if key in self.partitions:
|
||||
raise PartitionAlreadyExistsException()
|
||||
self.partitions[str(partition.values)] = partition
|
||||
|
||||
def get_partitions(self):
|
||||
return [p for str_part_values, p in self.partitions.items()]
|
||||
|
||||
def get_partition(self, values):
|
||||
try:
|
||||
return self.partitions[str(values)]
|
||||
except KeyError:
|
||||
raise PartitionNotFoundException()
|
||||
|
||||
def update_partition(self, old_values, partiton_input):
|
||||
partition = FakePartition(self.database_name, self.name, partiton_input)
|
||||
key = str(partition.values)
|
||||
if old_values == partiton_input['Values']:
|
||||
# Altering a partition in place. Don't remove it so the order of
|
||||
# returned partitions doesn't change
|
||||
if key not in self.partitions:
|
||||
raise PartitionNotFoundException()
|
||||
else:
|
||||
removed = self.partitions.pop(str(old_values), None)
|
||||
if removed is None:
|
||||
raise PartitionNotFoundException()
|
||||
if key in self.partitions:
|
||||
# Trying to update to overwrite a partition that exists
|
||||
raise PartitionAlreadyExistsException()
|
||||
self.partitions[key] = partition
|
||||
|
||||
|
||||
class FakePartition(BaseModel):
|
||||
def __init__(self, database_name, table_name, partiton_input):
|
||||
self.creation_time = time.time()
|
||||
self.database_name = database_name
|
||||
self.table_name = table_name
|
||||
self.partition_input = partiton_input
|
||||
self.values = self.partition_input.get('Values', [])
|
||||
|
||||
def as_dict(self):
|
||||
obj = {
|
||||
'DatabaseName': self.database_name,
|
||||
'TableName': self.table_name,
|
||||
'CreationTime': self.creation_time,
|
||||
}
|
||||
obj.update(self.partition_input)
|
||||
return obj
|
||||
|
||||
|
||||
glue_backend = GlueBackend()
|
||||
|
@ -37,27 +37,94 @@ class GlueResponse(BaseResponse):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('Name')
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
|
||||
return json.dumps({'Table': table.as_dict()})
|
||||
|
||||
def update_table(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_input = self.parameters.get('TableInput')
|
||||
table_name = table_input.get('Name')
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.update(table_input)
|
||||
return ""
|
||||
|
||||
def get_table_versions(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
|
||||
return json.dumps({
|
||||
'Table': {
|
||||
'DatabaseName': table.database_name,
|
||||
'Name': table.name,
|
||||
'PartitionKeys': table.partition_keys,
|
||||
'StorageDescriptor': table.storage_descriptor
|
||||
}
|
||||
"TableVersions": [
|
||||
{
|
||||
"Table": table.as_dict(version=n),
|
||||
"VersionId": str(n + 1),
|
||||
} for n in range(len(table.versions))
|
||||
],
|
||||
})
|
||||
|
||||
def get_table_version(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
ver_id = self.parameters.get('VersionId')
|
||||
|
||||
return json.dumps({
|
||||
"TableVersion": {
|
||||
"Table": table.as_dict(version=ver_id),
|
||||
"VersionId": ver_id,
|
||||
},
|
||||
})
|
||||
|
||||
def get_tables(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
tables = self.glue_backend.get_tables(database_name)
|
||||
return json.dumps(
|
||||
{
|
||||
'TableList': [
|
||||
{
|
||||
'DatabaseName': table.database_name,
|
||||
'Name': table.name,
|
||||
'PartitionKeys': table.partition_keys,
|
||||
'StorageDescriptor': table.storage_descriptor
|
||||
} for table in tables
|
||||
]
|
||||
}
|
||||
)
|
||||
return json.dumps({
|
||||
'TableList': [
|
||||
table.as_dict() for table in tables
|
||||
]
|
||||
})
|
||||
|
||||
def get_partitions(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
if 'Expression' in self.parameters:
|
||||
raise NotImplementedError("Expression filtering in get_partitions is not implemented in moto")
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
|
||||
return json.dumps({
|
||||
'Partitions': [
|
||||
p.as_dict() for p in table.get_partitions()
|
||||
]
|
||||
})
|
||||
|
||||
def get_partition(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
values = self.parameters.get('PartitionValues')
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
|
||||
p = table.get_partition(values)
|
||||
|
||||
return json.dumps({'Partition': p.as_dict()})
|
||||
|
||||
def create_partition(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
part_input = self.parameters.get('PartitionInput')
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.create_partition(part_input)
|
||||
|
||||
return ""
|
||||
|
||||
def update_partition(self):
|
||||
database_name = self.parameters.get('DatabaseName')
|
||||
table_name = self.parameters.get('TableName')
|
||||
part_input = self.parameters.get('PartitionInput')
|
||||
part_to_update = self.parameters.get('PartitionValueList')
|
||||
|
||||
table = self.glue_backend.get_table(database_name, table_name)
|
||||
table.update_partition(part_to_update, part_input)
|
||||
|
||||
return ""
|
||||
|
@ -255,7 +255,15 @@ class Group(BaseModel):
|
||||
|
||||
@property
|
||||
def arn(self):
|
||||
return "arn:aws:iam::{0}:group/{1}".format(ACCOUNT_ID, self.path)
|
||||
if self.path == '/':
|
||||
return "arn:aws:iam::{0}:group/{1}".format(ACCOUNT_ID, self.name)
|
||||
|
||||
else:
|
||||
return "arn:aws:iam::{0}:group/{1}/{2}".format(ACCOUNT_ID, self.path, self.name)
|
||||
|
||||
@property
|
||||
def create_date(self):
|
||||
return self.created
|
||||
|
||||
def get_policy(self, policy_name):
|
||||
try:
|
||||
|
@ -285,7 +285,7 @@ class IamResponse(BaseResponse):
|
||||
|
||||
def create_group(self):
|
||||
group_name = self._get_param('GroupName')
|
||||
path = self._get_param('Path')
|
||||
path = self._get_param('Path', '/')
|
||||
|
||||
group = iam_backend.create_group(group_name, path)
|
||||
template = self.response_template(CREATE_GROUP_TEMPLATE)
|
||||
@ -1007,6 +1007,7 @@ CREATE_GROUP_TEMPLATE = """<CreateGroupResponse>
|
||||
<GroupName>{{ group.name }}</GroupName>
|
||||
<GroupId>{{ group.id }}</GroupId>
|
||||
<Arn>{{ group.arn }}</Arn>
|
||||
<CreateDate>{{ group.create_date }}</CreateDate>
|
||||
</Group>
|
||||
</CreateGroupResult>
|
||||
<ResponseMetadata>
|
||||
@ -1021,6 +1022,7 @@ GET_GROUP_TEMPLATE = """<GetGroupResponse>
|
||||
<GroupName>{{ group.name }}</GroupName>
|
||||
<GroupId>{{ group.id }}</GroupId>
|
||||
<Arn>{{ group.arn }}</Arn>
|
||||
<CreateDate>{{ group.create_date }}</CreateDate>
|
||||
</Group>
|
||||
<Users>
|
||||
{% for user in group.users %}
|
||||
@ -1384,10 +1386,6 @@ GET_ACCOUNT_AUTHORIZATION_DETAILS_TEMPLATE = """<GetAccountAuthorizationDetailsR
|
||||
</member>
|
||||
{% endfor %}
|
||||
</UserDetailList>
|
||||
<Marker>
|
||||
EXAMPLEkakv9BCuUNFDtxWSyfzetYwEx2ADc8dnzfvERF5S6YMvXKx41t6gCl/eeaCX3Jo94/
|
||||
bKqezEAg8TEVS99EKFLxm3jtbpl25FDWEXAMPLE
|
||||
</Marker>
|
||||
<GroupDetailList>
|
||||
{% for group in groups %}
|
||||
<member>
|
||||
|
@ -2,8 +2,10 @@ from __future__ import unicode_literals
|
||||
|
||||
import boto.kms
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.utils import iso_8601_datetime_without_milliseconds
|
||||
from .utils import generate_key_id
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class Key(BaseModel):
|
||||
@ -12,11 +14,13 @@ class Key(BaseModel):
|
||||
self.id = generate_key_id()
|
||||
self.policy = policy
|
||||
self.key_usage = key_usage
|
||||
self.key_state = "Enabled"
|
||||
self.description = description
|
||||
self.enabled = True
|
||||
self.region = region
|
||||
self.account_id = "0123456789012"
|
||||
self.key_rotation_status = False
|
||||
self.deletion_date = None
|
||||
|
||||
@property
|
||||
def physical_resource_id(self):
|
||||
@ -27,7 +31,7 @@ class Key(BaseModel):
|
||||
return "arn:aws:kms:{0}:{1}:key/{2}".format(self.region, self.account_id, self.id)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
key_dict = {
|
||||
"KeyMetadata": {
|
||||
"AWSAccountId": self.account_id,
|
||||
"Arn": self.arn,
|
||||
@ -36,8 +40,12 @@ class Key(BaseModel):
|
||||
"Enabled": self.enabled,
|
||||
"KeyId": self.id,
|
||||
"KeyUsage": self.key_usage,
|
||||
"KeyState": self.key_state,
|
||||
}
|
||||
}
|
||||
if self.key_state == 'PendingDeletion':
|
||||
key_dict['KeyMetadata']['DeletionDate'] = iso_8601_datetime_without_milliseconds(self.deletion_date)
|
||||
return key_dict
|
||||
|
||||
def delete(self, region_name):
|
||||
kms_backends[region_name].delete_key(self.id)
|
||||
@ -138,6 +146,29 @@ class KmsBackend(BaseBackend):
|
||||
def get_key_policy(self, key_id):
|
||||
return self.keys[self.get_key_id(key_id)].policy
|
||||
|
||||
def disable_key(self, key_id):
|
||||
if key_id in self.keys:
|
||||
self.keys[key_id].enabled = False
|
||||
self.keys[key_id].key_state = 'Disabled'
|
||||
|
||||
def enable_key(self, key_id):
|
||||
if key_id in self.keys:
|
||||
self.keys[key_id].enabled = True
|
||||
self.keys[key_id].key_state = 'Enabled'
|
||||
|
||||
def cancel_key_deletion(self, key_id):
|
||||
if key_id in self.keys:
|
||||
self.keys[key_id].key_state = 'Disabled'
|
||||
self.keys[key_id].deletion_date = None
|
||||
|
||||
def schedule_key_deletion(self, key_id, pending_window_in_days):
|
||||
if key_id in self.keys:
|
||||
if 7 <= pending_window_in_days <= 30:
|
||||
self.keys[key_id].enabled = False
|
||||
self.keys[key_id].key_state = 'PendingDeletion'
|
||||
self.keys[key_id].deletion_date = datetime.now() + timedelta(days=pending_window_in_days)
|
||||
return iso_8601_datetime_without_milliseconds(self.keys[key_id].deletion_date)
|
||||
|
||||
|
||||
kms_backends = {}
|
||||
for region in boto.kms.regions():
|
||||
|
@ -233,6 +233,56 @@ class KmsResponse(BaseResponse):
|
||||
value = self.parameters.get("CiphertextBlob")
|
||||
return json.dumps({"Plaintext": base64.b64decode(value).decode("utf-8")})
|
||||
|
||||
def disable_key(self):
|
||||
key_id = self.parameters.get('KeyId')
|
||||
_assert_valid_key_id(self.kms_backend.get_key_id(key_id))
|
||||
try:
|
||||
self.kms_backend.disable_key(key_id)
|
||||
except KeyError:
|
||||
raise JSONResponseError(404, 'Not Found', body={
|
||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
||||
'__type': 'NotFoundException'})
|
||||
return json.dumps(None)
|
||||
|
||||
def enable_key(self):
|
||||
key_id = self.parameters.get('KeyId')
|
||||
_assert_valid_key_id(self.kms_backend.get_key_id(key_id))
|
||||
try:
|
||||
self.kms_backend.enable_key(key_id)
|
||||
except KeyError:
|
||||
raise JSONResponseError(404, 'Not Found', body={
|
||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
||||
'__type': 'NotFoundException'})
|
||||
return json.dumps(None)
|
||||
|
||||
def cancel_key_deletion(self):
|
||||
key_id = self.parameters.get('KeyId')
|
||||
_assert_valid_key_id(self.kms_backend.get_key_id(key_id))
|
||||
try:
|
||||
self.kms_backend.cancel_key_deletion(key_id)
|
||||
except KeyError:
|
||||
raise JSONResponseError(404, 'Not Found', body={
|
||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
||||
'__type': 'NotFoundException'})
|
||||
return json.dumps({'KeyId': key_id})
|
||||
|
||||
def schedule_key_deletion(self):
|
||||
key_id = self.parameters.get('KeyId')
|
||||
if self.parameters.get('PendingWindowInDays') is None:
|
||||
pending_window_in_days = 30
|
||||
else:
|
||||
pending_window_in_days = self.parameters.get('PendingWindowInDays')
|
||||
_assert_valid_key_id(self.kms_backend.get_key_id(key_id))
|
||||
try:
|
||||
return json.dumps({
|
||||
'KeyId': key_id,
|
||||
'DeletionDate': self.kms_backend.schedule_key_deletion(key_id, pending_window_in_days)
|
||||
})
|
||||
except KeyError:
|
||||
raise JSONResponseError(404, 'Not Found', body={
|
||||
'message': "Key 'arn:aws:kms:{region}:012345678912:key/{key_id}' does not exist".format(region=self.region, key_id=key_id),
|
||||
'__type': 'NotFoundException'})
|
||||
|
||||
|
||||
def _assert_valid_key_id(key_id):
|
||||
if not re.match(r'^[A-F0-9]{8}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{4}-[A-F0-9]{12}$', key_id, re.IGNORECASE):
|
||||
|
@ -19,7 +19,7 @@ class LogEvent:
|
||||
|
||||
def to_filter_dict(self):
|
||||
return {
|
||||
"eventId": self.eventId,
|
||||
"eventId": str(self.eventId),
|
||||
"ingestionTime": self.ingestionTime,
|
||||
# "logStreamName":
|
||||
"message": self.message,
|
||||
|
6
moto/organizations/__init__.py
Normal file
6
moto/organizations/__init__.py
Normal file
@ -0,0 +1,6 @@
|
||||
from __future__ import unicode_literals
from .models import organizations_backend
from ..core.models import base_decorator

# Organizations is a global (non-regional) AWS service, so a single shared
# backend instance is registered under the "global" key.
organizations_backends = {"global": organizations_backend}
mock_organizations = base_decorator(organizations_backends)
|
296
moto/organizations/models.py
Normal file
296
moto/organizations/models.py
Normal file
@ -0,0 +1,296 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from moto.core import BaseBackend, BaseModel
|
||||
from moto.core.exceptions import RESTError
|
||||
from moto.core.utils import unix_time
|
||||
from moto.organizations import utils
|
||||
|
||||
|
||||
class FakeOrganization(BaseModel):
    """In-memory stand-in for an AWS Organization.

    Holds the identifiers and feature set returned by the
    CreateOrganization / DescribeOrganization API actions.
    """

    def __init__(self, feature_set):
        self.id = utils.make_random_org_id()
        self.root_id = utils.make_random_root_id()
        self.feature_set = feature_set
        self.master_account_id = utils.MASTER_ACCOUNT_ID
        self.master_account_email = utils.MASTER_ACCOUNT_EMAIL
        self.available_policy_types = [
            {'Type': 'SERVICE_CONTROL_POLICY', 'Status': 'ENABLED'},
        ]

    @property
    def arn(self):
        # ARN of the organization itself.
        return utils.ORGANIZATION_ARN_FORMAT.format(self.master_account_id, self.id)

    @property
    def master_account_arn(self):
        # ARN of the management (master) account.
        return utils.MASTER_ACCOUNT_ARN_FORMAT.format(self.master_account_id, self.id)

    def describe(self):
        """Return the DescribeOrganization response shape."""
        organization = {
            'Id': self.id,
            'Arn': self.arn,
            'FeatureSet': self.feature_set,
            'MasterAccountArn': self.master_account_arn,
            'MasterAccountId': self.master_account_id,
            'MasterAccountEmail': self.master_account_email,
            'AvailablePolicyTypes': self.available_policy_types,
        }
        return {'Organization': organization}
||||
|
||||
|
||||
class FakeAccount(BaseModel):
    """A member account belonging to a FakeOrganization."""

    def __init__(self, organization, **kwargs):
        self.organization_id = organization.id
        self.master_account_id = organization.master_account_id
        self.create_account_status_id = utils.make_random_create_account_status_id()
        self.id = utils.make_random_account_id()
        self.name = kwargs['AccountName']
        self.email = kwargs['Email']
        self.create_time = datetime.datetime.utcnow()
        self.status = 'ACTIVE'
        self.joined_method = 'CREATED'
        # New accounts start directly under the organization root.
        self.parent_id = organization.root_id

    @property
    def arn(self):
        return utils.ACCOUNT_ARN_FORMAT.format(
            self.master_account_id,
            self.organization_id,
            self.id,
        )

    @property
    def create_account_status(self):
        # Account creation is synchronous in this mock, so the status is
        # reported as already SUCCEEDED with identical request/complete times.
        status = {
            'Id': self.create_account_status_id,
            'AccountName': self.name,
            'State': 'SUCCEEDED',
            'RequestedTimestamp': unix_time(self.create_time),
            'CompletedTimestamp': unix_time(self.create_time),
            'AccountId': self.id,
        }
        return {'CreateAccountStatus': status}

    def describe(self):
        """Return the DescribeAccount response shape."""
        account = {
            'Id': self.id,
            'Arn': self.arn,
            'Email': self.email,
            'Name': self.name,
            'Status': self.status,
            'JoinedMethod': self.joined_method,
            'JoinedTimestamp': unix_time(self.create_time),
        }
        return {'Account': account}
|
||||
|
||||
|
||||
class FakeOrganizationalUnit(BaseModel):
    """An organizational unit (OU) within an organization's tree."""

    def __init__(self, organization, **kwargs):
        self.type = 'ORGANIZATIONAL_UNIT'
        self.organization_id = organization.id
        self.master_account_id = organization.master_account_id
        self.id = utils.make_random_ou_id(organization.root_id)
        self.name = kwargs.get('Name')
        self.parent_id = kwargs.get('ParentId')
        # Subclasses (FakeRoot) substitute a different ARN template.
        self._arn_format = utils.OU_ARN_FORMAT

    @property
    def arn(self):
        return self._arn_format.format(
            self.master_account_id,
            self.organization_id,
            self.id,
        )

    def describe(self):
        """Return the DescribeOrganizationalUnit response shape."""
        return {
            'OrganizationalUnit': {
                'Id': self.id,
                'Arn': self.arn,
                'Name': self.name,
            }
        }
|
||||
|
||||
|
||||
class FakeRoot(FakeOrganizationalUnit):
    """The root container at the top of the organization's OU tree."""

    def __init__(self, organization, **kwargs):
        super(FakeRoot, self).__init__(organization, **kwargs)
        self.type = 'ROOT'
        # The root's id comes from the organization rather than being random.
        self.id = organization.root_id
        self.name = 'Root'
        self.policy_types = [
            {'Type': 'SERVICE_CONTROL_POLICY', 'Status': 'ENABLED'},
        ]
        self._arn_format = utils.ROOT_ARN_FORMAT

    def describe(self):
        # Unlike regular OUs, roots are described without a wrapping key.
        return {
            'Id': self.id,
            'Arn': self.arn,
            'Name': self.name,
            'PolicyTypes': self.policy_types,
        }
|
||||
|
||||
|
||||
class OrganizationsBackend(BaseBackend):
    """In-memory backend holding one organization plus its accounts and OUs."""

    def __init__(self):
        self.org = None       # the FakeOrganization, once created
        self.accounts = []    # FakeAccount instances
        self.ou = []          # FakeRoot / FakeOrganizationalUnit instances

    def create_organization(self, **kwargs):
        """Create the organization and its implicit root."""
        self.org = FakeOrganization(kwargs['FeatureSet'])
        self.ou.append(FakeRoot(self.org))
        return self.org.describe()

    def describe_organization(self):
        if self.org is None:
            raise RESTError(
                'AWSOrganizationsNotInUseException',
                "Your account is not a member of an organization."
            )
        return self.org.describe()

    def list_roots(self):
        roots = [ou.describe() for ou in self.ou if isinstance(ou, FakeRoot)]
        return dict(Roots=roots)

    def create_organizational_unit(self, **kwargs):
        new_ou = FakeOrganizationalUnit(self.org, **kwargs)
        self.ou.append(new_ou)
        return new_ou.describe()

    def get_organizational_unit_by_id(self, ou_id):
        """Look up an OU (or root) by id, raising if it does not exist."""
        for ou in self.ou:
            if ou.id == ou_id:
                return ou
        raise RESTError(
            'OrganizationalUnitNotFoundException',
            "You specified an organizational unit that doesn't exist."
        )

    def validate_parent_id(self, parent_id):
        """Verify a parent id refers to a known OU/root; re-raise as ParentNotFound."""
        try:
            self.get_organizational_unit_by_id(parent_id)
        except RESTError:
            raise RESTError(
                'ParentNotFoundException',
                "You specified parent that doesn't exist."
            )
        return parent_id

    def describe_organizational_unit(self, **kwargs):
        ou = self.get_organizational_unit_by_id(kwargs['OrganizationalUnitId'])
        return ou.describe()

    def list_organizational_units_for_parent(self, **kwargs):
        parent_id = self.validate_parent_id(kwargs['ParentId'])
        units = [
            {'Id': ou.id, 'Arn': ou.arn, 'Name': ou.name}
            for ou in self.ou
            if ou.parent_id == parent_id
        ]
        return dict(OrganizationalUnits=units)

    def create_account(self, **kwargs):
        new_account = FakeAccount(self.org, **kwargs)
        self.accounts.append(new_account)
        return new_account.create_account_status

    def get_account_by_id(self, account_id):
        """Look up an account by id, raising if it does not exist."""
        for account in self.accounts:
            if account.id == account_id:
                return account
        raise RESTError(
            'AccountNotFoundException',
            "You specified an account that doesn't exist."
        )

    def describe_account(self, **kwargs):
        return self.get_account_by_id(kwargs['AccountId']).describe()

    def list_accounts(self):
        return dict(
            Accounts=[account.describe()['Account'] for account in self.accounts]
        )

    def list_accounts_for_parent(self, **kwargs):
        parent_id = self.validate_parent_id(kwargs['ParentId'])
        matching = [
            account.describe()['Account']
            for account in self.accounts
            if account.parent_id == parent_id
        ]
        return dict(Accounts=matching)

    def move_account(self, **kwargs):
        """Re-parent an account; both source and destination must exist."""
        new_parent_id = self.validate_parent_id(kwargs['DestinationParentId'])
        self.validate_parent_id(kwargs['SourceParentId'])
        account = self.get_account_by_id(kwargs['AccountId'])
        account.parent_id = new_parent_id

    def list_parents(self, **kwargs):
        child_id = kwargs['ChildId']
        # A 12-digit id is an account; anything else is treated as an OU id.
        if re.match(r'[0-9]{12}', child_id):
            child_object = self.get_account_by_id(child_id)
        else:
            child_object = self.get_organizational_unit_by_id(child_id)
        parents = [
            {'Id': ou.id, 'Type': ou.type}
            for ou in self.ou
            if ou.id == child_object.parent_id
        ]
        return dict(Parents=parents)

    def list_children(self, **kwargs):
        parent_id = self.validate_parent_id(kwargs['ParentId'])
        child_type = kwargs['ChildType']
        if child_type == 'ACCOUNT':
            candidates = self.accounts
        elif child_type == 'ORGANIZATIONAL_UNIT':
            candidates = self.ou
        else:
            raise RESTError(
                'InvalidInputException',
                'You specified an invalid value.'
            )
        children = [
            {'Id': obj.id, 'Type': child_type}
            for obj in candidates
            if obj.parent_id == parent_id
        ]
        return dict(Children=children)


organizations_backend = OrganizationsBackend()
|
87
moto/organizations/responses.py
Normal file
87
moto/organizations/responses.py
Normal file
@ -0,0 +1,87 @@
|
||||
from __future__ import unicode_literals
|
||||
import json
|
||||
|
||||
from moto.core.responses import BaseResponse
|
||||
from .models import organizations_backend
|
||||
|
||||
|
||||
class OrganizationsResponse(BaseResponse):
    """Maps Organizations API actions onto the in-memory backend."""

    @property
    def organizations_backend(self):
        # Organizations is a global service: one shared backend instance.
        return organizations_backend

    @property
    def request_params(self):
        """Parse the JSON request body, tolerating an empty/invalid body."""
        try:
            return json.loads(self.body)
        except ValueError:
            return {}

    def _get_param(self, param, default=None):
        return self.request_params.get(param, default)

    def create_organization(self):
        result = self.organizations_backend.create_organization(**self.request_params)
        return json.dumps(result)

    def describe_organization(self):
        result = self.organizations_backend.describe_organization()
        return json.dumps(result)

    def list_roots(self):
        result = self.organizations_backend.list_roots()
        return json.dumps(result)

    def create_organizational_unit(self):
        result = self.organizations_backend.create_organizational_unit(**self.request_params)
        return json.dumps(result)

    def describe_organizational_unit(self):
        result = self.organizations_backend.describe_organizational_unit(**self.request_params)
        return json.dumps(result)

    def list_organizational_units_for_parent(self):
        result = self.organizations_backend.list_organizational_units_for_parent(**self.request_params)
        return json.dumps(result)

    def list_parents(self):
        result = self.organizations_backend.list_parents(**self.request_params)
        return json.dumps(result)

    def create_account(self):
        result = self.organizations_backend.create_account(**self.request_params)
        return json.dumps(result)

    def describe_account(self):
        result = self.organizations_backend.describe_account(**self.request_params)
        return json.dumps(result)

    def list_accounts(self):
        result = self.organizations_backend.list_accounts()
        return json.dumps(result)

    def list_accounts_for_parent(self):
        result = self.organizations_backend.list_accounts_for_parent(**self.request_params)
        return json.dumps(result)

    def move_account(self):
        result = self.organizations_backend.move_account(**self.request_params)
        return json.dumps(result)

    def list_children(self):
        result = self.organizations_backend.list_children(**self.request_params)
        return json.dumps(result)
|
10
moto/organizations/urls.py
Normal file
10
moto/organizations/urls.py
Normal file
@ -0,0 +1,10 @@
|
||||
from __future__ import unicode_literals
from .responses import OrganizationsResponse

# Every Organizations action is a JSON POST to the service root, so a
# single dispatch entry covers the whole API.
url_bases = [
    "https?://organizations.(.+).amazonaws.com",
]

url_paths = {
    '{0}/$': OrganizationsResponse.dispatch,
}
|
59
moto/organizations/utils.py
Normal file
59
moto/organizations/utils.py
Normal file
@ -0,0 +1,59 @@
|
||||
from __future__ import unicode_literals

import random
import string

MASTER_ACCOUNT_ID = '123456789012'
MASTER_ACCOUNT_EMAIL = 'fakeorg@moto-example.com'
ORGANIZATION_ARN_FORMAT = 'arn:aws:organizations::{0}:organization/{1}'
MASTER_ACCOUNT_ARN_FORMAT = 'arn:aws:organizations::{0}:account/{1}/{0}'
ACCOUNT_ARN_FORMAT = 'arn:aws:organizations::{0}:account/{1}/{2}'
ROOT_ARN_FORMAT = 'arn:aws:organizations::{0}:root/{1}/{2}'
OU_ARN_FORMAT = 'arn:aws:organizations::{0}:ou/{1}/{2}'

CHARSET = string.ascii_lowercase + string.digits
ORG_ID_SIZE = 10
ROOT_ID_SIZE = 4
ACCOUNT_ID_SIZE = 12
OU_ID_SUFFIX_SIZE = 8
CREATE_ACCOUNT_STATUS_ID_SIZE = 8


def _random_chars(size, charset=CHARSET):
    """Return `size` characters drawn independently from `charset`."""
    return ''.join(random.choice(charset) for _ in range(size))


def make_random_org_id():
    """Return an organization ID, e.g. 'o-vipjnq5z86'.

    AWS requires "o-" followed by 10 to 32 lower-case letters or digits.
    """
    return 'o-' + _random_chars(ORG_ID_SIZE)


def make_random_root_id():
    """Return a root ID, e.g. 'r-3zwx'.

    AWS requires "r-" followed by 4 to 32 lower-case letters or digits.
    """
    return 'r-' + _random_chars(ROOT_ID_SIZE)


def make_random_ou_id(root_id):
    """Return an organizational unit ID, e.g. 'ou-g8sd-5oe3bjaw'.

    AWS requires "ou-", the suffix of the containing root's ID (4 to 32
    lower-case letters or digits), another "-", and 8 to 32 additional
    lower-case letters or digits.
    """
    root_suffix = root_id.partition('-')[2]
    return 'ou-{0}-{1}'.format(root_suffix, _random_chars(OU_ID_SUFFIX_SIZE))


def make_random_account_id():
    """Return an account ID: exactly 12 digits, e.g. '488633172133'."""
    return _random_chars(ACCOUNT_ID_SIZE, string.digits)


def make_random_create_account_status_id():
    """Return a create-account request ID, e.g. 'car-35gxzwrp'.

    AWS requires "car-" followed by 8 to 32 lower-case letters or digits.
    """
    return 'car-' + _random_chars(CREATE_ACCOUNT_STATUS_ID_SIZE)
|
@ -341,8 +341,9 @@ class LifecycleAndFilter(BaseModel):
|
||||
class LifecycleRule(BaseModel):
|
||||
|
||||
def __init__(self, id=None, prefix=None, lc_filter=None, status=None, expiration_days=None,
|
||||
expiration_date=None, transition_days=None, expired_object_delete_marker=None,
|
||||
transition_date=None, storage_class=None):
|
||||
expiration_date=None, transition_days=None, transition_date=None, storage_class=None,
|
||||
expired_object_delete_marker=None, nve_noncurrent_days=None, nvt_noncurrent_days=None,
|
||||
nvt_storage_class=None, aimu_days=None):
|
||||
self.id = id
|
||||
self.prefix = prefix
|
||||
self.filter = lc_filter
|
||||
@ -351,8 +352,12 @@ class LifecycleRule(BaseModel):
|
||||
self.expiration_date = expiration_date
|
||||
self.transition_days = transition_days
|
||||
self.transition_date = transition_date
|
||||
self.expired_object_delete_marker = expired_object_delete_marker
|
||||
self.storage_class = storage_class
|
||||
self.expired_object_delete_marker = expired_object_delete_marker
|
||||
self.nve_noncurrent_days = nve_noncurrent_days
|
||||
self.nvt_noncurrent_days = nvt_noncurrent_days
|
||||
self.nvt_storage_class = nvt_storage_class
|
||||
self.aimu_days = aimu_days
|
||||
|
||||
|
||||
class CorsRule(BaseModel):
|
||||
@ -414,9 +419,32 @@ class FakeBucket(BaseModel):
|
||||
def set_lifecycle(self, rules):
|
||||
self.rules = []
|
||||
for rule in rules:
|
||||
# Extract and validate actions from Lifecycle rule
|
||||
expiration = rule.get('Expiration')
|
||||
transition = rule.get('Transition')
|
||||
|
||||
nve_noncurrent_days = None
|
||||
if rule.get('NoncurrentVersionExpiration') is not None:
|
||||
if rule["NoncurrentVersionExpiration"].get('NoncurrentDays') is None:
|
||||
raise MalformedXML()
|
||||
nve_noncurrent_days = rule["NoncurrentVersionExpiration"]["NoncurrentDays"]
|
||||
|
||||
nvt_noncurrent_days = None
|
||||
nvt_storage_class = None
|
||||
if rule.get('NoncurrentVersionTransition') is not None:
|
||||
if rule["NoncurrentVersionTransition"].get('NoncurrentDays') is None:
|
||||
raise MalformedXML()
|
||||
if rule["NoncurrentVersionTransition"].get('StorageClass') is None:
|
||||
raise MalformedXML()
|
||||
nvt_noncurrent_days = rule["NoncurrentVersionTransition"]["NoncurrentDays"]
|
||||
nvt_storage_class = rule["NoncurrentVersionTransition"]["StorageClass"]
|
||||
|
||||
aimu_days = None
|
||||
if rule.get('AbortIncompleteMultipartUpload') is not None:
|
||||
if rule["AbortIncompleteMultipartUpload"].get('DaysAfterInitiation') is None:
|
||||
raise MalformedXML()
|
||||
aimu_days = rule["AbortIncompleteMultipartUpload"]["DaysAfterInitiation"]
|
||||
|
||||
eodm = None
|
||||
if expiration and expiration.get("ExpiredObjectDeleteMarker") is not None:
|
||||
# This cannot be set if Date or Days is set:
|
||||
@ -459,11 +487,14 @@ class FakeBucket(BaseModel):
|
||||
status=rule['Status'],
|
||||
expiration_days=expiration.get('Days') if expiration else None,
|
||||
expiration_date=expiration.get('Date') if expiration else None,
|
||||
expired_object_delete_marker=eodm,
|
||||
transition_days=transition.get('Days') if transition else None,
|
||||
transition_date=transition.get('Date') if transition else None,
|
||||
storage_class=transition[
|
||||
'StorageClass'] if transition else None,
|
||||
storage_class=transition.get('StorageClass') if transition else None,
|
||||
expired_object_delete_marker=eodm,
|
||||
nve_noncurrent_days=nve_noncurrent_days,
|
||||
nvt_noncurrent_days=nvt_noncurrent_days,
|
||||
nvt_storage_class=nvt_storage_class,
|
||||
aimu_days=aimu_days,
|
||||
))
|
||||
|
||||
def delete_lifecycle(self):
|
||||
|
@ -10,6 +10,7 @@ import xmltodict
|
||||
|
||||
from moto.packages.httpretty.core import HTTPrettyRequest
|
||||
from moto.core.responses import _TemplateEnvironmentMixin
|
||||
from moto.core.utils import path_url
|
||||
|
||||
from moto.s3bucket_path.utils import bucket_name_from_url as bucketpath_bucket_name_from_url, \
|
||||
parse_key_name as bucketpath_parse_key_name, is_delete_keys as bucketpath_is_delete_keys
|
||||
@ -487,7 +488,7 @@ class ResponseObject(_TemplateEnvironmentMixin):
|
||||
if isinstance(request, HTTPrettyRequest):
|
||||
path = request.path
|
||||
else:
|
||||
path = request.full_path if hasattr(request, 'full_path') else request.path_url
|
||||
path = request.full_path if hasattr(request, 'full_path') else path_url(request.url)
|
||||
|
||||
if self.is_delete_keys(request, path, bucket_name):
|
||||
return self._bucket_response_delete_keys(request, body, bucket_name, headers)
|
||||
@ -708,7 +709,10 @@ class ResponseObject(_TemplateEnvironmentMixin):
|
||||
# Copy key
|
||||
# you can have a quoted ?version=abc with a version Id, so work on
|
||||
# we need to parse the unquoted string first
|
||||
src_key_parsed = urlparse(request.headers.get("x-amz-copy-source"))
|
||||
src_key = request.headers.get("x-amz-copy-source")
|
||||
if isinstance(src_key, six.binary_type):
|
||||
src_key = src_key.decode('utf-8')
|
||||
src_key_parsed = urlparse(src_key)
|
||||
src_bucket, src_key = unquote(src_key_parsed.path).\
|
||||
lstrip("/").split("/", 1)
|
||||
src_version_id = parse_qs(src_key_parsed.query).get(
|
||||
@ -1228,6 +1232,22 @@ S3_BUCKET_LIFECYCLE_CONFIGURATION = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
{% endif %}
|
||||
</Expiration>
|
||||
{% endif %}
|
||||
{% if rule.nvt_noncurrent_days and rule.nvt_storage_class %}
|
||||
<NoncurrentVersionTransition>
|
||||
<NoncurrentDays>{{ rule.nvt_noncurrent_days }}</NoncurrentDays>
|
||||
<StorageClass>{{ rule.nvt_storage_class }}</StorageClass>
|
||||
</NoncurrentVersionTransition>
|
||||
{% endif %}
|
||||
{% if rule.nve_noncurrent_days %}
|
||||
<NoncurrentVersionExpiration>
|
||||
<NoncurrentDays>{{ rule.nve_noncurrent_days }}</NoncurrentDays>
|
||||
</NoncurrentVersionExpiration>
|
||||
{% endif %}
|
||||
{% if rule.aimu_days %}
|
||||
<AbortIncompleteMultipartUpload>
|
||||
<DaysAfterInitiation>{{ rule.aimu_days }}</DaysAfterInitiation>
|
||||
</AbortIncompleteMultipartUpload>
|
||||
{% endif %}
|
||||
</Rule>
|
||||
{% endfor %}
|
||||
</LifecycleConfiguration>
|
||||
@ -1433,7 +1453,7 @@ S3_MULTIPART_LIST_RESPONSE = """<?xml version="1.0" encoding="UTF-8"?>
|
||||
</Owner>
|
||||
<StorageClass>STANDARD</StorageClass>
|
||||
<PartNumberMarker>1</PartNumberMarker>
|
||||
<NextPartNumberMarker>{{ count }} </NextPartNumberMarker>
|
||||
<NextPartNumberMarker>{{ count }}</NextPartNumberMarker>
|
||||
<MaxParts>{{ count }}</MaxParts>
|
||||
<IsTruncated>false</IsTruncated>
|
||||
{% for part in parts %}
|
||||
|
@ -8,7 +8,7 @@ freezegun
|
||||
flask
|
||||
boto>=2.45.0
|
||||
boto3>=1.4.4
|
||||
botocore>=1.8.36
|
||||
botocore>=1.12.13
|
||||
six>=1.9
|
||||
prompt-toolkit==1.0.14
|
||||
click==6.7
|
||||
|
8
setup.py
8
setup.py
@ -8,8 +8,8 @@ import sys
|
||||
install_requires = [
|
||||
"Jinja2>=2.7.3",
|
||||
"boto>=2.36.0",
|
||||
"boto3>=1.6.16,<1.8",
|
||||
"botocore>=1.9.16,<1.11",
|
||||
"boto3>=1.6.16",
|
||||
"botocore>=1.12.13",
|
||||
"cryptography>=2.3.0",
|
||||
"requests>=2.5",
|
||||
"xmltodict",
|
||||
@ -22,7 +22,7 @@ install_requires = [
|
||||
"mock",
|
||||
"docker>=2.5.1",
|
||||
"jsondiff==1.1.1",
|
||||
"aws-xray-sdk<0.96,>=0.93",
|
||||
"aws-xray-sdk!=0.96,>=0.93",
|
||||
"responses>=0.9.0",
|
||||
]
|
||||
|
||||
@ -40,7 +40,7 @@ else:
|
||||
|
||||
setup(
|
||||
name='moto',
|
||||
version='1.3.6',
|
||||
version='1.3.7',
|
||||
description='A library that allows your python tests to easily'
|
||||
' mock out the boto library',
|
||||
author='Steve Pulec',
|
||||
|
@ -1084,3 +1084,36 @@ def test_create_usage_plan_key_non_existent_api_key():
|
||||
# Attempt to create a usage plan key for a API key that doesn't exists
|
||||
payload = {'usagePlanId': usage_plan_id, 'keyId': 'non-existent', 'keyType': 'API_KEY' }
|
||||
client.create_usage_plan_key.when.called_with(**payload).should.throw(ClientError)
|
||||
|
||||
|
||||
@mock_apigateway
|
||||
def test_get_usage_plans_using_key_id():
|
||||
region_name = 'us-west-2'
|
||||
client = boto3.client('apigateway', region_name=region_name)
|
||||
|
||||
# Create 2 Usage Plans
|
||||
# one will be attached to an API Key, the other will remain unattached
|
||||
attached_plan = client.create_usage_plan(name='Attached')
|
||||
unattached_plan = client.create_usage_plan(name='Unattached')
|
||||
|
||||
# Create an API key
|
||||
# to attach to the usage plan
|
||||
key_name = 'test-api-key'
|
||||
response = client.create_api_key(name=key_name)
|
||||
key_id = response["id"]
|
||||
|
||||
# Create a Usage Plan Key
|
||||
# Attached the Usage Plan and API Key
|
||||
key_type = 'API_KEY'
|
||||
payload = {'usagePlanId': attached_plan['id'], 'keyId': key_id, 'keyType': key_type}
|
||||
response = client.create_usage_plan_key(**payload)
|
||||
|
||||
# All usage plans should be returned when keyId is not included
|
||||
all_plans = client.get_usage_plans()
|
||||
len(all_plans['items']).should.equal(2)
|
||||
|
||||
# Only the usage plan attached to the given api key are included
|
||||
only_plans_with_key = client.get_usage_plans(keyId=key_id)
|
||||
len(only_plans_with_key['items']).should.equal(1)
|
||||
only_plans_with_key['items'][0]['name'].should.equal(attached_plan['name'])
|
||||
only_plans_with_key['items'][0]['id'].should.equal(attached_plan['id'])
|
||||
|
@ -31,6 +31,7 @@ def test_create_identity_pool():
|
||||
# testing a helper function
|
||||
def test_get_random_identity_id():
|
||||
assert len(get_random_identity_id('us-west-2')) > 0
|
||||
assert len(get_random_identity_id('us-west-2').split(':')[1]) == 19
|
||||
|
||||
|
||||
@mock_cognitoidentity
|
||||
@ -69,3 +70,16 @@ def test_get_open_id_token_for_developer_identity():
|
||||
)
|
||||
assert len(result['Token'])
|
||||
assert result['IdentityId'] == '12345'
|
||||
|
||||
@mock_cognitoidentity
|
||||
def test_get_open_id_token_for_developer_identity_when_no_explicit_identity_id():
|
||||
conn = boto3.client('cognito-identity', 'us-west-2')
|
||||
result = conn.get_open_id_token_for_developer_identity(
|
||||
IdentityPoolId='us-west-2:12345',
|
||||
Logins={
|
||||
'someurl': '12345'
|
||||
},
|
||||
TokenDuration=123
|
||||
)
|
||||
assert len(result['Token']) > 0
|
||||
assert len(result['IdentityId']) > 0
|
||||
|
@ -6,6 +6,7 @@ import os
|
||||
import uuid
|
||||
|
||||
from jose import jws
|
||||
|
||||
from moto import mock_cognitoidp
|
||||
import sure # noqa
|
||||
|
||||
@ -24,6 +25,7 @@ def test_create_user_pool():
|
||||
)
|
||||
|
||||
result["UserPool"]["Id"].should_not.be.none
|
||||
result["UserPool"]["Id"].should.match(r'[\w-]+_[0-9a-zA-Z]+')
|
||||
result["UserPool"]["Name"].should.equal(name)
|
||||
result["UserPool"]["LambdaConfig"]["PreSignUp"].should.equal(value)
|
||||
|
||||
@ -341,6 +343,7 @@ def test_admin_create_user():
|
||||
result["User"]["Attributes"].should.have.length_of(1)
|
||||
result["User"]["Attributes"][0]["Name"].should.equal("thing")
|
||||
result["User"]["Attributes"][0]["Value"].should.equal(value)
|
||||
result["User"]["Enabled"].should.equal(True)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
@ -365,6 +368,22 @@ def test_admin_get_user():
|
||||
result["UserAttributes"][0]["Value"].should.equal(value)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
def test_admin_get_missing_user():
|
||||
conn = boto3.client("cognito-idp", "us-west-2")
|
||||
|
||||
username = str(uuid.uuid4())
|
||||
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||
|
||||
caught = False
|
||||
try:
|
||||
conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
|
||||
except conn.exceptions.UserNotFoundException:
|
||||
caught = True
|
||||
|
||||
caught.should.be.true
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
def test_list_users():
|
||||
conn = boto3.client("cognito-idp", "us-west-2")
|
||||
@ -377,6 +396,37 @@ def test_list_users():
|
||||
result["Users"][0]["Username"].should.equal(username)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
def test_admin_disable_user():
|
||||
conn = boto3.client("cognito-idp", "us-west-2")
|
||||
|
||||
username = str(uuid.uuid4())
|
||||
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||
|
||||
result = conn.admin_disable_user(UserPoolId=user_pool_id, Username=username)
|
||||
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
|
||||
|
||||
conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \
|
||||
["Enabled"].should.equal(False)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
def test_admin_enable_user():
|
||||
conn = boto3.client("cognito-idp", "us-west-2")
|
||||
|
||||
username = str(uuid.uuid4())
|
||||
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||
conn.admin_create_user(UserPoolId=user_pool_id, Username=username)
|
||||
conn.admin_disable_user(UserPoolId=user_pool_id, Username=username)
|
||||
|
||||
result = conn.admin_enable_user(UserPoolId=user_pool_id, Username=username)
|
||||
list(result.keys()).should.equal(["ResponseMetadata"]) # No response expected
|
||||
|
||||
conn.admin_get_user(UserPoolId=user_pool_id, Username=username) \
|
||||
["Enabled"].should.equal(True)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
def test_admin_delete_user():
|
||||
conn = boto3.client("cognito-idp", "us-west-2")
|
||||
@ -389,7 +439,7 @@ def test_admin_delete_user():
|
||||
caught = False
|
||||
try:
|
||||
conn.admin_get_user(UserPoolId=user_pool_id, Username=username)
|
||||
except conn.exceptions.ResourceNotFoundException:
|
||||
except conn.exceptions.UserNotFoundException:
|
||||
caught = True
|
||||
|
||||
caught.should.be.true
|
||||
@ -399,15 +449,22 @@ def authentication_flow(conn):
|
||||
username = str(uuid.uuid4())
|
||||
temporary_password = str(uuid.uuid4())
|
||||
user_pool_id = conn.create_user_pool(PoolName=str(uuid.uuid4()))["UserPool"]["Id"]
|
||||
user_attribute_name = str(uuid.uuid4())
|
||||
user_attribute_value = str(uuid.uuid4())
|
||||
client_id = conn.create_user_pool_client(
|
||||
UserPoolId=user_pool_id,
|
||||
ClientName=str(uuid.uuid4()),
|
||||
ReadAttributes=[user_attribute_name]
|
||||
)["UserPoolClient"]["ClientId"]
|
||||
|
||||
conn.admin_create_user(
|
||||
UserPoolId=user_pool_id,
|
||||
Username=username,
|
||||
TemporaryPassword=temporary_password,
|
||||
UserAttributes=[{
|
||||
'Name': user_attribute_name,
|
||||
'Value': user_attribute_value
|
||||
}]
|
||||
)
|
||||
|
||||
result = conn.admin_initiate_auth(
|
||||
@ -446,6 +503,9 @@ def authentication_flow(conn):
|
||||
"access_token": result["AuthenticationResult"]["AccessToken"],
|
||||
"username": username,
|
||||
"password": new_password,
|
||||
"additional_fields": {
|
||||
user_attribute_name: user_attribute_value
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -475,6 +535,8 @@ def test_token_legitimacy():
|
||||
access_claims = json.loads(jws.verify(access_token, json_web_key, "RS256"))
|
||||
access_claims["iss"].should.equal(issuer)
|
||||
access_claims["aud"].should.equal(client_id)
|
||||
for k, v in outputs["additional_fields"].items():
|
||||
access_claims[k].should.equal(v)
|
||||
|
||||
|
||||
@mock_cognitoidp
|
||||
|
@ -700,8 +700,8 @@ def test_filter_expression():
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression('Id IN :v0', {}, {':v0': {'NS': [7, 8, 9]}})
|
||||
filter_expr.expr(row1).should.be(True)
|
||||
|
||||
# attribute function tests
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_exists(Id) AND attribute_not_exists(User)', {}, {})
|
||||
# attribute function tests (with extra spaces)
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_exists(Id) AND attribute_not_exists (User)', {}, {})
|
||||
filter_expr.expr(row1).should.be(True)
|
||||
|
||||
filter_expr = moto.dynamodb2.comparisons.get_filter_expression('attribute_type(Id, N)', {}, {})
|
||||
@ -1220,7 +1220,8 @@ def test_update_if_not_exists():
|
||||
'forum_name': 'the-key',
|
||||
'subject': '123'
|
||||
},
|
||||
UpdateExpression='SET created_at = if_not_exists(created_at, :created_at)',
|
||||
# if_not_exists without space
|
||||
UpdateExpression='SET created_at=if_not_exists(created_at,:created_at)',
|
||||
ExpressionAttributeValues={
|
||||
':created_at': 123
|
||||
}
|
||||
@ -1233,7 +1234,8 @@ def test_update_if_not_exists():
|
||||
'forum_name': 'the-key',
|
||||
'subject': '123'
|
||||
},
|
||||
UpdateExpression='SET created_at = if_not_exists(created_at, :created_at)',
|
||||
# if_not_exists with space
|
||||
UpdateExpression='SET created_at = if_not_exists (created_at, :created_at)',
|
||||
ExpressionAttributeValues={
|
||||
':created_at': 456
|
||||
}
|
||||
|
@ -615,8 +615,8 @@ def test_copy_snapshot():
|
||||
dest = dest_ec2.Snapshot(copy_snapshot_response['SnapshotId'])
|
||||
|
||||
attribs = ['data_encryption_key_id', 'encrypted',
|
||||
'kms_key_id', 'owner_alias', 'owner_id', 'progress',
|
||||
'start_time', 'state', 'state_message',
|
||||
'kms_key_id', 'owner_alias', 'owner_id',
|
||||
'progress', 'state', 'state_message',
|
||||
'tags', 'volume_id', 'volume_size']
|
||||
|
||||
for attrib in attribs:
|
||||
|
@ -631,7 +631,22 @@ def test_delete_service():
|
||||
response['service']['schedulingStrategy'].should.equal('REPLICA')
|
||||
response['service']['taskDefinition'].should.equal(
|
||||
'arn:aws:ecs:us-east-1:012345678910:task-definition/test_ecs_task:1')
|
||||
|
||||
|
||||
|
||||
@mock_ecs
|
||||
def test_update_non_existant_service():
|
||||
client = boto3.client('ecs', region_name='us-east-1')
|
||||
try:
|
||||
client.update_service(
|
||||
cluster="my-clustet",
|
||||
service="my-service",
|
||||
desiredCount=0,
|
||||
)
|
||||
except ClientError as exc:
|
||||
error_code = exc.response['Error']['Code']
|
||||
error_code.should.equal('ServiceNotFoundException')
|
||||
else:
|
||||
raise Exception("Didn't raise ClientError")
|
||||
|
||||
|
||||
@mock_ec2
|
||||
|
@ -29,3 +29,28 @@ TABLE_INPUT = {
|
||||
},
|
||||
'TableType': 'EXTERNAL_TABLE',
|
||||
}
|
||||
|
||||
|
||||
PARTITION_INPUT = {
|
||||
# 'DatabaseName': 'dbname',
|
||||
'StorageDescriptor': {
|
||||
'BucketColumns': [],
|
||||
'Columns': [],
|
||||
'Compressed': False,
|
||||
'InputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat',
|
||||
'Location': 's3://.../partition=value',
|
||||
'NumberOfBuckets': -1,
|
||||
'OutputFormat': 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat',
|
||||
'Parameters': {},
|
||||
'SerdeInfo': {
|
||||
'Parameters': {'path': 's3://...', 'serialization.format': '1'},
|
||||
'SerializationLibrary': 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'},
|
||||
'SkewedInfo': {'SkewedColumnNames': [],
|
||||
'SkewedColumnValueLocationMaps': {},
|
||||
'SkewedColumnValues': []},
|
||||
'SortColumns': [],
|
||||
'StoredAsSubDirectories': False,
|
||||
},
|
||||
# 'TableName': 'source_table',
|
||||
# 'Values': ['2018-06-26'],
|
||||
}
|
||||
|
@ -2,7 +2,7 @@ from __future__ import unicode_literals
|
||||
|
||||
import copy
|
||||
|
||||
from .fixtures.datacatalog import TABLE_INPUT
|
||||
from .fixtures.datacatalog import TABLE_INPUT, PARTITION_INPUT
|
||||
|
||||
|
||||
def create_database(client, database_name):
|
||||
@ -17,22 +17,38 @@ def get_database(client, database_name):
|
||||
return client.get_database(Name=database_name)
|
||||
|
||||
|
||||
def create_table_input(table_name, s3_location, columns=[], partition_keys=[]):
|
||||
def create_table_input(database_name, table_name, columns=[], partition_keys=[]):
|
||||
table_input = copy.deepcopy(TABLE_INPUT)
|
||||
table_input['Name'] = table_name
|
||||
table_input['PartitionKeys'] = partition_keys
|
||||
table_input['StorageDescriptor']['Columns'] = columns
|
||||
table_input['StorageDescriptor']['Location'] = s3_location
|
||||
table_input['StorageDescriptor']['Location'] = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
return table_input
|
||||
|
||||
|
||||
def create_table(client, database_name, table_name, table_input):
|
||||
def create_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.create_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input
|
||||
)
|
||||
|
||||
|
||||
def update_table(client, database_name, table_name, table_input=None, **kwargs):
|
||||
if table_input is None:
|
||||
table_input = create_table_input(database_name, table_name, **kwargs)
|
||||
|
||||
return client.update_table(
|
||||
DatabaseName=database_name,
|
||||
TableInput=table_input,
|
||||
)
|
||||
|
||||
|
||||
def get_table(client, database_name, table_name):
|
||||
return client.get_table(
|
||||
DatabaseName=database_name,
|
||||
@ -44,3 +60,60 @@ def get_tables(client, database_name):
|
||||
return client.get_tables(
|
||||
DatabaseName=database_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_versions(client, database_name, table_name):
|
||||
return client.get_table_versions(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name
|
||||
)
|
||||
|
||||
|
||||
def get_table_version(client, database_name, table_name, version_id):
|
||||
return client.get_table_version(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
VersionId=version_id,
|
||||
)
|
||||
|
||||
|
||||
def create_partition_input(database_name, table_name, values=[], columns=[]):
|
||||
root_path = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
part_input = copy.deepcopy(PARTITION_INPUT)
|
||||
part_input['Values'] = values
|
||||
part_input['StorageDescriptor']['Columns'] = columns
|
||||
part_input['StorageDescriptor']['SerdeInfo']['Parameters']['path'] = root_path
|
||||
return part_input
|
||||
|
||||
|
||||
def create_partition(client, database_name, table_name, partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.create_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input
|
||||
)
|
||||
|
||||
|
||||
def update_partition(client, database_name, table_name, old_values=[], partiton_input=None, **kwargs):
|
||||
if partiton_input is None:
|
||||
partiton_input = create_partition_input(database_name, table_name, **kwargs)
|
||||
return client.update_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionInput=partiton_input,
|
||||
PartitionValueList=old_values,
|
||||
)
|
||||
|
||||
|
||||
def get_partition(client, database_name, table_name, values):
|
||||
return client.get_partition(
|
||||
DatabaseName=database_name,
|
||||
TableName=table_name,
|
||||
PartitionValues=values,
|
||||
)
|
||||
|
@ -1,10 +1,15 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sure # noqa
|
||||
import re
|
||||
from nose.tools import assert_raises
|
||||
import boto3
|
||||
from botocore.client import ClientError
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
|
||||
from moto import mock_glue
|
||||
from . import helpers
|
||||
|
||||
@ -30,7 +35,19 @@ def test_create_database_already_exists():
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('DatabaseAlreadyExistsException')
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_database_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'nosuchdatabase'
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_database(client, database_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Database nosuchdatabase not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
@ -40,12 +57,7 @@ def test_create_table():
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'myspecialtable'
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
response = helpers.get_table(client, database_name, table_name)
|
||||
@ -63,18 +75,12 @@ def test_create_table_already_exists():
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'cantcreatethistabletwice'
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('TableAlreadyExistsException')
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
@ -87,11 +93,7 @@ def test_get_tables():
|
||||
table_inputs = {}
|
||||
|
||||
for table_name in table_names:
|
||||
s3_location = 's3://my-bucket/{database_name}/{table_name}'.format(
|
||||
database_name=database_name,
|
||||
table_name=table_name
|
||||
)
|
||||
table_input = helpers.create_table_input(table_name, s3_location)
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
table_inputs[table_name] = table_input
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
|
||||
@ -99,10 +101,326 @@ def test_get_tables():
|
||||
|
||||
tables = response['TableList']
|
||||
|
||||
assert len(tables) == 3
|
||||
tables.should.have.length_of(3)
|
||||
|
||||
for table in tables:
|
||||
table_name = table['Name']
|
||||
table_name.should.equal(table_inputs[table_name]['Name'])
|
||||
table['StorageDescriptor'].should.equal(table_inputs[table_name]['StorageDescriptor'])
|
||||
table['PartitionKeys'].should.equal(table_inputs[table_name]['PartitionKeys'])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_versions():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
table_name = 'myfirsttable'
|
||||
version_inputs = {}
|
||||
|
||||
table_input = helpers.create_table_input(database_name, table_name)
|
||||
helpers.create_table(client, database_name, table_name, table_input)
|
||||
version_inputs["1"] = table_input
|
||||
|
||||
columns = [{'Name': 'country', 'Type': 'string'}]
|
||||
table_input = helpers.create_table_input(database_name, table_name, columns=columns)
|
||||
helpers.update_table(client, database_name, table_name, table_input)
|
||||
version_inputs["2"] = table_input
|
||||
|
||||
# Updateing with an indentical input should still create a new version
|
||||
helpers.update_table(client, database_name, table_name, table_input)
|
||||
version_inputs["3"] = table_input
|
||||
|
||||
response = helpers.get_table_versions(client, database_name, table_name)
|
||||
|
||||
vers = response['TableVersions']
|
||||
|
||||
vers.should.have.length_of(3)
|
||||
vers[0]['Table']['StorageDescriptor']['Columns'].should.equal([])
|
||||
vers[-1]['Table']['StorageDescriptor']['Columns'].should.equal(columns)
|
||||
|
||||
for n, ver in enumerate(vers):
|
||||
n = str(n + 1)
|
||||
ver['VersionId'].should.equal(n)
|
||||
ver['Table']['Name'].should.equal(table_name)
|
||||
ver['Table']['StorageDescriptor'].should.equal(version_inputs[n]['StorageDescriptor'])
|
||||
ver['Table']['PartitionKeys'].should.equal(version_inputs[n]['PartitionKeys'])
|
||||
|
||||
response = helpers.get_table_version(client, database_name, table_name, "3")
|
||||
ver = response['TableVersion']
|
||||
|
||||
ver['VersionId'].should.equal("3")
|
||||
ver['Table']['Name'].should.equal(table_name)
|
||||
ver['Table']['StorageDescriptor']['Columns'].should.equal(columns)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_version_not_found():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table_version(client, database_name, 'myfirsttable', "20")
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('version', re.I)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_version_invalid_input():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table_version(client, database_name, 'myfirsttable', "10not-an-int")
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('InvalidInputException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table(client, database_name, 'myfirsttable')
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Table myfirsttable not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_table_when_database_not_exits():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'nosuchdatabase'
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_table(client, database_name, 'myfirsttable')
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('Database nosuchdatabase not found')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partitions_empty():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
|
||||
|
||||
response['Partitions'].should.have.length_of(0)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
before = datetime.now(pytz.utc)
|
||||
|
||||
part_input = helpers.create_partition_input(database_name, table_name, values=values)
|
||||
helpers.create_partition(client, database_name, table_name, part_input)
|
||||
|
||||
after = datetime.now(pytz.utc)
|
||||
|
||||
response = client.get_partitions(DatabaseName=database_name, TableName=table_name)
|
||||
|
||||
partitions = response['Partitions']
|
||||
|
||||
partitions.should.have.length_of(1)
|
||||
|
||||
partition = partitions[0]
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor'].should.equal(part_input['StorageDescriptor'])
|
||||
partition['Values'].should.equal(values)
|
||||
partition['CreationTime'].should.be.greater_than(before)
|
||||
partition['CreationTime'].should.be.lower_than(after)
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_create_partition_already_exist():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partition_not_found():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_partition(client, database_name, table_name, values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_get_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
values = [['2018-10-01'], ['2018-09-01']]
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values[0])
|
||||
helpers.create_partition(client, database_name, table_name, values=values[1])
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=values[1])
|
||||
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['Values'].should.equal(values[1])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_moving():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=['0000-00-00'], values=['2018-10-02'])
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_not_found_change_in_place():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=values, values=values)
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
exc.exception.response['Error']['Message'].should.match('partition')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_cannot_overwrite():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
helpers.create_database(client, database_name)
|
||||
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
|
||||
values = [['2018-10-01'], ['2018-09-01']]
|
||||
|
||||
helpers.create_partition(client, database_name, table_name, values=values[0])
|
||||
helpers.create_partition(client, database_name, table_name, values=values[1])
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.update_partition(client, database_name, table_name, old_values=values[0], values=values[1])
|
||||
|
||||
exc.exception.response['Error']['Code'].should.equal('AlreadyExistsException')
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
response = helpers.update_partition(
|
||||
client,
|
||||
database_name,
|
||||
table_name,
|
||||
old_values=values,
|
||||
values=values,
|
||||
columns=[{'Name': 'country', 'Type': 'string'}],
|
||||
)
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=values)
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
|
||||
|
||||
|
||||
@mock_glue
|
||||
def test_update_partition_move():
|
||||
client = boto3.client('glue', region_name='us-east-1')
|
||||
database_name = 'myspecialdatabase'
|
||||
table_name = 'myfirsttable'
|
||||
values = ['2018-10-01']
|
||||
new_values = ['2018-09-01']
|
||||
|
||||
helpers.create_database(client, database_name)
|
||||
helpers.create_table(client, database_name, table_name)
|
||||
helpers.create_partition(client, database_name, table_name, values=values)
|
||||
|
||||
response = helpers.update_partition(
|
||||
client,
|
||||
database_name,
|
||||
table_name,
|
||||
old_values=values,
|
||||
values=new_values,
|
||||
columns=[{'Name': 'country', 'Type': 'string'}],
|
||||
)
|
||||
|
||||
with assert_raises(ClientError) as exc:
|
||||
helpers.get_partition(client, database_name, table_name, values)
|
||||
|
||||
# Old partition shouldn't exist anymore
|
||||
exc.exception.response['Error']['Code'].should.equal('EntityNotFoundException')
|
||||
|
||||
response = client.get_partition(DatabaseName=database_name, TableName=table_name, PartitionValues=new_values)
|
||||
partition = response['Partition']
|
||||
|
||||
partition['TableName'].should.equal(table_name)
|
||||
partition['StorageDescriptor']['Columns'].should.equal([{'Name': 'country', 'Type': 'string'}])
|
||||
|
@ -1,4 +1,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import boto
|
||||
import boto3
|
||||
import sure # noqa
|
||||
@ -25,6 +28,25 @@ def test_get_group():
|
||||
conn.get_group('not-group')
|
||||
|
||||
|
||||
@mock_iam()
def test_get_group_current():
    """get_group returns the group's metadata plus an (empty) member list,
    and a non-default Path is reflected in both Path and the ARN."""
    conn = boto3.client('iam', region_name='us-east-1')
    conn.create_group(GroupName='my-group')
    result = conn.get_group(GroupName='my-group')

    group = result['Group']
    assert group['Path'] == '/'
    assert group['GroupName'] == 'my-group'
    assert isinstance(group['CreateDate'], datetime)
    assert group['GroupId']
    assert group['Arn'] == 'arn:aws:iam::123456789012:group/my-group'
    assert not result['Users']

    # Make a group with a different path and check it shows up in the ARN:
    other = conn.create_group(GroupName='my-other-group', Path='some/location')
    assert other['Group']['Path'] == 'some/location'
    assert other['Group']['Arn'] == 'arn:aws:iam::123456789012:group/some/location/my-other-group'
|
||||
|
||||
|
||||
@mock_iam_deprecated()
|
||||
def test_get_all_groups():
|
||||
conn = boto.connect_iam()
|
||||
|
@ -1,5 +1,5 @@
|
||||
from __future__ import unicode_literals
|
||||
import re
|
||||
import os, re
|
||||
|
||||
import boto3
|
||||
import boto.kms
|
||||
@ -8,6 +8,9 @@ from boto.kms.exceptions import AlreadyExistsException, NotFoundException
|
||||
import sure # noqa
|
||||
from moto import mock_kms, mock_kms_deprecated
|
||||
from nose.tools import assert_raises
|
||||
from freezegun import freeze_time
|
||||
from datetime import datetime, timedelta
|
||||
from dateutil.tz import tzlocal
|
||||
|
||||
|
||||
@mock_kms_deprecated
|
||||
@ -617,3 +620,100 @@ def test_kms_encrypt_boto3():
|
||||
|
||||
response = client.decrypt(CiphertextBlob=response['CiphertextBlob'])
|
||||
response['Plaintext'].should.equal(b'bar')
|
||||
|
||||
|
||||
@mock_kms
def test_disable_key():
    """disable_key turns Enabled off and reports KeyState 'Disabled'."""
    client = boto3.client('kms', region_name='us-east-1')
    key_id = client.create_key(Description='disable-key')['KeyMetadata']['KeyId']
    client.disable_key(KeyId=key_id)

    metadata = client.describe_key(KeyId=key_id)['KeyMetadata']
    assert metadata['Enabled'] == False
    assert metadata['KeyState'] == 'Disabled'


@mock_kms
def test_enable_key():
    """enable_key re-enables a key that was previously disabled."""
    client = boto3.client('kms', region_name='us-east-1')
    key_id = client.create_key(Description='enable-key')['KeyMetadata']['KeyId']
    client.disable_key(KeyId=key_id)
    client.enable_key(KeyId=key_id)

    metadata = client.describe_key(KeyId=key_id)['KeyMetadata']
    assert metadata['Enabled'] == True
    assert metadata['KeyState'] == 'Enabled'
|
||||
|
||||
|
||||
@mock_kms
def test_schedule_key_deletion():
    """Scheduling deletion without a window defaults to 30 days out and
    moves the key to the PendingDeletion state."""
    client = boto3.client('kms', region_name='us-east-1')
    key_id = client.create_key(Description='schedule-key-deletion')['KeyMetadata']['KeyId']
    if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false':
        with freeze_time("2015-01-01 12:00:00"):
            response = client.schedule_key_deletion(KeyId=key_id)
            assert response['KeyId'] == key_id
            assert response['DeletionDate'] == datetime(2015, 1, 31, 12, 0, tzinfo=tzlocal())
    else:
        # Can't manipulate time in server mode, so only the id is checked.
        response = client.schedule_key_deletion(KeyId=key_id)
        assert response['KeyId'] == key_id

    metadata = client.describe_key(KeyId=key_id)['KeyMetadata']
    assert metadata['Enabled'] == False
    assert metadata['KeyState'] == 'PendingDeletion'
    assert 'DeletionDate' in metadata


@mock_kms
def test_schedule_key_deletion_custom():
    """PendingWindowInDays shifts the deletion date by the requested window."""
    client = boto3.client('kms', region_name='us-east-1')
    key_id = client.create_key(Description='schedule-key-deletion')['KeyMetadata']['KeyId']
    if os.environ.get('TEST_SERVER_MODE', 'false').lower() == 'false':
        with freeze_time("2015-01-01 12:00:00"):
            response = client.schedule_key_deletion(KeyId=key_id, PendingWindowInDays=7)
            assert response['KeyId'] == key_id
            assert response['DeletionDate'] == datetime(2015, 1, 8, 12, 0, tzinfo=tzlocal())
    else:
        # Can't manipulate time in server mode, so only the id is checked.
        response = client.schedule_key_deletion(KeyId=key_id, PendingWindowInDays=7)
        assert response['KeyId'] == key_id

    metadata = client.describe_key(KeyId=key_id)['KeyMetadata']
    assert metadata['Enabled'] == False
    assert metadata['KeyState'] == 'PendingDeletion'
    assert 'DeletionDate' in metadata
|
||||
|
||||
|
||||
@mock_kms
def test_cancel_key_deletion():
    """Cancelling a pending deletion leaves the key disabled (not enabled)
    and clears the DeletionDate."""
    client = boto3.client('kms', region_name='us-east-1')
    key_id = client.create_key(Description='cancel-key-deletion')['KeyMetadata']['KeyId']
    client.schedule_key_deletion(KeyId=key_id)
    response = client.cancel_key_deletion(KeyId=key_id)
    assert response['KeyId'] == key_id

    metadata = client.describe_key(KeyId=key_id)['KeyMetadata']
    assert metadata['Enabled'] == False
    assert metadata['KeyState'] == 'Disabled'
    assert 'DeletionDate' not in metadata
|
||||
|
@ -121,4 +121,8 @@ def test_filter_logs_interleaved():
|
||||
interleaved=True,
|
||||
)
|
||||
events = res['events']
|
||||
events.should.have.length_of(2)
|
||||
for original_message, resulting_event in zip(messages, events):
|
||||
resulting_event['eventId'].should.equal(str(resulting_event['eventId']))
|
||||
resulting_event['timestamp'].should.equal(original_message['timestamp'])
|
||||
resulting_event['message'].should.equal(original_message['message'])
|
||||
|
||||
|
0
tests/test_organizations/__init__.py
Normal file
0
tests/test_organizations/__init__.py
Normal file
136
tests/test_organizations/organizations_test_utils.py
Normal file
136
tests/test_organizations/organizations_test_utils.py
Normal file
@ -0,0 +1,136 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import six
|
||||
import sure # noqa
|
||||
import datetime
|
||||
from moto.organizations import utils
|
||||
|
||||
# Loose e-mail shape check: local part, '@', a dotted domain, then either an
# alphabetic TLD (2-3 chars) or a 1-3 digit numeric component. The previous
# pattern left the dots unescaped (matching any character) and the
# '|[0-9]{1,3}$' alternation ungrouped, so the '^.+@...' prefix applied only
# to the left branch and nearly any string matched.
EMAIL_REGEX = r"^.+@[a-zA-Z0-9\-.]+\.([a-zA-Z]{2,3}|[0-9]{1,3})$"
|
||||
ORG_ID_REGEX = r'o-[a-z0-9]{%s}' % utils.ORG_ID_SIZE
|
||||
ROOT_ID_REGEX = r'r-[a-z0-9]{%s}' % utils.ROOT_ID_SIZE
|
||||
OU_ID_REGEX = r'ou-[a-z0-9]{%s}-[a-z0-9]{%s}' % (utils.ROOT_ID_SIZE, utils.OU_ID_SUFFIX_SIZE)
|
||||
ACCOUNT_ID_REGEX = r'[0-9]{%s}' % utils.ACCOUNT_ID_SIZE
|
||||
CREATE_ACCOUNT_STATUS_ID_REGEX = r'car-[a-z0-9]{%s}' % utils.CREATE_ACCOUNT_STATUS_ID_SIZE
|
||||
|
||||
|
||||
def test_make_random_org_id():
    # 'o-' followed by ORG_ID_SIZE lowercase alphanumerics.
    utils.make_random_org_id().should.match(ORG_ID_REGEX)


def test_make_random_root_id():
    # 'r-' followed by ROOT_ID_SIZE lowercase alphanumerics.
    utils.make_random_root_id().should.match(ROOT_ID_REGEX)


def test_make_random_ou_id():
    # OU ids embed a suffix derived from the root they belong to.
    parent_root_id = utils.make_random_root_id()
    utils.make_random_ou_id(parent_root_id).should.match(OU_ID_REGEX)


def test_make_random_account_id():
    # Account ids are all-numeric, ACCOUNT_ID_SIZE digits long.
    utils.make_random_account_id().should.match(ACCOUNT_ID_REGEX)


def test_make_random_create_account_status_id():
    # 'car-' followed by CREATE_ACCOUNT_STATUS_ID_SIZE lowercase alphanumerics.
    utils.make_random_create_account_status_id().should.match(
        CREATE_ACCOUNT_STATUS_ID_REGEX)
|
||||
|
||||
|
||||
def validate_organization(response):
    """Assert that a Describe/CreateOrganization-style response carries
    exactly the expected keys and that ids, ARNs, and enums are well formed."""
    org = response['Organization']
    sorted(org.keys()).should.equal([
        'Arn',
        'AvailablePolicyTypes',
        'FeatureSet',
        'Id',
        'MasterAccountArn',
        'MasterAccountEmail',
        'MasterAccountId',
    ])
    master_id = org['MasterAccountId']
    org['Id'].should.match(ORG_ID_REGEX)
    master_id.should.equal(utils.MASTER_ACCOUNT_ID)
    org['MasterAccountArn'].should.equal(
        utils.MASTER_ACCOUNT_ARN_FORMAT.format(master_id, org['Id']))
    org['Arn'].should.equal(
        utils.ORGANIZATION_ARN_FORMAT.format(master_id, org['Id']))
    org['MasterAccountEmail'].should.equal(utils.MASTER_ACCOUNT_EMAIL)
    org['FeatureSet'].should.be.within(['ALL', 'CONSOLIDATED_BILLING'])
    org['AvailablePolicyTypes'].should.equal([
        {'Type': 'SERVICE_CONTROL_POLICY', 'Status': 'ENABLED'},
    ])


def validate_roots(org, response):
    """Assert the shape of a ListRoots response against its organization."""
    response.should.have.key('Roots').should.be.a(list)
    response['Roots'].should_not.be.empty
    root = response['Roots'][0]
    root.should.have.key('Id').should.match(ROOT_ID_REGEX)
    root.should.have.key('Arn').should.equal(utils.ROOT_ARN_FORMAT.format(
        org['MasterAccountId'],
        org['Id'],
        root['Id'],
    ))
    root.should.have.key('Name').should.be.a(six.string_types)
    root.should.have.key('PolicyTypes').should.be.a(list)
    policy_type = root['PolicyTypes'][0]
    policy_type.should.have.key('Type').should.equal('SERVICE_CONTROL_POLICY')
    policy_type.should.have.key('Status').should.equal('ENABLED')


def validate_organizational_unit(org, response):
    """Assert the shape of a Describe/CreateOrganizationalUnit response."""
    response.should.have.key('OrganizationalUnit').should.be.a(dict)
    ou = response['OrganizationalUnit']
    ou.should.have.key('Id').should.match(OU_ID_REGEX)
    ou.should.have.key('Arn').should.equal(utils.OU_ARN_FORMAT.format(
        org['MasterAccountId'],
        org['Id'],
        ou['Id'],
    ))
    ou.should.have.key('Name').should.be.a(six.string_types)
|
||||
|
||||
|
||||
def validate_account(org, account):
    """Assert a single account dict (from Describe/ListAccounts) is well
    formed: exact key set, valid id/ARN/email, and legal enum values."""
    sorted(account.keys()).should.equal([
        'Arn',
        'Email',
        'Id',
        'JoinedMethod',
        'JoinedTimestamp',
        'Name',
        'Status',
    ])
    account['Id'].should.match(ACCOUNT_ID_REGEX)
    account['Arn'].should.equal(utils.ACCOUNT_ARN_FORMAT.format(
        org['MasterAccountId'],
        org['Id'],
        account['Id'],
    ))
    account['Email'].should.match(EMAIL_REGEX)
    account['JoinedMethod'].should.be.within(['INVITED', 'CREATED'])
    account['Status'].should.be.within(['ACTIVE', 'SUSPENDED'])
    account['Name'].should.be.a(six.string_types)
    account['JoinedTimestamp'].should.be.a(datetime.datetime)


def validate_create_account_status(create_status):
    """Assert a CreateAccountStatus dict describes a completed ('SUCCEEDED')
    account creation with the exact expected key set."""
    sorted(create_status.keys()).should.equal([
        'AccountId',
        'AccountName',
        'CompletedTimestamp',
        'Id',
        'RequestedTimestamp',
        'State',
    ])
    create_status['Id'].should.match(CREATE_ACCOUNT_STATUS_ID_REGEX)
    create_status['AccountId'].should.match(ACCOUNT_ID_REGEX)
    create_status['AccountName'].should.be.a(six.string_types)
    create_status['State'].should.equal('SUCCEEDED')
    create_status['RequestedTimestamp'].should.be.a(datetime.datetime)
    create_status['CompletedTimestamp'].should.be.a(datetime.datetime)
|
322
tests/test_organizations/test_organizations_boto3.py
Normal file
322
tests/test_organizations/test_organizations_boto3.py
Normal file
@ -0,0 +1,322 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import boto3
|
||||
import sure # noqa
|
||||
from botocore.exceptions import ClientError
|
||||
from nose.tools import assert_raises
|
||||
|
||||
from moto import mock_organizations
|
||||
from moto.organizations import utils
|
||||
from .organizations_test_utils import (
|
||||
validate_organization,
|
||||
validate_roots,
|
||||
validate_organizational_unit,
|
||||
validate_account,
|
||||
validate_create_account_status,
|
||||
)
|
||||
|
||||
|
||||
@mock_organizations
def test_create_organization():
    client = boto3.client('organizations', region_name='us-east-1')
    response = client.create_organization(FeatureSet='ALL')
    validate_organization(response)
    response['Organization']['FeatureSet'].should.equal('ALL')


@mock_organizations
def test_describe_organization():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    validate_organization(client.describe_organization())


@mock_organizations
def test_describe_organization_exception():
    # Describing before any organization exists must raise.
    client = boto3.client('organizations', region_name='us-east-1')
    with assert_raises(ClientError) as e:
        client.describe_organization()
    ex = e.exception
    ex.operation_name.should.equal('DescribeOrganization')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('AWSOrganizationsNotInUseException')


# Organizational Units

@mock_organizations
def test_list_roots():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    validate_roots(org, client.list_roots())
|
||||
|
||||
|
||||
@mock_organizations
def test_create_organizational_unit():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    root_id = client.list_roots()['Roots'][0]['Id']
    ou_name = 'ou01'
    response = client.create_organizational_unit(ParentId=root_id, Name=ou_name)
    validate_organizational_unit(org, response)
    response['OrganizationalUnit']['Name'].should.equal(ou_name)


@mock_organizations
def test_describe_organizational_unit():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    root_id = client.list_roots()['Roots'][0]['Id']
    ou_id = client.create_organizational_unit(
        ParentId=root_id,
        Name='ou01',
    )['OrganizationalUnit']['Id']
    validate_organizational_unit(
        org, client.describe_organizational_unit(OrganizationalUnitId=ou_id))


@mock_organizations
def test_describe_organizational_unit_exception():
    # A (random) root id is never a valid OU id.
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    with assert_raises(ClientError) as e:
        client.describe_organizational_unit(
            OrganizationalUnitId=utils.make_random_root_id())
    ex = e.exception
    ex.operation_name.should.equal('DescribeOrganizationalUnit')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('OrganizationalUnitNotFoundException')


@mock_organizations
def test_list_organizational_units_for_parent():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    root_id = client.list_roots()['Roots'][0]['Id']
    for ou_name in ('ou01', 'ou02', 'ou03'):
        client.create_organizational_unit(ParentId=root_id, Name=ou_name)
    response = client.list_organizational_units_for_parent(ParentId=root_id)
    response.should.have.key('OrganizationalUnits').should.be.a(list)
    for ou in response['OrganizationalUnits']:
        validate_organizational_unit(org, dict(OrganizationalUnit=ou))


@mock_organizations
def test_list_organizational_units_for_parent_exception():
    # Listing under an unknown parent must raise ParentNotFoundException.
    client = boto3.client('organizations', region_name='us-east-1')
    with assert_raises(ClientError) as e:
        client.list_organizational_units_for_parent(
            ParentId=utils.make_random_root_id())
    ex = e.exception
    ex.operation_name.should.equal('ListOrganizationalUnitsForParent')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('ParentNotFoundException')
|
||||
|
||||
|
||||
# Accounts

# Shared fixtures for the account tests below.
mockname = 'mock-account'
mockdomain = 'moto-example.org'
mockemail = '{}@{}'.format(mockname, mockdomain)
|
||||
|
||||
|
||||
@mock_organizations
def test_create_account():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    create_status = client.create_account(
        AccountName=mockname, Email=mockemail)['CreateAccountStatus']
    validate_create_account_status(create_status)
    create_status['AccountName'].should.equal(mockname)


@mock_organizations
def test_describe_account():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    account_id = client.create_account(
        AccountName=mockname, Email=mockemail)['CreateAccountStatus']['AccountId']
    response = client.describe_account(AccountId=account_id)
    validate_account(org, response['Account'])
    response['Account']['Name'].should.equal(mockname)
    response['Account']['Email'].should.equal(mockemail)


@mock_organizations
def test_describe_account_exception():
    # A random (nonexistent) account id must raise AccountNotFoundException.
    client = boto3.client('organizations', region_name='us-east-1')
    with assert_raises(ClientError) as e:
        client.describe_account(AccountId=utils.make_random_account_id())
    ex = e.exception
    ex.operation_name.should.equal('DescribeAccount')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('AccountNotFoundException')


@mock_organizations
def test_list_accounts():
    client = boto3.client('organizations', region_name='us-east-1')
    org = client.create_organization(FeatureSet='ALL')['Organization']
    for i in range(5):
        name = mockname + str(i)
        client.create_account(AccountName=name, Email=name + '@' + mockdomain)
    response = client.list_accounts()
    response.should.have.key('Accounts')
    accounts = response['Accounts']
    len(accounts).should.equal(5)
    for account in accounts:
        validate_account(org, account)
    # Spot-check that accounts come back in creation order.
    accounts[3]['Name'].should.equal(mockname + '3')
    accounts[2]['Email'].should.equal(mockname + '2' + '@' + mockdomain)
|
||||
|
||||
|
||||
@mock_organizations
def test_list_accounts_for_parent():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    account_id = client.create_account(
        AccountName=mockname,
        Email=mockemail,
    )['CreateAccountStatus']['AccountId']
    # A freshly created account hangs off the root by default.
    response = client.list_accounts_for_parent(ParentId=root_id)
    account_id.should.be.within([account['Id'] for account in response['Accounts']])


@mock_organizations
def test_move_account():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    account_id = client.create_account(
        AccountName=mockname, Email=mockemail)['CreateAccountStatus']['AccountId']
    ou01_id = client.create_organizational_unit(
        ParentId=root_id, Name='ou01')['OrganizationalUnit']['Id']
    client.move_account(
        AccountId=account_id,
        SourceParentId=root_id,
        DestinationParentId=ou01_id,
    )
    # After the move, the account is listed under the destination OU.
    response = client.list_accounts_for_parent(ParentId=ou01_id)
    account_id.should.be.within([account['Id'] for account in response['Accounts']])
|
||||
|
||||
|
||||
@mock_organizations
def test_list_parents_for_ou():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    ou01_id = client.create_organizational_unit(
        ParentId=root_id, Name='ou01')['OrganizationalUnit']['Id']
    # A top-level OU's parent is the root itself.
    response01 = client.list_parents(ChildId=ou01_id)
    response01.should.have.key('Parents').should.be.a(list)
    response01['Parents'][0].should.have.key('Id').should.equal(root_id)
    response01['Parents'][0].should.have.key('Type').should.equal('ROOT')
    # A nested OU's parent is the OU above it, not the root.
    ou02_id = client.create_organizational_unit(
        ParentId=ou01_id, Name='ou02')['OrganizationalUnit']['Id']
    response02 = client.list_parents(ChildId=ou02_id)
    response02.should.have.key('Parents').should.be.a(list)
    response02['Parents'][0].should.have.key('Id').should.equal(ou01_id)
    response02['Parents'][0].should.have.key('Type').should.equal('ORGANIZATIONAL_UNIT')


@mock_organizations
def test_list_parents_for_accounts():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    ou01_id = client.create_organizational_unit(
        ParentId=root_id, Name='ou01')['OrganizationalUnit']['Id']
    account01_id = client.create_account(
        AccountName='account01',
        Email='account01@moto-example.org',
    )['CreateAccountStatus']['AccountId']
    account02_id = client.create_account(
        AccountName='account02',
        Email='account02@moto-example.org',
    )['CreateAccountStatus']['AccountId']
    client.move_account(
        AccountId=account02_id,
        SourceParentId=root_id,
        DestinationParentId=ou01_id,
    )
    # account01 stayed under the root; account02 was moved into the OU.
    response01 = client.list_parents(ChildId=account01_id)
    response01.should.have.key('Parents').should.be.a(list)
    response01['Parents'][0].should.have.key('Id').should.equal(root_id)
    response01['Parents'][0].should.have.key('Type').should.equal('ROOT')
    response02 = client.list_parents(ChildId=account02_id)
    response02.should.have.key('Parents').should.be.a(list)
    response02['Parents'][0].should.have.key('Id').should.equal(ou01_id)
    response02['Parents'][0].should.have.key('Type').should.equal('ORGANIZATIONAL_UNIT')
|
||||
|
||||
|
||||
@mock_organizations
def test_list_children():
    """list_children filters by both parent id and ChildType."""
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    ou01_id = client.create_organizational_unit(
        ParentId=root_id, Name='ou01')['OrganizationalUnit']['Id']
    ou02_id = client.create_organizational_unit(
        ParentId=ou01_id, Name='ou02')['OrganizationalUnit']['Id']
    account01_id = client.create_account(
        AccountName='account01',
        Email='account01@moto-example.org',
    )['CreateAccountStatus']['AccountId']
    account02_id = client.create_account(
        AccountName='account02',
        Email='account02@moto-example.org',
    )['CreateAccountStatus']['AccountId']
    client.move_account(
        AccountId=account02_id,
        SourceParentId=root_id,
        DestinationParentId=ou01_id,
    )
    root_accounts = client.list_children(ParentId=root_id, ChildType='ACCOUNT')
    root_ous = client.list_children(ParentId=root_id, ChildType='ORGANIZATIONAL_UNIT')
    ou01_accounts = client.list_children(ParentId=ou01_id, ChildType='ACCOUNT')
    ou01_ous = client.list_children(ParentId=ou01_id, ChildType='ORGANIZATIONAL_UNIT')
    root_accounts['Children'][0]['Id'].should.equal(account01_id)
    root_accounts['Children'][0]['Type'].should.equal('ACCOUNT')
    root_ous['Children'][0]['Id'].should.equal(ou01_id)
    root_ous['Children'][0]['Type'].should.equal('ORGANIZATIONAL_UNIT')
    ou01_accounts['Children'][0]['Id'].should.equal(account02_id)
    ou01_accounts['Children'][0]['Type'].should.equal('ACCOUNT')
    ou01_ous['Children'][0]['Id'].should.equal(ou02_id)
    ou01_ous['Children'][0]['Type'].should.equal('ORGANIZATIONAL_UNIT')


@mock_organizations
def test_list_children_exception():
    client = boto3.client('organizations', region_name='us-east-1')
    client.create_organization(FeatureSet='ALL')
    root_id = client.list_roots()['Roots'][0]['Id']
    # Unknown parent id.
    with assert_raises(ClientError) as e:
        client.list_children(
            ParentId=utils.make_random_root_id(),
            ChildType='ACCOUNT',
        )
    ex = e.exception
    ex.operation_name.should.equal('ListChildren')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('ParentNotFoundException')
    # Invalid ChildType value.
    with assert_raises(ClientError) as e:
        client.list_children(ParentId=root_id, ChildType='BLEE')
    ex = e.exception
    ex.operation_name.should.equal('ListChildren')
    ex.response['Error']['Code'].should.equal('400')
    ex.response['Error']['Message'].should.contain('InvalidInputException')
|
@ -191,6 +191,127 @@ def test_lifecycle_with_eodm():
|
||||
assert err.exception.response["Error"]["Code"] == "MalformedXML"
|
||||
|
||||
|
||||
@mock_s3
def test_lifecycle_with_nve():
    """NoncurrentVersionExpiration round-trips through put/get and an
    update replaces the rule in place."""
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")

    lfc = {
        "Rules": [
            {
                "NoncurrentVersionExpiration": {
                    "NoncurrentDays": 30
                },
                "ID": "wholebucket",
                "Filter": {
                    "Prefix": ""
                },
                "Status": "Enabled"
            }
        ]
    }
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["NoncurrentVersionExpiration"]["NoncurrentDays"] == 30

    # Change NoncurrentDays; the single rule is updated, not duplicated.
    lfc["Rules"][0]["NoncurrentVersionExpiration"]["NoncurrentDays"] = 10
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["NoncurrentVersionExpiration"]["NoncurrentDays"] == 10

    # TODO: Add test for failures due to missing children
|
||||
|
||||
|
||||
@mock_s3
def test_lifecycle_with_nvt():
    """NoncurrentVersionTransitions round-trips, can be updated field by
    field, and rejects a transition missing either required child."""
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")

    lfc = {
        "Rules": [
            {
                "NoncurrentVersionTransitions": [{
                    "NoncurrentDays": 30,
                    "StorageClass": "ONEZONE_IA"
                }],
                "ID": "wholebucket",
                "Filter": {
                    "Prefix": ""
                },
                "Status": "Enabled"
            }
        ]
    }
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    transition = rules[0]["NoncurrentVersionTransitions"][0]
    assert transition["NoncurrentDays"] == 30
    assert transition["StorageClass"] == "ONEZONE_IA"

    # Change NoncurrentDays:
    lfc["Rules"][0]["NoncurrentVersionTransitions"][0]["NoncurrentDays"] = 10
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["NoncurrentVersionTransitions"][0]["NoncurrentDays"] == 10

    # Change StorageClass:
    lfc["Rules"][0]["NoncurrentVersionTransitions"][0]["StorageClass"] = "GLACIER"
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["NoncurrentVersionTransitions"][0]["StorageClass"] == "GLACIER"

    # Both children are required; omitting either one is MalformedXML.
    del lfc["Rules"][0]["NoncurrentVersionTransitions"][0]["NoncurrentDays"]
    with assert_raises(ClientError) as err:
        client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    assert err.exception.response["Error"]["Code"] == "MalformedXML"
    lfc["Rules"][0]["NoncurrentVersionTransitions"][0]["NoncurrentDays"] = 30

    del lfc["Rules"][0]["NoncurrentVersionTransitions"][0]["StorageClass"]
    with assert_raises(ClientError) as err:
        client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    assert err.exception.response["Error"]["Code"] == "MalformedXML"
|
||||
|
||||
|
||||
@mock_s3
def test_lifecycle_with_aimu():
    """AbortIncompleteMultipartUpload round-trips and an update replaces
    the rule in place."""
    client = boto3.client("s3")
    client.create_bucket(Bucket="bucket")

    lfc = {
        "Rules": [
            {
                "AbortIncompleteMultipartUpload": {
                    "DaysAfterInitiation": 7
                },
                "ID": "wholebucket",
                "Filter": {
                    "Prefix": ""
                },
                "Status": "Enabled"
            }
        ]
    }
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["AbortIncompleteMultipartUpload"]["DaysAfterInitiation"] == 7

    # Change DaysAfterInitiation; the single rule is updated, not duplicated.
    lfc["Rules"][0]["AbortIncompleteMultipartUpload"]["DaysAfterInitiation"] = 30
    client.put_bucket_lifecycle_configuration(Bucket="bucket", LifecycleConfiguration=lfc)
    rules = client.get_bucket_lifecycle_configuration(Bucket="bucket")["Rules"]
    assert len(rules) == 1
    assert rules[0]["AbortIncompleteMultipartUpload"]["DaysAfterInitiation"] == 30

    # TODO: Add test for failures due to missing children
|
||||
|
||||
|
||||
@mock_s3_deprecated
|
||||
def test_lifecycle_with_glacier_transition():
|
||||
conn = boto.s3.connect_to_region("us-west-1")
|
||||
|
Loading…
Reference in New Issue
Block a user