Merge pull request #29 from spulec/master

Merge upstream
Bert Blommers 2020-02-16 12:15:06 +00:00 committed by GitHub
commit 954004669a
18 changed files with 438 additions and 181 deletions


@ -83,14 +83,14 @@ class MethodResponse(BaseModel, dict):
class Method(BaseModel, dict):
def __init__(self, method_type, authorization_type):
def __init__(self, method_type, authorization_type, **kwargs):
super(Method, self).__init__()
self.update(
dict(
httpMethod=method_type,
authorizationType=authorization_type,
authorizerId=None,
apiKeyRequired=None,
apiKeyRequired=kwargs.get("api_key_required") or False,
requestParameters=None,
requestModels=None,
methodIntegration=None,
@ -158,8 +158,12 @@ class Resource(BaseModel):
)
return response.status_code, response.text
def add_method(self, method_type, authorization_type):
method = Method(method_type=method_type, authorization_type=authorization_type)
def add_method(self, method_type, authorization_type, api_key_required):
method = Method(
method_type=method_type,
authorization_type=authorization_type,
api_key_required=api_key_required,
)
self.resource_methods[method_type] = method
return method
@ -594,9 +598,18 @@ class APIGatewayBackend(BaseBackend):
resource = self.get_resource(function_id, resource_id)
return resource.get_method(method_type)
def create_method(self, function_id, resource_id, method_type, authorization_type):
def create_method(
self,
function_id,
resource_id,
method_type,
authorization_type,
api_key_required=None,
):
resource = self.get_resource(function_id, resource_id)
method = resource.add_method(method_type, authorization_type)
method = resource.add_method(
method_type, authorization_type, api_key_required=api_key_required
)
return method
def get_stage(self, function_id, stage_name):
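
A minimal sketch of what the new keyword argument produces (assuming the Method class is importable from moto.apigateway.models; the method and authorization values are only illustrative). The "or False" fallback means an omitted or None flag is stored as False:

from moto.apigateway.models import Method

# The flag is stored verbatim in the method dict under its API Gateway name
method = Method(method_type="GET", authorization_type="none", api_key_required=True)
assert method["apiKeyRequired"] is True

# Omitting the flag (or passing None) falls back to False via kwargs.get(...) or False
assert Method(method_type="GET", authorization_type="none")["apiKeyRequired"] is False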


@ -145,8 +145,13 @@ class APIGatewayResponse(BaseResponse):
return 200, {}, json.dumps(method)
elif self.method == "PUT":
authorization_type = self._get_param("authorizationType")
api_key_required = self._get_param("apiKeyRequired")
method = self.backend.create_method(
function_id, resource_id, method_type, authorization_type
function_id,
resource_id,
method_type,
authorization_type,
api_key_required,
)
return 200, {}, json.dumps(method)


@ -184,7 +184,13 @@ class LambdaResponse(BaseResponse):
function_name, qualifier, self.body, self.headers, response_headers
)
if payload:
return 202, response_headers, payload
if request.headers["X-Amz-Invocation-Type"] == "Event":
status_code = 202
elif request.headers["X-Amz-Invocation-Type"] == "DryRun":
status_code = 204
else:
status_code = 200
return status_code, response_headers, payload
else:
return 404, response_headers, "{}"
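
In plain terms, a successful invoke now returns a status code keyed off the X-Amz-Invocation-Type header. A small sketch of the mapping (the helper name is illustrative, not part of moto):

def status_for_invocation_type(invocation_type):
    # "Event" is the asynchronous invoke, "DryRun" only validates parameters and
    # permissions, and anything else is treated as a synchronous RequestResponse
    if invocation_type == "Event":
        return 202
    elif invocation_type == "DryRun":
        return 204
    return 200

assert status_for_invocation_type("RequestResponse") == 200
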
@ -295,7 +301,7 @@ class LambdaResponse(BaseResponse):
code["Configuration"]["FunctionArn"] += ":$LATEST"
return 200, {}, json.dumps(code)
else:
return 404, {}, "{}"
return 404, {"x-amzn-ErrorType": "ResourceNotFoundException"}, "{}"
def _get_aws_region(self, full_url):
region = self.region_regex.search(full_url)
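
With the x-amzn-ErrorType header set, botocore surfaces the 404 as a typed error instead of a bare client error; a minimal sketch of the client-side effect (region and function name are illustrative):

import boto3
from moto import mock_lambda

@mock_lambda
def lookup_missing_function():
    client = boto3.client("lambda", region_name="us-east-1")
    try:
        client.get_function(FunctionName="does-not-exist")
        raise AssertionError("expected ResourceNotFoundException")
    except client.exceptions.ResourceNotFoundException as err:
        # The header added above is what becomes the error code here
        assert err.response["Error"]["Code"] == "ResourceNotFoundException"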


@ -448,13 +448,18 @@ class Item(BaseModel):
if list_append_re:
new_value = expression_attribute_values[list_append_re.group(2).strip()]
old_list_key = list_append_re.group(1)
# Get the existing value
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
# old_list_key could be a function itself (if_not_exists)
if old_list_key.startswith("if_not_exists"):
old_list = DynamoType(
expression_attribute_values[self._get_default(old_list_key)]
)
else:
old_list = self.attrs[old_list_key.split(".")[0]]
if "." in old_list_key:
# Value is nested inside a map - find the appropriate child attr
old_list = old_list.child_attr(
".".join(old_list_key.split(".")[1:])
)
if not old_list.is_list():
raise ParamValidationError
old_list.value.extend([DynamoType(v) for v in new_value["L"]])
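
Condensing the new test added further down, this is the shape of expression the if_not_exists branch now handles, where the first argument to list_append is itself an if_not_exists(...) call (a sketch mirroring that test, not additional coverage):

import boto3
from moto import mock_dynamodb2

@mock_dynamodb2
def append_with_default_list():
    dynamo = boto3.resource("dynamodb", region_name="us-west-1")
    dynamo.create_table(
        TableName="test",
        AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}],
        KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}],
        ProvisionedThroughput={"ReadCapacityUnits": 5, "WriteCapacityUnits": 5},
    )
    table = dynamo.Table("test")
    table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
    # if_not_exists(...) supplies :empty_list when the list is absent, so the first
    # append succeeds without pre-creating the attribute
    table.update_item(
        Key={"Id": "item-id"},
        UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)",
        ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
    )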


@ -27,6 +27,7 @@ from moto.core.utils import (
iso_8601_datetime_with_milliseconds,
camelcase_to_underscores,
)
from moto.iam.models import ACCOUNT_ID
from .exceptions import (
CidrLimitExceeded,
DependencyViolationError,
@ -155,7 +156,7 @@ AMIS = _load_resource(
)
OWNER_ID = "111122223333"
OWNER_ID = ACCOUNT_ID
def utc_date_and_time():
@ -1341,7 +1342,7 @@ class AmiBackend(object):
source_ami=None,
name=name,
description=description,
owner_id=context.get_current_user() if context else OWNER_ID,
owner_id=OWNER_ID,
)
self.amis[ami_id] = ami
return ami
@ -1392,14 +1393,7 @@ class AmiBackend(object):
# Limit by owner ids
if owners:
# support filtering by Owners=['self']
owners = list(
map(
lambda o: context.get_current_user()
if context and o == "self"
else o,
owners,
)
)
owners = list(map(lambda o: OWNER_ID if o == "self" else o, owners,))
images = [ami for ami in images if ami.owner_id in owners]
# Generic filters
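
A boto3 sketch of what this change pins down: Owners=["self"] now resolves to moto's ACCOUNT_ID, so images created in the mocked account are returned (the AMI id and region are illustrative):

import boto3
from moto import mock_ec2

@mock_ec2
def describe_own_images():
    ec2 = boto3.client("ec2", region_name="us-east-1")
    reservation = ec2.run_instances(ImageId="ami-1234abcd", MinCount=1, MaxCount=1)
    instance_id = reservation["Instances"][0]["InstanceId"]
    image_id = ec2.create_image(InstanceId=instance_id, Name="test-ami")["ImageId"]
    # "self" maps to ACCOUNT_ID, which is also the owner recorded on the new image
    owned = ec2.describe_images(Owners=["self"])["Images"]
    assert image_id in [image["ImageId"] for image in owned]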


@ -127,6 +127,18 @@ class InvalidRequest(S3ClientError):
)
class IllegalLocationConstraintException(S3ClientError):
code = 400
def __init__(self, *args, **kwargs):
super(IllegalLocationConstraintException, self).__init__(
"IllegalLocationConstraintException",
"The unspecified location constraint is incompatible for the region specific endpoint this request was sent to.",
*args,
**kwargs
)
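
Client-side, this exception surfaces when a bucket is created against a non-us-east-1 endpoint without a LocationConstraint, mirroring the test added further down (region and bucket name are illustrative):

import boto3
from botocore.exceptions import ClientError
from moto import mock_s3

@mock_s3
def create_bucket_without_constraint():
    s3 = boto3.client("s3", region_name="eu-west-1")
    try:
        s3.create_bucket(Bucket="bucket-without-location")
        raise AssertionError("expected IllegalLocationConstraintException")
    except ClientError as err:
        # The error code comes from the first argument passed to S3ClientError above
        assert err.response["Error"]["Code"] == "IllegalLocationConstraintException"
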
class MalformedXML(S3ClientError):
code = 400


@ -29,6 +29,7 @@ from .exceptions import (
InvalidPartOrder,
MalformedXML,
MalformedACLError,
IllegalLocationConstraintException,
InvalidNotificationARN,
InvalidNotificationEvent,
ObjectNotInActiveTierError,
@ -585,6 +586,15 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
next_continuation_token = None
return result_keys, is_truncated, next_continuation_token
def _body_contains_location_constraint(self, body):
if body:
try:
xmltodict.parse(body)["CreateBucketConfiguration"]["LocationConstraint"]
return True
except KeyError:
pass
return False
def _bucket_response_put(
self, request, body, region_name, bucket_name, querystring
):
@ -680,10 +690,16 @@ class ResponseObject(_TemplateEnvironmentMixin, ActionAuthenticatorMixin):
return ""
else:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
# - LocationConstraint has to be specified if outside us-east-1
if (
region_name != DEFAULT_REGION_NAME
and not self._body_contains_location_constraint(body)
):
raise IllegalLocationConstraintException()
if body:
# us-east-1, the default AWS region behaves a bit differently
# - you should not use it as a location constraint --> it fails
# - querying the location constraint returns None
try:
forced_region = xmltodict.parse(body)["CreateBucketConfiguration"][
"LocationConstraint"


@ -37,7 +37,7 @@ def bucket_name_from_url(url):
REGION_URL_REGEX = re.compile(
r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
r"(.+)\.s3-(?P<region2>.+)\.amazonaws\.com)/?"
r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)
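
For illustration, both separator styles the widened pattern accepts (the regex is copied from the diff; the URLs are made up):

import re

REGION_URL_REGEX = re.compile(
    r"^https?://(s3[-\.](?P<region1>.+)\.amazonaws\.com/(.+)|"
    r"(.+)\.s3[-\.](?P<region2>.+)\.amazonaws\.com)/?"
)

for url in (
    "https://mybucket.s3-eu-west-1.amazonaws.com/",  # dash separator, matched before
    "https://mybucket.s3.eu-west-1.amazonaws.com/",  # dot separator, newly matched
):
    match = REGION_URL_REGEX.search(url)
    assert match and match.group("region2") == "eu-west-1"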


@ -286,6 +286,41 @@ def test_create_method():
{
"httpMethod": "GET",
"authorizationType": "none",
"apiKeyRequired": False,
"ResponseMetadata": {"HTTPStatusCode": 200},
}
)
@mock_apigateway
def test_create_method_apikeyrequired():
client = boto3.client("apigateway", region_name="us-west-2")
response = client.create_rest_api(name="my_api", description="this is my api")
api_id = response["id"]
resources = client.get_resources(restApiId=api_id)
root_id = [resource for resource in resources["items"] if resource["path"] == "/"][
0
]["id"]
client.put_method(
restApiId=api_id,
resourceId=root_id,
httpMethod="GET",
authorizationType="none",
apiKeyRequired=True,
)
response = client.get_method(restApiId=api_id, resourceId=root_id, httpMethod="GET")
# this is hard to match against, so remove it
response["ResponseMetadata"].pop("HTTPHeaders", None)
response["ResponseMetadata"].pop("RetryAttempts", None)
response.should.equal(
{
"httpMethod": "GET",
"authorizationType": "none",
"apiKeyRequired": True,
"ResponseMetadata": {"HTTPStatusCode": 200},
}
)


@ -78,7 +78,7 @@ def lambda_handler(event, context):
def get_test_zip_file4():
pfunc = """
def lambda_handler(event, context):
def lambda_handler(event, context):
raise Exception('I failed!')
"""
return _process_lambda(pfunc)
@ -86,14 +86,14 @@ def lambda_handler(event, context):
@mock_lambda
def test_list_functions():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.list_functions()
result["Functions"].should.have.length_of(0)
@mock_lambda
def test_invoke_requestresponse_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -113,7 +113,7 @@ def test_invoke_requestresponse_function():
Payload=json.dumps(in_data),
)
success_result["StatusCode"].should.equal(202)
success_result["StatusCode"].should.equal(200)
result_obj = json.loads(
base64.b64decode(success_result["LogResult"]).decode("utf-8")
)
@ -163,7 +163,7 @@ def test_invoke_requestresponse_function_with_arn():
@mock_lambda
def test_invoke_event_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -188,16 +188,44 @@ def test_invoke_event_function():
json.loads(success_result["Payload"].read().decode("utf-8")).should.equal(in_data)
@mock_lambda
def test_invoke_dryrun_function():
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
Role=get_role_name(),
Handler="lambda_function.lambda_handler",
Code={"ZipFile": get_test_zip_file1(),},
Description="test lambda function",
Timeout=3,
MemorySize=128,
Publish=True,
)
conn.invoke.when.called_with(
FunctionName="notAFunction", InvocationType="Event", Payload="{}"
).should.throw(botocore.client.ClientError)
in_data = {"msg": "So long and thanks for all the fish"}
success_result = conn.invoke(
FunctionName="testFunction",
InvocationType="DryRun",
Payload=json.dumps(in_data),
)
success_result["StatusCode"].should.equal(204)
if settings.TEST_SERVER_MODE:
@mock_ec2
@mock_lambda
def test_invoke_function_get_ec2_volume():
conn = boto3.resource("ec2", "us-west-2")
vol = conn.create_volume(Size=99, AvailabilityZone="us-west-2")
conn = boto3.resource("ec2", _lambda_region)
vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region)
vol = conn.Volume(vol.id)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python3.7",
@ -216,7 +244,7 @@ if settings.TEST_SERVER_MODE:
InvocationType="RequestResponse",
Payload=json.dumps(in_data),
)
result["StatusCode"].should.equal(202)
result["StatusCode"].should.equal(200)
actual_payload = json.loads(result["Payload"].read().decode("utf-8"))
expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size}
actual_payload.should.equal(expected_payload)
@ -227,14 +255,14 @@ if settings.TEST_SERVER_MODE:
@mock_ec2
@mock_lambda
def test_invoke_function_from_sns():
logs_conn = boto3.client("logs", region_name="us-west-2")
sns_conn = boto3.client("sns", region_name="us-west-2")
logs_conn = boto3.client("logs", region_name=_lambda_region)
sns_conn = boto3.client("sns", region_name=_lambda_region)
sns_conn.create_topic(Name="some-topic")
topics_json = sns_conn.list_topics()
topics = topics_json["Topics"]
topic_arn = topics[0]["TopicArn"]
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -277,7 +305,7 @@ def test_invoke_function_from_sns():
@mock_lambda
def test_create_based_on_s3_with_missing_bucket():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function.when.called_with(
FunctionName="testFunction",
@ -297,12 +325,15 @@ def test_create_based_on_s3_with_missing_bucket():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_aws_bucket():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
result = conn.create_function(
FunctionName="testFunction",
@ -350,7 +381,7 @@ def test_create_function_from_aws_bucket():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_create_function_from_zipfile():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
result = conn.create_function(
FunctionName="testFunction",
@ -395,12 +426,15 @@ def test_create_function_from_zipfile():
@mock_s3
@freeze_time("2015-01-01 00:00:00")
def test_get_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -455,7 +489,7 @@ def test_get_function():
)
# Test get_function when the function name can't be found
with assert_raises(ClientError):
with assert_raises(conn.exceptions.ResourceNotFoundException):
conn.get_function(FunctionName="junk", Qualifier="$LATEST")
@ -464,7 +498,10 @@ def test_get_function():
def test_get_function_by_arn():
bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name)
s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -489,12 +526,15 @@ def test_get_function_by_arn():
@mock_lambda
@mock_s3
def test_delete_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -525,7 +565,10 @@ def test_delete_function():
def test_delete_function_by_arn():
bucket_name = "test-bucket"
s3_conn = boto3.client("s3", "us-east-1")
s3_conn.create_bucket(Bucket=bucket_name)
s3_conn.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket=bucket_name, Key="test.zip", Body=zip_content)
@ -550,7 +593,7 @@ def test_delete_function_by_arn():
@mock_lambda
def test_delete_unknown_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.delete_function.when.called_with(
FunctionName="testFunctionThatDoesntExist"
).should.throw(botocore.client.ClientError)
@ -559,12 +602,15 @@ def test_delete_unknown_function():
@mock_lambda
@mock_s3
def test_publish():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -609,12 +655,15 @@ def test_list_create_list_get_delete_list():
test `list -> create -> list -> get -> delete -> list` integration
"""
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.list_functions()["Functions"].should.have.length_of(0)
@ -711,12 +760,15 @@ def test_tags():
"""
test list_tags -> tag_resource -> list_tags -> tag_resource -> list_tags -> untag_resource -> list_tags integration
"""
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
function = conn.create_function(
FunctionName="testFunction",
@ -768,7 +820,7 @@ def test_tags_not_found():
"""
Test list_tags and tag_resource when the lambda with the given arn does not exist
"""
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.list_tags.when.called_with(
Resource="arn:aws:lambda:{}:function:not-found".format(ACCOUNT_ID)
).should.throw(botocore.client.ClientError)
@ -786,7 +838,7 @@ def test_tags_not_found():
@mock_lambda
def test_invoke_async_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
Runtime="python2.7",
@ -809,7 +861,7 @@ def test_invoke_async_function():
@mock_lambda
@freeze_time("2015-01-01 00:00:00")
def test_get_function_created_with_zipfile():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
result = conn.create_function(
FunctionName="testFunction",
@ -855,7 +907,7 @@ def test_get_function_created_with_zipfile():
@mock_lambda
def test_add_function_permission():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
conn.create_function(
FunctionName="testFunction",
@ -886,7 +938,7 @@ def test_add_function_permission():
@mock_lambda
def test_get_function_policy():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
conn.create_function(
FunctionName="testFunction",
@ -921,12 +973,15 @@ def test_get_function_policy():
@mock_lambda
@mock_s3
def test_list_versions_by_function():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -977,12 +1032,15 @@ def test_list_versions_by_function():
@mock_lambda
@mock_s3
def test_create_function_with_already_exists():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
conn.create_function(
FunctionName="testFunction",
@ -1014,7 +1072,7 @@ def test_create_function_with_already_exists():
@mock_lambda
@mock_s3
def test_list_versions_by_function_for_nonexistent_function():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
versions = conn.list_versions_by_function(FunctionName="testFunction")
assert len(versions["Versions"]) == 0
@ -1363,12 +1421,15 @@ def test_delete_event_source_mapping():
@mock_lambda
@mock_s3
def test_update_configuration():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file2()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function(
FunctionName="testFunction",
@ -1411,7 +1472,7 @@ def test_update_configuration():
@mock_lambda
def test_update_function_zip():
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content_one = get_test_zip_file1()
@ -1466,13 +1527,16 @@ def test_update_function_zip():
@mock_lambda
@mock_s3
def test_update_function_s3():
s3_conn = boto3.client("s3", "us-west-2")
s3_conn.create_bucket(Bucket="test-bucket")
s3_conn = boto3.client("s3", _lambda_region)
s3_conn.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": _lambda_region},
)
zip_content = get_test_zip_file1()
s3_conn.put_object(Bucket="test-bucket", Key="test.zip", Body=zip_content)
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
fxn = conn.create_function(
FunctionName="testFunctionS3",
@ -1553,7 +1617,7 @@ def test_create_function_with_unknown_arn():
def create_invalid_lambda(role):
conn = boto3.client("lambda", "us-west-2")
conn = boto3.client("lambda", _lambda_region)
zip_content = get_test_zip_file1()
with assert_raises(ClientError) as err:
conn.create_function(
@ -1572,7 +1636,7 @@ def create_invalid_lambda(role):
def get_role_name():
with mock_iam():
iam = boto3.client("iam", region_name="us-west-2")
iam = boto3.client("iam", region_name=_lambda_region)
try:
return iam.get_role(RoleName="my-role")["Role"]["Arn"]
except ClientError:


@ -94,7 +94,7 @@ def test_lambda_can_be_deleted_by_cloudformation():
# Verify function was deleted
with assert_raises(ClientError) as e:
lmbda.get_function(FunctionName=created_fn_name)
e.exception.response["Error"]["Code"].should.equal("404")
e.exception.response["Error"]["Code"].should.equal("ResourceNotFoundException")
def create_stack(cf, s3):


@ -143,7 +143,7 @@ def test_create_stack_with_notification_arn():
@mock_s3_deprecated
def test_create_stack_from_s3_url():
s3_conn = boto.s3.connect_to_region("us-west-1")
bucket = s3_conn.create_bucket("foobar")
bucket = s3_conn.create_bucket("foobar", location="us-west-1")
key = boto.s3.key.Key(bucket)
key.key = "template-key"
key.set_contents_from_string(dummy_template_json)


@ -3609,6 +3609,31 @@ def test_update_supports_list_append_maps():
)
@mock_dynamodb2
def test_update_supports_list_append_with_nested_if_not_exists_operation():
dynamo = boto3.resource("dynamodb", region_name="us-west-1")
table_name = "test"
dynamo.create_table(
TableName=table_name,
AttributeDefinitions=[{"AttributeName": "Id", "AttributeType": "S"}],
KeySchema=[{"AttributeName": "Id", "KeyType": "HASH"}],
ProvisionedThroughput={"ReadCapacityUnits": 20, "WriteCapacityUnits": 20},
)
table = dynamo.Table(table_name)
table.put_item(Item={"Id": "item-id", "nest1": {"nest2": {}}})
table.update_item(
Key={"Id": "item-id"},
UpdateExpression="SET nest1.nest2.event_history = list_append(if_not_exists(nest1.nest2.event_history, :empty_list), :new_value)",
ExpressionAttributeValues={":empty_list": [], ":new_value": ["some_value"]},
)
table.get_item(Key={"Id": "item-id"})["Item"].should.equal(
{"Id": "item-id", "nest1": {"nest2": {"event_history": ["some_value"]}}}
)
@mock_dynamodb2
def test_update_catches_invalid_list_append_operation():
client = boto3.client("dynamodb", region_name="us-east-1")


@ -12,6 +12,7 @@ import sure # noqa
from moto import mock_ec2_deprecated, mock_ec2
from moto.ec2.models import AMIS, OWNER_ID
from moto.iam.models import ACCOUNT_ID
from tests.helpers import requires_boto_gte
@ -251,6 +252,19 @@ def test_ami_pulls_attributes_from_instance():
image.kernel_id.should.equal("test-kernel")
@mock_ec2_deprecated
def test_ami_uses_account_id_if_valid_access_key_is_supplied():
access_key = "AKIAXXXXXXXXXXXXXXXX"
conn = boto.connect_ec2(access_key, "the_secret")
reservation = conn.run_instances("ami-1234abcd")
instance = reservation.instances[0]
instance.modify_attribute("kernel", "test-kernel")
image_id = conn.create_image(instance.id, "test-ami", "this is a test ami")
images = conn.get_all_images(owners=["self"])
[(ami.id, ami.owner_id) for ami in images].should.equal([(image_id, ACCOUNT_ID)])
@mock_ec2_deprecated
def test_ami_filters():
conn = boto.connect_ec2("the_key", "the_secret")
@ -773,7 +787,7 @@ def test_ami_filter_wildcard():
instance.create_image(Name="not-matching-image")
my_images = ec2_client.describe_images(
Owners=["111122223333"], Filters=[{"Name": "name", "Values": ["test*"]}]
Owners=[ACCOUNT_ID], Filters=[{"Name": "name", "Values": ["test*"]}]
)["Images"]
my_images.should.have.length_of(1)


@ -21,7 +21,10 @@ def test_get_resources_s3():
# Create 4 buckets
for i in range(1, 5):
i_str = str(i)
s3_client.create_bucket(Bucket="test_bucket" + i_str)
s3_client.create_bucket(
Bucket="test_bucket" + i_str,
CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
)
s3_client.put_bucket_tagging(
Bucket="test_bucket" + i_str,
Tagging={"TagSet": [{"Key": "key" + i_str, "Value": "value" + i_str}]},


@ -27,6 +27,7 @@ from parameterized import parameterized
import six
import requests
import tests.backport_assert_raises # noqa
from moto.s3.responses import DEFAULT_REGION_NAME
from nose import SkipTest
from nose.tools import assert_raises
@ -68,7 +69,7 @@ class MyModel(object):
self.value = value
def save(self):
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.put_object(Bucket="mybucket", Key=self.name, Body=self.value)
@ -119,7 +120,7 @@ def test_append_to_value__empty_key():
@mock_s3
def test_my_model_save():
# Create Bucket so that test can run
conn = boto3.resource("s3", region_name="us-east-1")
conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
conn.create_bucket(Bucket="mybucket")
####################################
@ -133,7 +134,7 @@ def test_my_model_save():
@mock_s3
def test_key_etag():
conn = boto3.resource("s3", region_name="us-east-1")
conn = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
conn.create_bucket(Bucket="mybucket")
model_instance = MyModel("steve", "is awesome")
@ -519,9 +520,9 @@ def test_bucket_with_dash():
def test_create_existing_bucket():
"Trying to create a bucket that already exists should raise an Error"
conn = boto.s3.connect_to_region("us-west-2")
conn.create_bucket("foobar")
conn.create_bucket("foobar", location="us-west-2")
with assert_raises(S3CreateError):
conn.create_bucket("foobar")
conn.create_bucket("foobar", location="us-west-2")
@mock_s3_deprecated
@ -535,7 +536,7 @@ def test_create_existing_bucket_in_us_east_1():
us-east-1. In the us-east-1 region you will get 200 OK, but it is a no-op (if
the bucket exists, Amazon S3 will not do anything).
"""
conn = boto.s3.connect_to_region("us-east-1")
conn = boto.s3.connect_to_region(DEFAULT_REGION_NAME)
conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar")
bucket.name.should.equal("foobar")
@ -544,7 +545,7 @@ def test_create_existing_bucket_in_us_east_1():
@mock_s3_deprecated
def test_other_region():
conn = S3Connection("key", "secret", host="s3-website-ap-southeast-2.amazonaws.com")
conn.create_bucket("foobar")
conn.create_bucket("foobar", location="ap-southeast-2")
list(conn.get_bucket("foobar").get_all_keys()).should.equal([])
@ -995,7 +996,9 @@ def test_bucket_acl_switching():
def test_s3_object_in_public_bucket():
s3 = boto3.resource("s3")
bucket = s3.Bucket("test-bucket")
bucket.create(ACL="public-read")
bucket.create(
ACL="public-read", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
bucket.put_object(Body=b"ABCD", Key="file.txt")
s3_anonymous = boto3.resource("s3")
@ -1026,7 +1029,9 @@ def test_s3_object_in_public_bucket():
def test_s3_object_in_private_bucket():
s3 = boto3.resource("s3")
bucket = s3.Bucket("test-bucket")
bucket.create(ACL="private")
bucket.create(
ACL="private", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
bucket.put_object(ACL="private", Body=b"ABCD", Key="file.txt")
s3_anonymous = boto3.resource("s3")
@ -1086,19 +1091,51 @@ def test_setting_content_encoding():
@mock_s3_deprecated
def test_bucket_location():
conn = boto.s3.connect_to_region("us-west-2")
bucket = conn.create_bucket("mybucket")
bucket = conn.create_bucket("mybucket", location="us-west-2")
bucket.get_location().should.equal("us-west-2")
@mock_s3
def test_bucket_location_us_east_1():
cli = boto3.client("s3")
def test_bucket_location_default():
cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
# No LocationConstraint ==> us-east-1
cli.create_bucket(Bucket=bucket_name)
cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(None)
@mock_s3
def test_bucket_location_nondefault():
cli = boto3.client("s3", region_name="eu-central-1")
bucket_name = "mybucket"
# LocationConstraint set for non-default regions
resp = cli.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": "eu-central-1"},
)
cli.get_bucket_location(Bucket=bucket_name)["LocationConstraint"].should.equal(
"eu-central-1"
)
# Test uses current Region to determine whether to throw an error
# Region is retrieved based on current URL
# URL will always be localhost in Server Mode, so can't run it there
if not settings.TEST_SERVER_MODE:
@mock_s3
def test_s3_location_should_error_outside_useast1():
s3 = boto3.client("s3", region_name="eu-west-1")
bucket_name = "asdfasdfsdfdsfasda"
with assert_raises(ClientError) as e:
s3.create_bucket(Bucket=bucket_name)
e.exception.response["Error"]["Message"].should.equal(
"The unspecified location constraint is incompatible for the region specific endpoint this request was sent to."
)
@mock_s3_deprecated
def test_ranged_get():
conn = boto.connect_s3()
@ -1222,7 +1259,7 @@ def test_key_with_trailing_slash_in_ordinary_calling_format():
@mock_s3
def test_boto3_key_etag():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
resp = s3.get_object(Bucket="mybucket", Key="steve")
@ -1231,7 +1268,7 @@ def test_boto3_key_etag():
@mock_s3
def test_website_redirect_location():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")
@ -1248,7 +1285,7 @@ def test_website_redirect_location():
@mock_s3
def test_boto3_list_objects_truncated_response():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
@ -1294,7 +1331,7 @@ def test_boto3_list_objects_truncated_response():
@mock_s3
def test_boto3_list_keys_xml_escaped():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
key_name = "Q&A.txt"
s3.put_object(Bucket="mybucket", Key=key_name, Body=b"is awesome")
@ -1314,7 +1351,7 @@ def test_boto3_list_keys_xml_escaped():
@mock_s3
def test_boto3_list_objects_v2_common_prefix_pagination():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
max_keys = 1
@ -1343,7 +1380,7 @@ def test_boto3_list_objects_v2_common_prefix_pagination():
@mock_s3
def test_boto3_list_objects_v2_truncated_response():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
@ -1400,7 +1437,7 @@ def test_boto3_list_objects_v2_truncated_response():
@mock_s3
def test_boto3_list_objects_v2_truncated_response_start_after():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="one", Body=b"1")
s3.put_object(Bucket="mybucket", Key="two", Body=b"22")
@ -1442,7 +1479,7 @@ def test_boto3_list_objects_v2_truncated_response_start_after():
@mock_s3
def test_boto3_list_objects_v2_fetch_owner():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="one", Body=b"11")
@ -1456,7 +1493,7 @@ def test_boto3_list_objects_v2_fetch_owner():
@mock_s3
def test_boto3_list_objects_v2_truncate_combined_keys_and_folders():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
s3.put_object(Bucket="mybucket", Key="1/2", Body="")
s3.put_object(Bucket="mybucket", Key="2", Body="")
@ -1486,7 +1523,7 @@ def test_boto3_list_objects_v2_truncate_combined_keys_and_folders():
@mock_s3
def test_boto3_bucket_create():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="blah")
s3.Object("blah", "hello.txt").put(Body="some text")
@ -1511,10 +1548,11 @@ def test_bucket_create_duplicate():
@mock_s3
def test_bucket_create_force_us_east_1():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
with assert_raises(ClientError) as exc:
s3.create_bucket(
Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "us-east-1"}
Bucket="blah",
CreateBucketConfiguration={"LocationConstraint": DEFAULT_REGION_NAME},
)
exc.exception.response["Error"]["Code"].should.equal("InvalidLocationConstraint")
@ -1522,7 +1560,9 @@ def test_bucket_create_force_us_east_1():
@mock_s3
def test_boto3_bucket_create_eu_central():
s3 = boto3.resource("s3", region_name="eu-central-1")
s3.create_bucket(Bucket="blah")
s3.create_bucket(
Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-central-1"}
)
s3.Object("blah", "hello.txt").put(Body="some text")
@ -1533,7 +1573,7 @@ def test_boto3_bucket_create_eu_central():
@mock_s3
def test_boto3_head_object():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="blah")
s3.Object("blah", "hello.txt").put(Body="some text")
@ -1551,7 +1591,7 @@ def test_boto3_head_object():
@mock_s3
def test_boto3_bucket_deletion():
cli = boto3.client("s3", region_name="us-east-1")
cli = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
cli.create_bucket(Bucket="foobar")
cli.put_object(Bucket="foobar", Key="the-key", Body="some value")
@ -1582,7 +1622,7 @@ def test_boto3_bucket_deletion():
@mock_s3
def test_boto3_get_object():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="blah")
s3.Object("blah", "hello.txt").put(Body="some text")
@ -1599,7 +1639,7 @@ def test_boto3_get_object():
@mock_s3
def test_boto3_get_missing_object_with_part_number():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="blah")
with assert_raises(ClientError) as e:
@ -1612,7 +1652,7 @@ def test_boto3_get_missing_object_with_part_number():
@mock_s3
def test_boto3_head_object_with_versioning():
s3 = boto3.resource("s3", region_name="us-east-1")
s3 = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
bucket = s3.create_bucket(Bucket="blah")
bucket.Versioning().enable()
@ -1642,7 +1682,7 @@ def test_boto3_head_object_with_versioning():
@mock_s3
def test_boto3_copy_object_with_versioning():
client = boto3.client("s3", region_name="us-east-1")
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(
Bucket="blah", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
@ -1706,7 +1746,7 @@ def test_boto3_copy_object_with_versioning():
@mock_s3
def test_boto3_copy_object_from_unversioned_to_versioned_bucket():
client = boto3.client("s3", region_name="us-east-1")
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(
Bucket="src", CreateBucketConfiguration={"LocationConstraint": "eu-west-1"}
@ -1730,7 +1770,7 @@ def test_boto3_copy_object_from_unversioned_to_versioned_bucket():
@mock_s3
def test_boto3_deleted_versionings_list():
client = boto3.client("s3", region_name="us-east-1")
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(Bucket="blah")
client.put_bucket_versioning(
@ -1747,7 +1787,7 @@ def test_boto3_deleted_versionings_list():
@mock_s3
def test_boto3_delete_versioned_bucket():
client = boto3.client("s3", region_name="us-east-1")
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(Bucket="blah")
client.put_bucket_versioning(
@ -1762,7 +1802,7 @@ def test_boto3_delete_versioned_bucket():
@mock_s3
def test_boto3_get_object_if_modified_since():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
s3.create_bucket(Bucket=bucket_name)
@ -1782,7 +1822,7 @@ def test_boto3_get_object_if_modified_since():
@mock_s3
def test_boto3_head_object_if_modified_since():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "blah"
s3.create_bucket(Bucket=bucket_name)
@ -1804,7 +1844,7 @@ def test_boto3_head_object_if_modified_since():
@reduced_min_part_size
def test_boto3_multipart_etag():
# Create Bucket so that test can run
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
upload_id = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")["UploadId"]
@ -1848,7 +1888,7 @@ def test_boto3_multipart_etag():
@mock_s3
@reduced_min_part_size
def test_boto3_multipart_part_size():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="mybucket")
mpu = s3.create_multipart_upload(Bucket="mybucket", Key="the-key")
@ -1883,7 +1923,7 @@ def test_boto3_multipart_part_size():
@mock_s3
def test_boto3_put_object_with_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -1897,7 +1937,7 @@ def test_boto3_put_object_with_tagging():
@mock_s3
def test_boto3_put_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
@ -1944,7 +1984,7 @@ def test_boto3_put_bucket_tagging():
@mock_s3
def test_boto3_get_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_tagging(
@ -1975,7 +2015,7 @@ def test_boto3_get_bucket_tagging():
@mock_s3
def test_boto3_delete_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
@ -2002,7 +2042,7 @@ def test_boto3_delete_bucket_tagging():
@mock_s3
def test_boto3_put_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
@ -2062,7 +2102,7 @@ def test_boto3_put_bucket_cors():
@mock_s3
def test_boto3_get_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
@ -2103,7 +2143,7 @@ def test_boto3_get_bucket_cors():
@mock_s3
def test_boto3_delete_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_cors(
@ -2127,7 +2167,7 @@ def test_boto3_delete_bucket_cors():
@mock_s3
def test_put_bucket_acl_body():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="bucket")
bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"]
s3.put_bucket_acl(
@ -2225,7 +2265,7 @@ def test_put_bucket_acl_body():
@mock_s3
def test_put_bucket_notification():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="bucket")
# With no configuration:
@ -2421,7 +2461,7 @@ def test_put_bucket_notification():
@mock_s3
def test_put_bucket_notification_errors():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket="bucket")
# With incorrect ARNs:
@ -2488,7 +2528,7 @@ def test_put_bucket_notification_errors():
@mock_s3
def test_boto3_put_bucket_logging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
log_bucket = "logbucket"
wrong_region_bucket = "wrongregionlogbucket"
@ -2667,7 +2707,7 @@ def test_boto3_put_bucket_logging():
@mock_s3
def test_boto3_put_object_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -2711,7 +2751,7 @@ def test_boto3_put_object_tagging():
@mock_s3
def test_boto3_put_object_tagging_on_earliest_version():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -2778,7 +2818,7 @@ def test_boto3_put_object_tagging_on_earliest_version():
@mock_s3
def test_boto3_put_object_tagging_on_both_version():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -2858,7 +2898,7 @@ def test_boto3_put_object_tagging_on_both_version():
@mock_s3
def test_boto3_put_object_tagging_with_single_tag():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -2876,7 +2916,7 @@ def test_boto3_put_object_tagging_with_single_tag():
@mock_s3
def test_boto3_get_object_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-tags"
s3.create_bucket(Bucket=bucket_name)
@ -2905,7 +2945,7 @@ def test_boto3_get_object_tagging():
@mock_s3
def test_boto3_list_object_versions():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
s3.create_bucket(Bucket=bucket_name)
@ -2927,7 +2967,7 @@ def test_boto3_list_object_versions():
@mock_s3
def test_boto3_list_object_versions_with_versioning_disabled():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
s3.create_bucket(Bucket=bucket_name)
@ -2950,7 +2990,7 @@ def test_boto3_list_object_versions_with_versioning_disabled():
@mock_s3
def test_boto3_list_object_versions_with_versioning_enabled_late():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
s3.create_bucket(Bucket=bucket_name)
@ -2978,7 +3018,7 @@ def test_boto3_list_object_versions_with_versioning_enabled_late():
@mock_s3
def test_boto3_bad_prefix_list_object_versions():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions"
bad_prefix = "key-that-does-not-exist"
@ -2997,7 +3037,7 @@ def test_boto3_bad_prefix_list_object_versions():
@mock_s3
def test_boto3_delete_markers():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions-and-unicode-ó"
s3.create_bucket(Bucket=bucket_name)
@ -3040,7 +3080,7 @@ def test_boto3_delete_markers():
@mock_s3
def test_boto3_multiple_delete_markers():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
key = "key-with-versions-and-unicode-ó"
s3.create_bucket(Bucket=bucket_name)
@ -3091,7 +3131,7 @@ def test_boto3_multiple_delete_markers():
def test_get_stream_gzipped():
payload = b"this is some stuff here"
s3_client = boto3.client("s3", region_name="us-east-1")
s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3_client.create_bucket(Bucket="moto-tests")
buffer_ = BytesIO()
with GzipFile(fileobj=buffer_, mode="w") as f:
@ -3129,7 +3169,7 @@ TEST_XML = """\
@mock_s3
def test_boto3_bucket_name_too_long():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with assert_raises(ClientError) as exc:
s3.create_bucket(Bucket="x" * 64)
exc.exception.response["Error"]["Code"].should.equal("InvalidBucketName")
@ -3137,7 +3177,7 @@ def test_boto3_bucket_name_too_long():
@mock_s3
def test_boto3_bucket_name_too_short():
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
with assert_raises(ClientError) as exc:
s3.create_bucket(Bucket="x" * 2)
exc.exception.response["Error"]["Code"].should.equal("InvalidBucketName")
@ -3146,7 +3186,7 @@ def test_boto3_bucket_name_too_short():
@mock_s3
def test_accelerated_none_when_unspecified():
bucket_name = "some_bucket"
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket=bucket_name)
resp = s3.get_bucket_accelerate_configuration(Bucket=bucket_name)
resp.shouldnt.have.key("Status")
@ -3155,7 +3195,7 @@ def test_accelerated_none_when_unspecified():
@mock_s3
def test_can_enable_bucket_acceleration():
bucket_name = "some_bucket"
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket=bucket_name)
resp = s3.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
@ -3171,7 +3211,7 @@ def test_can_enable_bucket_acceleration():
@mock_s3
def test_can_suspend_bucket_acceleration():
bucket_name = "some_bucket"
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket=bucket_name)
resp = s3.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"}
@ -3191,7 +3231,10 @@ def test_can_suspend_bucket_acceleration():
def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
bucket_name = "some_bucket"
s3 = boto3.client("s3")
s3.create_bucket(Bucket=bucket_name)
s3.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={"LocationConstraint": "us-west-1"},
)
resp = s3.put_bucket_accelerate_configuration(
Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"}
)
@ -3205,7 +3248,7 @@ def test_suspending_acceleration_on_not_configured_bucket_does_nothing():
@mock_s3
def test_accelerate_configuration_status_validation():
bucket_name = "some_bucket"
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket=bucket_name)
with assert_raises(ClientError) as exc:
s3.put_bucket_accelerate_configuration(
@ -3217,7 +3260,7 @@ def test_accelerate_configuration_status_validation():
@mock_s3
def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
bucket_name = "some.bucket.with.dots"
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
s3.create_bucket(Bucket=bucket_name)
with assert_raises(ClientError) as exc:
s3.put_bucket_accelerate_configuration(
@ -3227,7 +3270,7 @@ def test_accelerate_configuration_is_not_supported_when_bucket_name_has_dots():
def store_and_read_back_a_key(key):
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
body = b"Some body"
@ -3255,7 +3298,7 @@ def test_root_dir_with_empty_name_works():
)
@mock_s3
def test_delete_objects_with_url_encoded_key(key):
s3 = boto3.client("s3", region_name="us-east-1")
s3 = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
bucket_name = "mybucket"
body = b"Some body"
@ -3282,7 +3325,7 @@ def test_delete_objects_with_url_encoded_key(key):
@mock_s3
@mock_config
def test_public_access_block():
client = boto3.client("s3")
client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
client.create_bucket(Bucket="mybucket")
# Try to get the public access block (should not exist by default)
@ -3349,7 +3392,7 @@ def test_public_access_block():
assert ce.exception.response["ResponseMetadata"]["HTTPStatusCode"] == 400
# Test that things work with AWS Config:
config_client = boto3.client("config", region_name="us-east-1")
config_client = boto3.client("config", region_name=DEFAULT_REGION_NAME)
result = config_client.get_resource_config_history(
resourceType="AWS::S3::Bucket", resourceId="mybucket"
)


@ -16,7 +16,7 @@ from moto import mock_s3_deprecated, mock_s3
@mock_s3_deprecated
def test_lifecycle_create():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
lifecycle.add_rule("myid", "", "Enabled", 30)
@ -33,7 +33,9 @@ def test_lifecycle_create():
@mock_s3
def test_lifecycle_with_filters():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Create a lifecycle rule with a Filter (no tags):
lfc = {
@ -245,7 +247,9 @@ def test_lifecycle_with_filters():
@mock_s3
def test_lifecycle_with_eodm():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -293,7 +297,9 @@ def test_lifecycle_with_eodm():
@mock_s3
def test_lifecycle_with_nve():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -327,7 +333,9 @@ def test_lifecycle_with_nve():
@mock_s3
def test_lifecycle_with_nvt():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -393,7 +401,9 @@ def test_lifecycle_with_nvt():
@mock_s3
def test_lifecycle_with_aimu():
client = boto3.client("s3")
client.create_bucket(Bucket="bucket")
client.create_bucket(
Bucket="bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
lfc = {
"Rules": [
@ -432,7 +442,7 @@ def test_lifecycle_with_aimu():
@mock_s3_deprecated
def test_lifecycle_with_glacier_transition():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
transition = Transition(days=30, storage_class="GLACIER")
@ -451,7 +461,7 @@ def test_lifecycle_with_glacier_transition():
@mock_s3_deprecated
def test_lifecycle_multi():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
date = "2022-10-12T00:00:00.000Z"
sc = "GLACIER"
@ -493,7 +503,7 @@ def test_lifecycle_multi():
@mock_s3_deprecated
def test_lifecycle_delete():
conn = boto.s3.connect_to_region("us-west-1")
bucket = conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar", location="us-west-1")
lifecycle = Lifecycle()
lifecycle.add_rule(expiration=30)


@ -11,7 +11,7 @@ from moto import mock_s3
@mock_s3
def test_s3_storage_class_standard():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
# add an object to the bucket with standard storage
@ -26,7 +26,9 @@ def test_s3_storage_class_standard():
@mock_s3
def test_s3_storage_class_infrequent_access():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-2"}
)
# add an object to the bucket with standard storage
@ -46,7 +48,9 @@ def test_s3_storage_class_infrequent_access():
def test_s3_storage_class_intelligent_tiering():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-east-2"}
)
s3.put_object(
Bucket="Bucket",
Key="my_key_infrequent",
@ -61,7 +65,7 @@ def test_s3_storage_class_intelligent_tiering():
@mock_s3
def test_s3_storage_class_copy():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -86,7 +90,7 @@ def test_s3_storage_class_copy():
@mock_s3
def test_s3_invalid_copied_storage_class():
s3 = boto3.client("s3")
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="Bucket")
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="STANDARD"
@ -119,7 +123,9 @@ def test_s3_invalid_copied_storage_class():
@mock_s3
def test_s3_invalid_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
# Try to add an object with an invalid storage class
with assert_raises(ClientError) as err:
@ -137,7 +143,9 @@ def test_s3_invalid_storage_class():
@mock_s3
def test_s3_default_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(Bucket="Bucket", Key="First_Object", Body="Body")
@ -150,7 +158,9 @@ def test_s3_default_storage_class():
@mock_s3
def test_s3_copy_object_error_for_glacier_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="GLACIER"
@ -169,7 +179,9 @@ def test_s3_copy_object_error_for_glacier_storage_class():
@mock_s3
def test_s3_copy_object_error_for_deep_archive_storage_class():
s3 = boto3.client("s3")
s3.create_bucket(Bucket="Bucket")
s3.create_bucket(
Bucket="Bucket", CreateBucketConfiguration={"LocationConstraint": "us-west-1"}
)
s3.put_object(
Bucket="Bucket", Key="First_Object", Body="Body", StorageClass="DEEP_ARCHIVE"