Merge pull request #2464 from sethblack/lambda-update-function-code-and-configuration

added UpdateFunctionCode and UpdateFunctionConfiguration and associated tests
This commit is contained in:
Steve Pulec 2019-10-10 17:01:31 -05:00 committed by GitHub
commit 359d6c9099
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 280 additions and 1 deletions

View File

@@ -273,6 +273,70 @@ class LambdaFunction(BaseModel):
"Configuration": self.get_configuration(),
}
def update_configuration(self, config_updates):
    """Apply the supported UpdateFunctionConfiguration fields to this
    function and return the resulting configuration dict.

    Keys not in the supported set are silently ignored, matching the
    original if/elif chain's behavior.
    """
    # API field name -> attribute on this function object.
    attribute_for_key = {
        "Description": "description",
        "Handler": "handler",
        "MemorySize": "memory_size",
        "Role": "role",
        "Runtime": "run_time",
        "Timeout": "timeout",
        "VpcConfig": "vpc_config",
    }
    for key, value in config_updates.items():
        attr = attribute_for_key.get(key)
        if attr is not None:
            setattr(self, attr, value)
    return self.get_configuration()
def update_function_code(self, updated_spec):
    """Handle UpdateFunctionCode: replace this function's code either
    from an inline ZipFile or from an S3 object, then return the
    updated configuration dict.

    A truthy DryRun changes nothing and just echoes the configuration.
    """
    if 'DryRun' in updated_spec and updated_spec['DryRun']:
        return self.get_configuration()

    if 'ZipFile' in updated_spec:
        self.code['ZipFile'] = updated_spec['ZipFile']

        # using the "hackery" from __init__ because it seems to work
        # TODOs and FIXMEs included, because they'll need to be fixed
        # in both places now

        # Base64-decode the payload; a str input must be encoded to
        # bytes first, hence the two attempts (py2/py3 split).
        try:
            to_unzip_code = base64.b64decode(
                bytes(updated_spec['ZipFile'], 'utf-8'))
        except Exception:
            to_unzip_code = base64.b64decode(updated_spec['ZipFile'])

        self.code_bytes = to_unzip_code
        self.code_size = len(to_unzip_code)
        self.code_sha_256 = hashlib.sha256(to_unzip_code).hexdigest()

        # TODO: we should be putting this in a lambda bucket
        self.code['UUID'] = str(uuid.uuid4())
        self.code['S3Key'] = '{}-{}'.format(self.function_name, self.code['UUID'])
    elif 'S3Bucket' in updated_spec and 'S3Key' in updated_spec:
        key = None
        try:
            # FIXME: does not validate bucket region
            key = s3_backend.get_key(updated_spec['S3Bucket'], updated_spec['S3Key'])
        except MissingBucket:
            if do_validate_s3():
                raise ValueError(
                    "InvalidParameterValueException",
                    "Error occurred while GetObject. S3 Error Code: NoSuchBucket. S3 Error Message: The specified bucket does not exist")
        except MissingKey:
            if do_validate_s3():
                raise ValueError(
                    "InvalidParameterValueException",
                    "Error occurred while GetObject. S3 Error Code: NoSuchKey. S3 Error Message: The specified key does not exist.")
        # NOTE(review): when S3 validation is off and the object is
        # missing, key stays None and the existing code is silently
        # left unchanged — confirm this is intended.
        if key:
            self.code_bytes = key.value
            self.code_size = key.size
            self.code_sha_256 = hashlib.sha256(key.value).hexdigest()
    return self.get_configuration()
@staticmethod
def convert(s):
try:

View File

@@ -122,6 +122,20 @@ class LambdaResponse(BaseResponse):
if request.method == 'POST':
return self._add_policy(request, full_url, headers)
def configuration(self, request, full_url, headers):
    """Dispatch the /configuration endpoint; only PUT is supported."""
    self.setup_class(request, full_url, headers)
    if request.method != 'PUT':
        raise ValueError("Cannot handle request")
    return self._put_configuration(request)
def code(self, request, full_url, headers):
    """Dispatch the /code endpoint; only PUT is supported."""
    self.setup_class(request, full_url, headers)
    if request.method != 'PUT':
        raise ValueError("Cannot handle request")
    return self._put_code()
def _add_policy(self, request, full_url, headers):
path = request.path if hasattr(request, 'path') else path_url(request.url)
function_name = path.split('/')[-2]
@@ -308,3 +322,30 @@ class LambdaResponse(BaseResponse):
return 204, {}, "{}"
else:
return 404, {}, "{}"
def _put_configuration(self, request):
function_name = self.path.rsplit('/', 2)[-2]
qualifier = self._get_param('Qualifier', None)
fn = self.lambda_backend.get_function(function_name, qualifier)
if fn:
config = fn.update_configuration(self.json_body)
return 200, {}, json.dumps(config)
else:
return 404, {}, "{}"
def _put_code(self):
function_name = self.path.rsplit('/', 2)[-2]
qualifier = self._get_param('Qualifier', None)
fn = self.lambda_backend.get_function(function_name, qualifier)
if fn:
if self.json_body.get('Publish', False):
fn = self.lambda_backend.publish_function(function_name)
config = fn.update_function_code(self.json_body)
return 200, {}, json.dumps(config)
else:
return 404, {}, "{}"

View File

@@ -16,5 +16,7 @@ url_paths = {
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invocations/?$': response.invoke,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invoke-async/?$': response.invoke_async,
r'{0}/(?P<api_version>[^/]+)/tags/(?P<resource_arn>.+)': response.tag,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/policy/?$': response.policy
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/policy/?$': response.policy,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/configuration/?$': response.configuration,
r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/code/?$': response.code
}

View File

@@ -1245,3 +1245,175 @@ def test_delete_event_source_mapping():
assert response['State'] == 'Deleting'
conn.get_event_source_mapping.when.called_with(UUID=response['UUID'])\
.should.throw(botocore.client.ClientError)
@mock_lambda
@mock_s3
def test_update_configuration():
    """Create a function from S3-hosted code, then update several
    configuration fields and verify the response reflects them."""
    bucket = 'test-bucket'
    s3_client = boto3.client('s3', 'us-west-2')
    s3_client.create_bucket(Bucket=bucket)
    s3_client.put_object(Bucket=bucket, Key='test.zip', Body=get_test_zip_file2())

    lambda_client = boto3.client('lambda', 'us-west-2')
    created = lambda_client.create_function(
        FunctionName='testFunction',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={'S3Bucket': bucket, 'S3Key': 'test.zip'},
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Sanity-check the initial configuration before updating it.
    assert created['Description'] == 'test lambda function'
    assert created['Handler'] == 'lambda_function.lambda_handler'
    assert created['MemorySize'] == 128
    assert created['Runtime'] == 'python2.7'
    assert created['Timeout'] == 3

    updated = lambda_client.update_function_configuration(
        FunctionName='testFunction',
        Description='updated test lambda function',
        Handler='lambda_function.new_lambda_handler',
        Runtime='python3.6',
        Timeout=7
    )

    assert updated['ResponseMetadata']['HTTPStatusCode'] == 200
    assert updated['Description'] == 'updated test lambda function'
    assert updated['Handler'] == 'lambda_function.new_lambda_handler'
    # MemorySize was not part of the update and must be unchanged.
    assert updated['MemorySize'] == 128
    assert updated['Runtime'] == 'python3.6'
    assert updated['Timeout'] == 7
@mock_lambda
def test_update_function_zip():
    """UpdateFunctionCode with an inline ZipFile and Publish=True should
    produce version 2 carrying the new code's hash and size."""
    lambda_client = boto3.client('lambda', 'us-west-2')
    first_zip = get_test_zip_file1()
    lambda_client.create_function(
        FunctionName='testFunctionZip',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={'ZipFile': first_zip},
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    second_zip = get_test_zip_file2()
    lambda_client.update_function_code(
        FunctionName='testFunctionZip',
        ZipFile=second_zip,
        Publish=True
    )

    response = lambda_client.get_function(
        FunctionName='testFunctionZip',
        Qualifier='2'
    )
    # LastModified varies per run, so drop it before comparing.
    response['Configuration'].pop('LastModified')

    response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    assert len(response['Code']) == 2
    assert response['Code']['RepositoryType'] == 'S3'
    assert response['Code']['Location'].startswith('s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com'.format(_lambda_region))
    response['Configuration'].should.equal(
        {
            "CodeSha256": hashlib.sha256(second_zip).hexdigest(),
            "CodeSize": len(second_zip),
            "Description": "test lambda function",
            "FunctionArn": 'arn:aws:lambda:{}:123456789012:function:testFunctionZip:2'.format(_lambda_region),
            "FunctionName": "testFunctionZip",
            "Handler": "lambda_function.lambda_handler",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '2',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
    )
@mock_lambda
@mock_s3
def test_update_function_s3():
    """UpdateFunctionCode pointing at a new S3 object with Publish=True
    should produce version 2 carrying the new code's hash and size."""
    bucket = 'test-bucket'
    s3_client = boto3.client('s3', 'us-west-2')
    s3_client.create_bucket(Bucket=bucket)
    s3_client.put_object(Bucket=bucket, Key='test.zip', Body=get_test_zip_file1())

    lambda_client = boto3.client('lambda', 'us-west-2')
    lambda_client.create_function(
        FunctionName='testFunctionS3',
        Runtime='python2.7',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={'S3Bucket': bucket, 'S3Key': 'test.zip'},
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True,
    )

    # Upload replacement code under a different key, then point the
    # function at it.
    second_zip = get_test_zip_file2()
    s3_client.put_object(Bucket=bucket, Key='test2.zip', Body=second_zip)
    lambda_client.update_function_code(
        FunctionName='testFunctionS3',
        S3Bucket=bucket,
        S3Key='test2.zip',
        Publish=True
    )

    response = lambda_client.get_function(
        FunctionName='testFunctionS3',
        Qualifier='2'
    )
    # LastModified varies per run, so drop it before comparing.
    response['Configuration'].pop('LastModified')

    response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
    assert len(response['Code']) == 2
    assert response['Code']['RepositoryType'] == 'S3'
    assert response['Code']['Location'].startswith('s3://awslambda-{0}-tasks.s3-{0}.amazonaws.com'.format(_lambda_region))
    response['Configuration'].should.equal(
        {
            "CodeSha256": hashlib.sha256(second_zip).hexdigest(),
            "CodeSize": len(second_zip),
            "Description": "test lambda function",
            "FunctionArn": 'arn:aws:lambda:{}:123456789012:function:testFunctionS3:2'.format(_lambda_region),
            "FunctionName": "testFunctionS3",
            "Handler": "lambda_function.lambda_handler",
            "MemorySize": 128,
            "Role": "test-iam-role",
            "Runtime": "python2.7",
            "Timeout": 3,
            "Version": '2',
            "VpcConfig": {
                "SecurityGroupIds": [],
                "SubnetIds": [],
            }
        },
    )