Use 'data' attribute of Flask Request object. (#1117)
* Flask Request object does not have a 'body' attribute; changed to 'data'.
* Making moto 'glacier' more AWS 'glacier'-like.
* Fixing Travis errors.
* Check whether the request object has the proper attribute, because HTTPrettyRequest has no 'data' attribute and Python Requests has no 'body' attribute.
* Output changed to match test expectations; sleep to mimic the actual retrieval wait time.
* Amending test_describe_job to reflect the changes.
* Shortened the simulated wait time from 1 minute to a few seconds, and shortened the sleep in the test accordingly.
This commit is contained in:
parent
1a8a4a084d
commit
2faffc96de
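
For context, the heart of the change is a small attribute check: the mocked responses may receive either a Flask/Werkzeug request (which exposes the payload as 'data') or an HTTPrettyRequest (which exposes it as 'body'). A minimal standalone sketch of the pattern applied in the diff below (the helper name is illustrative, not part of the commit):

def get_request_body(request):
    # HTTPrettyRequest exposes the raw payload as 'body'; Flask/Werkzeug
    # requests expose it as 'data'. Prefer whichever attribute exists.
    if hasattr(request, 'body'):
        return request.body
    return request.data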
@@ -2,42 +2,101 @@ from __future__ import unicode_literals
 
 import hashlib
+import datetime
 
 import boto.glacier
 from moto.core import BaseBackend, BaseModel
 from .utils import get_job_id
 
 
-class ArchiveJob(BaseModel):
+class Job(BaseModel):
 
-    def __init__(self, job_id, archive_id):
+    def __init__(self, tier):
+        self.st = datetime.datetime.now()
+
+        if tier.lower() == "expedited":
+            self.et = self.st + datetime.timedelta(seconds=2)
+        elif tier.lower() == "bulk":
+            self.et = self.st + datetime.timedelta(seconds=10)
+        else:
+            # Standard
+            self.et = self.st + datetime.timedelta(seconds=5)
+
+
+class ArchiveJob(Job):
+
+    def __init__(self, job_id, tier, arn, archive_id):
         self.job_id = job_id
+        self.tier = tier
+        self.arn = arn
         self.archive_id = archive_id
+        Job.__init__(self, tier)
 
     def to_dict(self):
-        return {
-            "Action": "InventoryRetrieval",
+        d = {
+            "Action": "ArchiveRetrieval",
             "ArchiveId": self.archive_id,
             "ArchiveSizeInBytes": 0,
             "ArchiveSHA256TreeHash": None,
-            "Completed": True,
-            "CompletionDate": "2013-03-20T17:03:43.221Z",
-            "CreationDate": "2013-03-20T17:03:43.221Z",
-            "InventorySizeInBytes": "0",
+            "Completed": False,
+            "CreationDate": self.st.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+            "InventorySizeInBytes": 0,
             "JobDescription": None,
             "JobId": self.job_id,
             "RetrievalByteRange": None,
             "SHA256TreeHash": None,
             "SNSTopic": None,
-            "StatusCode": "Succeeded",
+            "StatusCode": "InProgress",
             "StatusMessage": None,
-            "VaultARN": None,
+            "VaultARN": self.arn,
+            "Tier": self.tier
         }
+        if datetime.datetime.now() > self.et:
+            d["Completed"] = True
+            d["CompletionDate"] = self.et.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+            d["InventorySizeInBytes"] = 10000
+            d["StatusCode"] = "Succeeded"
+        return d
+
+
+class InventoryJob(Job):
+
+    def __init__(self, job_id, tier, arn):
+        self.job_id = job_id
+        self.tier = tier
+        self.arn = arn
+        Job.__init__(self, tier)
+
+    def to_dict(self):
+        d = {
+            "Action": "InventoryRetrieval",
+            "ArchiveSHA256TreeHash": None,
+            "Completed": False,
+            "CreationDate": self.st.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+            "InventorySizeInBytes": 0,
+            "JobDescription": None,
+            "JobId": self.job_id,
+            "RetrievalByteRange": None,
+            "SHA256TreeHash": None,
+            "SNSTopic": None,
+            "StatusCode": "InProgress",
+            "StatusMessage": None,
+            "VaultARN": self.arn,
+            "Tier": self.tier
+        }
+        if datetime.datetime.now() > self.et:
+            d["Completed"] = True
+            d["CompletionDate"] = self.et.strftime("%Y-%m-%dT%H:%M:%S.000Z")
+            d["InventorySizeInBytes"] = 10000
+            d["StatusCode"] = "Succeeded"
+        return d
 
 
 class Vault(BaseModel):
 
     def __init__(self, vault_name, region):
+        self.st = datetime.datetime.now()
         self.vault_name = vault_name
         self.region = region
         self.archives = {}
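
The new Job base class simulates Glacier's asynchronous retrievals: it records a start time and a tier-dependent completion deadline, and to_dict() only reports Completed/Succeeded once that deadline has passed. A quick illustration, assuming the classes above are importable as shown:

import time
from moto.glacier.models import ArchiveJob  # module path assumed from context

job = ArchiveJob("job-1", "Expedited",
                 "arn:aws:glacier:us-west-2:012345678901:vaults/my_vault",
                 "archive-1")
print(job.to_dict()["StatusCode"])  # "InProgress" immediately after creation
time.sleep(3)                       # Expedited jobs complete after ~2 seconds
print(job.to_dict()["StatusCode"])  # "Succeeded" once the deadline has passed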
@@ -48,29 +107,57 @@ class Vault(BaseModel):
         return "arn:aws:glacier:{0}:012345678901:vaults/{1}".format(self.region, self.vault_name)
 
     def to_dict(self):
-        return {
-            "CreationDate": "2013-03-20T17:03:43.221Z",
-            "LastInventoryDate": "2013-03-20T17:03:43.221Z",
-            "NumberOfArchives": None,
-            "SizeInBytes": None,
+        archives_size = 0
+        for k in self.archives:
+            archives_size += self.archives[k]["size"]
+        d = {
+            "CreationDate": self.st.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+            "LastInventoryDate": self.st.strftime("%Y-%m-%dT%H:%M:%S.000Z"),
+            "NumberOfArchives": len(self.archives),
+            "SizeInBytes": archives_size,
             "VaultARN": self.arn,
             "VaultName": self.vault_name,
         }
+        return d
 
-    def create_archive(self, body):
-        archive_id = hashlib.sha256(body).hexdigest()
-        self.archives[archive_id] = body
+    def create_archive(self, body, description):
+        archive_id = hashlib.md5(body).hexdigest()
+        self.archives[archive_id] = {}
+        self.archives[archive_id]["body"] = body
+        self.archives[archive_id]["size"] = len(body)
+        self.archives[archive_id]["sha256"] = hashlib.sha256(body).hexdigest()
+        self.archives[archive_id]["creation_date"] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000Z")
+        self.archives[archive_id]["description"] = description
         return archive_id
 
     def get_archive_body(self, archive_id):
-        return self.archives[archive_id]
+        return self.archives[archive_id]["body"]
+
+    def get_archive_list(self):
+        archive_list = []
+        for a in self.archives:
+            archive = self.archives[a]
+            aobj = {
+                "ArchiveId": a,
+                "ArchiveDescription": archive["description"],
+                "CreationDate": archive["creation_date"],
+                "Size": archive["size"],
+                "SHA256TreeHash": archive["sha256"]
+            }
+            archive_list.append(aobj)
+        return archive_list
 
     def delete_archive(self, archive_id):
         return self.archives.pop(archive_id)
 
-    def initiate_job(self, archive_id):
+    def initiate_job(self, job_type, tier, archive_id):
         job_id = get_job_id()
-        job = ArchiveJob(job_id, archive_id)
+
+        if job_type == "inventory-retrieval":
+            job = InventoryJob(job_id, tier, self.arn)
+        elif job_type == "archive-retrieval":
+            job = ArchiveJob(job_id, tier, self.arn, archive_id)
+
         self.jobs[job_id] = job
         return job_id
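
Archives are now stored as metadata dicts rather than raw bodies, which is what lets get_archive_list() feed the inventory job and lets Vault.to_dict() report real sizes. A short sketch, under the same module-path assumption:

from moto.glacier.models import Vault  # module path assumed from context

vault = Vault("my_vault", "us-west-2")
archive_id = vault.create_archive(b"some stuff", "some description")
vault.get_archive_body(archive_id)   # b"some stuff"
vault.get_archive_list()[0]["Size"]  # 10, i.e. len(b"some stuff")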
@@ -80,10 +167,24 @@ class Vault(BaseModel):
     def describe_job(self, job_id):
         return self.jobs.get(job_id)
 
+    def job_ready(self, job_id):
+        job = self.describe_job(job_id)
+        jobj = job.to_dict()
+        return jobj["Completed"]
+
     def get_job_output(self, job_id):
         job = self.describe_job(job_id)
-        archive_body = self.get_archive_body(job.archive_id)
-        return archive_body
+        jobj = job.to_dict()
+        if jobj["Action"] == "InventoryRetrieval":
+            archives = self.get_archive_list()
+            return {
+                "VaultARN": self.arn,
+                "InventoryDate": jobj["CompletionDate"],
+                "ArchiveList": archives
+            }
+        else:
+            archive_body = self.get_archive_body(job.archive_id)
+            return archive_body
 
 
 class GlacierBackend(BaseBackend):
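
get_job_output() now branches on the job's Action: inventory jobs return a dict describing the vault's contents, while archive jobs still return the raw body. Together with the new job_ready(), callers can poll instead of assuming instant completion; a hypothetical helper (not part of the diff) to show the intended flow:

import time

def wait_for_job(vault, job_id, poll_seconds=1):
    # job_ready() re-renders the job via to_dict(), so "Completed" flips
    # to True once the tier's simulated deadline has passed.
    while not vault.job_ready(job_id):
        time.sleep(poll_seconds)
    return vault.get_job_output(job_id)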
@@ -109,9 +210,9 @@ class GlacierBackend(BaseBackend):
     def delete_vault(self, vault_name):
         self.vaults.pop(vault_name)
 
-    def initiate_job(self, vault_name, archive_id):
+    def initiate_job(self, vault_name, job_type, tier, archive_id):
         vault = self.get_vault(vault_name)
-        job_id = vault.initiate_job(archive_id)
+        job_id = vault.initiate_job(job_type, tier, archive_id)
         return job_id
 
     def list_jobs(self, vault_name):
@@ -72,17 +72,25 @@ class GlacierResponse(_TemplateEnvironmentMixin):
 
     def _vault_archive_response(self, request, full_url, headers):
         method = request.method
-        body = request.body
+        if hasattr(request, 'body'):
+            body = request.body
+        else:
+            body = request.data
+        description = ""
+        if 'x-amz-archive-description' in request.headers:
+            description = request.headers['x-amz-archive-description']
         parsed_url = urlparse(full_url)
         querystring = parse_qs(parsed_url.query, keep_blank_values=True)
         vault_name = full_url.split("/")[-2]
 
         if method == 'POST':
-            return self._vault_archive_response_post(vault_name, body, querystring, headers)
+            return self._vault_archive_response_post(vault_name, body, description, querystring, headers)
+        else:
+            return 400, headers, "400 Bad Request"
 
-    def _vault_archive_response_post(self, vault_name, body, querystring, headers):
+    def _vault_archive_response_post(self, vault_name, body, description, querystring, headers):
         vault = self.backend.get_vault(vault_name)
-        vault_id = vault.create_archive(body)
+        vault_id = vault.create_archive(body, description)
         headers['x-amz-archive-id'] = vault_id
         return 201, headers, ""
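
The archive description travels in the x-amz-archive-description header, which boto's Layer1 populates from its description argument. A sketch of the round trip under the mock (vault name and payload are illustrative):

from boto.glacier.layer1 import Layer1
from moto import mock_glacier_deprecated

@mock_glacier_deprecated
def upload_with_description():
    conn = Layer1(region_name="us-west-2")
    conn.create_vault("my_vault")
    # Layer1 sends the description as the x-amz-archive-description header,
    # which the handler above stores alongside the archive body.
    response = conn.upload_archive("my_vault", "some stuff", "", "", "some description")
    return response['ArchiveId']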
@@ -110,7 +118,10 @@ class GlacierResponse(_TemplateEnvironmentMixin):
 
     def _vault_jobs_response(self, request, full_url, headers):
         method = request.method
-        body = request.body
+        if hasattr(request, 'body'):
+            body = request.body
+        else:
+            body = request.data
         account_id = full_url.split("/")[1]
         vault_name = full_url.split("/")[-2]
 
@@ -125,11 +136,17 @@ class GlacierResponse(_TemplateEnvironmentMixin):
             })
         elif method == 'POST':
             json_body = json.loads(body.decode("utf-8"))
-            archive_id = json_body['ArchiveId']
-            job_id = self.backend.initiate_job(vault_name, archive_id)
+            job_type = json_body['Type']
+            archive_id = None
+            if 'ArchiveId' in json_body:
+                archive_id = json_body['ArchiveId']
+            if 'Tier' in json_body:
+                tier = json_body["Tier"]
+            else:
+                tier = "Standard"
+            job_id = self.backend.initiate_job(vault_name, job_type, tier, archive_id)
             headers['x-amz-job-id'] = job_id
-            headers[
-                'Location'] = "/{0}/vaults/{1}/jobs/{2}".format(account_id, vault_name, job_id)
+            headers['Location'] = "/{0}/vaults/{1}/jobs/{2}".format(account_id, vault_name, job_id)
             return 202, headers, ""
 
     @classmethod
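
With this change the initiate-job POST body carries 'Type' and an optional 'Tier', falling back to "Standard" when Tier is absent. Continuing the Layer1 sketch above (values mirror the tests; the vault name is illustrative):

conn.initiate_job("my_vault", {
    "ArchiveId": archive_id,        # required only for archive retrieval
    "Type": "archive-retrieval",
    "Tier": "Expedited",            # optional; handler defaults to "Standard"
})
conn.initiate_job("my_vault", {
    "Type": "inventory-retrieval",  # no ArchiveId needed
})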
@@ -155,8 +172,14 @@ class GlacierResponse(_TemplateEnvironmentMixin):
     def _vault_jobs_output_response(self, request, full_url, headers):
         vault_name = full_url.split("/")[-4]
         job_id = full_url.split("/")[-2]
 
         vault = self.backend.get_vault(vault_name)
-        output = vault.get_job_output(job_id)
-        headers['content-type'] = 'application/octet-stream'
-        return 200, headers, output
+        if vault.job_ready(job_id):
+            output = vault.get_job_output(job_id)
+            if isinstance(output, dict):
+                headers['content-type'] = 'application/json'
+                return 200, headers, json.dumps(output)
+            else:
+                headers['content-type'] = 'application/octet-stream'
+                return 200, headers, output
+        else:
+            return 404, headers, "404 Not Found"
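
For a completed inventory-retrieval job, the handler serializes the dict from get_job_output() as JSON; unfinished jobs now get a 404 instead of premature output. The expected payload shape for an inventory job, with illustrative values:

{
    "VaultARN": "arn:aws:glacier:us-west-2:012345678901:vaults/my_vault",
    "InventoryDate": "2013-03-20T17:03:43.221Z",
    "ArchiveList": [
        {
            "ArchiveId": "...",
            "ArchiveDescription": "some description",
            "CreationDate": "2013-03-20T17:03:43.221Z",
            "Size": 10,
            "SHA256TreeHash": "..."
        }
    ]
}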
@@ -1,6 +1,7 @@
 from __future__ import unicode_literals
 
 import json
+import time
 
 from boto.glacier.layer1 import Layer1
 import sure  # noqa
@@ -39,24 +40,11 @@ def test_describe_job():
     job_id = job_response['JobId']
 
     job = conn.describe_job(vault_name, job_id)
-    json.loads(job.read().decode("utf-8")).should.equal({
-        'CompletionDate': '2013-03-20T17:03:43.221Z',
-        'VaultARN': None,
-        'RetrievalByteRange': None,
-        'SHA256TreeHash': None,
-        'Completed': True,
-        'InventorySizeInBytes': '0',
-        'JobId': job_id,
-        'Action': 'InventoryRetrieval',
-        'JobDescription': None,
-        'SNSTopic': None,
-        'ArchiveSizeInBytes': 0,
-        'ArchiveId': archive_id,
-        'ArchiveSHA256TreeHash': None,
-        'CreationDate': '2013-03-20T17:03:43.221Z',
-        'StatusMessage': None,
-        'StatusCode': 'Succeeded',
-    })
+    joboutput = json.loads(job.read().decode("utf-8"))
+
+    joboutput.should.have.key('Tier').which.should.equal('Standard')
+    joboutput.should.have.key('StatusCode').which.should.equal('InProgress')
+    joboutput.should.have.key('VaultARN').which.should.equal('arn:aws:glacier:RegionInfo:us-west-2:012345678901:vaults/my_vault')
 
 
 @mock_glacier_deprecated
@@ -96,5 +84,7 @@ def test_get_job_output():
     })
     job_id = job_response['JobId']
 
+    time.sleep(6)
+
     output = conn.get_job_output(vault_name, job_id)
     output.read().decode("utf-8").should.equal("some stuff")
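
The 6-second sleep is just over the mocked Standard tier's 5-second completion window (see Job.__init__ above): by the time get_job_output runs, job_ready() is True and the endpoint answers 200 rather than 404.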