Implement pagination support for GetLogEvents

* Add nextForwardToken and nextBackwardToken to GetLogEvents response
* Handle end of stream by returning the same token as passed in
Berislav Kovacki 2019-08-07 17:37:53 +02:00
parent 40271d2c4e
commit 5063ffc837
2 changed files with 80 additions and 8 deletions
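Roughly how a client would consume the new tokens, as a minimal sketch against a boto3 CloudWatch Logs client (the group and stream names are illustrative, and the stream is assumed to already contain events):

import boto3

client = boto3.client('logs', 'us-west-2')

# Illustrative names; any existing log group/stream works.
kwargs = dict(logGroupName='test', logStreamName='stream', limit=10)

token = None
while True:
    if token is None:
        resp = client.get_log_events(**kwargs)
    else:
        resp = client.get_log_events(nextToken=token, **kwargs)
    for event in resp['events']:
        print(event['timestamp'], event['message'])
    # End of stream: the same forward token that was passed in comes back,
    # so stop once the token no longer advances.
    if resp['nextForwardToken'] == token:
        break
    token = resp['nextForwardToken']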


@@ -98,17 +98,29 @@ class LogStream:
             return True
 
+        def get_paging_token_from_index(index, back=False):
+            if index is not None:
+                return "b/{:056d}".format(index) if back else "f/{:056d}".format(index)
+            return 0
+
+        def get_index_from_paging_token(token):
+            if token is not None:
+                return int(token[2:])
+            return 0
+
         events = sorted(filter(filter_func, self.events), key=lambda event: event.timestamp, reverse=start_from_head)
-        back_token = next_token
-        if next_token is None:
-            next_token = 0
+        next_index = get_index_from_paging_token(next_token)
+        back_index = next_index
 
-        events_page = [event.to_response_dict() for event in events[next_token: next_token + limit]]
-        next_token += limit
-        if next_token >= len(self.events):
-            next_token = None
+        events_page = [event.to_response_dict() for event in events[next_index: next_index + limit]]
+        if next_index + limit < len(self.events):
+            next_index += limit
 
-        return events_page, back_token, next_token
+        back_index -= limit
+        if back_index <= 0:
+            back_index = 0
+
+        return events_page, get_paging_token_from_index(back_index, True), get_paging_token_from_index(next_index)
 
     def filter_log_events(self, log_group_name, log_stream_names, start_time, end_time, limit, next_token, filter_pattern, interleaved):
         def filter_func(event):
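The two helpers added in the hunk above are what generate and parse the nextForwardToken / nextBackwardToken strings: a token is just a one-letter direction prefix plus a 56-digit zero-padded offset into the sorted event list. A quick round-trip sketch (the offset value is made up for illustration):

# Forward and backward tokens encode a direction and a zero-padded offset.
index = 10
forward = "f/{:056d}".format(index)    # what get_paging_token_from_index(10) yields
backward = "b/{:056d}".format(index)   # the same offset with back=True

# get_index_from_paging_token simply drops the two-character prefix.
assert int(forward[2:]) == index
assert int(backward[2:]) == index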


@@ -162,3 +162,63 @@ def test_delete_retention_policy():
     response = conn.delete_log_group(logGroupName=log_group_name)
+
+
+@mock_logs
+def test_get_log_events():
+    conn = boto3.client('logs', 'us-west-2')
+    log_group_name = 'test'
+    log_stream_name = 'stream'
+
+    conn.create_log_group(logGroupName=log_group_name)
+    conn.create_log_stream(
+        logGroupName=log_group_name,
+        logStreamName=log_stream_name
+    )
+
+    events = [{'timestamp': x, 'message': str(x)} for x in range(20)]
+
+    conn.put_log_events(
+        logGroupName=log_group_name,
+        logStreamName=log_stream_name,
+        logEvents=events
+    )
+
+    resp = conn.get_log_events(
+        logGroupName=log_group_name,
+        logStreamName=log_stream_name,
+        limit=10)
+
+    resp['events'].should.have.length_of(10)
+    resp.should.have.key('nextForwardToken')
+    resp.should.have.key('nextBackwardToken')
+    for i in range(10):
+        resp['events'][i]['timestamp'].should.equal(i)
+        resp['events'][i]['message'].should.equal(str(i))
+
+    next_token = resp['nextForwardToken']
+
+    resp = conn.get_log_events(
+        logGroupName=log_group_name,
+        logStreamName=log_stream_name,
+        nextToken=next_token,
+        limit=10)
+
+    resp['events'].should.have.length_of(10)
+    resp.should.have.key('nextForwardToken')
+    resp.should.have.key('nextBackwardToken')
+    resp['nextForwardToken'].should.equal(next_token)
+    for i in range(10):
+        resp['events'][i]['timestamp'].should.equal(i+10)
+        resp['events'][i]['message'].should.equal(str(i+10))
+
+    resp = conn.get_log_events(
+        logGroupName=log_group_name,
+        logStreamName=log_stream_name,
+        nextToken=resp['nextBackwardToken'],
+        limit=10)
+
+    resp['events'].should.have.length_of(10)
+    resp.should.have.key('nextForwardToken')
+    resp.should.have.key('nextBackwardToken')
+    for i in range(10):
+        resp['events'][i]['timestamp'].should.equal(i)
+        resp['events'][i]['message'].should.equal(str(i))
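For reference, a hand trace of the tokens this test should observe, derived from the model code in the first hunk rather than captured from a run:

# call 1 (no nextToken):     events 0..9
#   nextBackwardToken = 'b/{:056d}'.format(0)    # clamped at the start of the stream
#   nextForwardToken  = 'f/{:056d}'.format(10)
# call 2 (forward token):    events 10..19
#   nextForwardToken comes back unchanged -- end of stream, which is what the
#   resp['nextForwardToken'].should.equal(next_token) assertion above checks
# call 3 (backward token):   events 0..9 again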