# moto/moto/kinesis/responses.py
import json
from moto.core.responses import BaseResponse
from .models import KinesisBackend, kinesis_backends


class KinesisResponse(BaseResponse):
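    """HTTP-layer handler for the Amazon Kinesis API.

    Each action reads its parameters from the JSON request body via
    ``self._get_param`` and delegates to the region-scoped ``KinesisBackend``,
    returning the JSON document the real service would send back.

    A minimal usage sketch (assuming moto's ``mock_kinesis`` decorator and
    boto3 are available; the stream and test names are illustrative only)::

        import boto3
        from moto import mock_kinesis

        @mock_kinesis
        def test_create_and_describe():
            client = boto3.client("kinesis", region_name="us-east-1")
            client.create_stream(StreamName="my-stream", ShardCount=1)
            summary = client.describe_stream_summary(StreamName="my-stream")
            assert summary["StreamDescriptionSummary"]["StreamName"] == "my-stream"
    """
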
def __init__(self) -> None:
super().__init__(service_name="kinesis")
@property
def kinesis_backend(self) -> KinesisBackend:
return kinesis_backends[self.current_account][self.region]
def create_stream(self) -> str:
stream_name = self._get_param("StreamName")
shard_count = self._get_param("ShardCount")
stream_mode = self._get_param("StreamModeDetails")
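        # StreamModeDetails, when supplied, is a dict such as
        # {"StreamMode": "ON_DEMAND"}; ShardCount may be omitted for
        # on-demand streams.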
self.kinesis_backend.create_stream(
stream_name, shard_count, stream_mode=stream_mode
)
return ""
def describe_stream(self) -> str:
stream_name = self._get_param("StreamName")
stream_arn = self._get_param("StreamARN")
limit = self._get_param("Limit")
stream = self.kinesis_backend.describe_stream(stream_arn, stream_name)
return json.dumps(stream.to_json(shard_limit=limit))
def describe_stream_summary(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
stream = self.kinesis_backend.describe_stream_summary(stream_arn, stream_name)
return json.dumps(stream.to_json_summary())
def list_streams(self) -> str:
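        """Return up to ``Limit`` (default 10) stream names, starting after
        ``ExclusiveStartStreamName`` when given, plus a ``HasMoreStreams`` flag."""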
streams = self.kinesis_backend.list_streams()
stream_names = [stream.stream_name for stream in streams]
max_streams = self._get_param("Limit", 10)
        token = self._get_param("ExclusiveStartStreamName")
if token:
start = stream_names.index(token) + 1
else:
start = 0
streams_resp = stream_names[start : start + max_streams]
has_more_streams = False
if start + max_streams < len(stream_names):
has_more_streams = True
return json.dumps(
{"HasMoreStreams": has_more_streams, "StreamNames": streams_resp}
)
def delete_stream(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
self.kinesis_backend.delete_stream(stream_arn, stream_name)
return ""
def get_shard_iterator(self) -> str:
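        """Build a shard iterator for one shard of a stream.

        ``ShardIteratorType`` is one of AT_SEQUENCE_NUMBER, AFTER_SEQUENCE_NUMBER,
        TRIM_HORIZON, LATEST or AT_TIMESTAMP; ``StartingSequenceNumber`` and
        ``Timestamp`` only apply to the corresponding iterator types.
        """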
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
shard_id = self._get_param("ShardId")
shard_iterator_type = self._get_param("ShardIteratorType")
starting_sequence_number = self._get_param("StartingSequenceNumber")
at_timestamp = self._get_param("Timestamp")
shard_iterator = self.kinesis_backend.get_shard_iterator(
stream_arn,
stream_name,
shard_id,
shard_iterator_type,
starting_sequence_number,
at_timestamp,
)
return json.dumps({"ShardIterator": shard_iterator})
def get_records(self) -> str:
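        """Read records from the shard addressed by ``ShardIterator``, returning
        at most ``Limit`` records plus the next iterator and MillisBehindLatest."""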
stream_arn = self._get_param("StreamARN")
shard_iterator = self._get_param("ShardIterator")
limit = self._get_param("Limit")
(
next_shard_iterator,
records,
millis_behind_latest,
) = self.kinesis_backend.get_records(stream_arn, shard_iterator, limit)
return json.dumps(
{
"NextShardIterator": next_shard_iterator,
"Records": [record.to_json() for record in records],
"MillisBehindLatest": millis_behind_latest,
}
)
def put_record(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
partition_key = self._get_param("PartitionKey")
explicit_hash_key = self._get_param("ExplicitHashKey")
data = self._get_param("Data")
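        # Data arrives base64-encoded in the JSON request body and is passed
        # through to the backend unchanged.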
sequence_number, shard_id = self.kinesis_backend.put_record(
stream_arn,
stream_name,
partition_key,
explicit_hash_key,
data,
)
return json.dumps({"SequenceNumber": sequence_number, "ShardId": shard_id})
def put_records(self) -> str:
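        """Write a batch of records; each entry in ``Records`` carries its own
        ``Data`` and ``PartitionKey``. The backend already returns the full
        PutRecords response shape, so it is serialized as-is."""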
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
records = self._get_param("Records")
response = self.kinesis_backend.put_records(stream_arn, stream_name, records)
return json.dumps(response)
def split_shard(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
shard_to_split = self._get_param("ShardToSplit")
new_starting_hash_key = self._get_param("NewStartingHashKey")
self.kinesis_backend.split_shard(
stream_arn, stream_name, shard_to_split, new_starting_hash_key
)
return ""
def merge_shards(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
shard_to_merge = self._get_param("ShardToMerge")
adjacent_shard_to_merge = self._get_param("AdjacentShardToMerge")
self.kinesis_backend.merge_shards(
stream_arn, stream_name, shard_to_merge, adjacent_shard_to_merge
)
return ""
def list_shards(self) -> str:
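        """List a stream's shards, paginated via ``NextToken``; ``MaxResults``
        defaults to 10,000, the maximum the API allows per page."""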
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
next_token = self._get_param("NextToken")
max_results = self._get_param("MaxResults", 10000)
shards, token = self.kinesis_backend.list_shards(
stream_arn=stream_arn,
stream_name=stream_name,
limit=max_results,
next_token=next_token,
)
res = {"Shards": shards, "NextToken": token}
return json.dumps(res)
def update_shard_count(self) -> str:
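        """Scale the stream to ``TargetShardCount`` and echo back both the
        resulting current count and the requested target."""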
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
target_shard_count = self._get_param("TargetShardCount")
current_shard_count = self.kinesis_backend.update_shard_count(
stream_arn=stream_arn,
stream_name=stream_name,
target_shard_count=target_shard_count,
)
return json.dumps(
dict(
StreamName=stream_name,
CurrentShardCount=current_shard_count,
TargetShardCount=target_shard_count,
)
)
def increase_stream_retention_period(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
retention_period_hours = self._get_param("RetentionPeriodHours")
self.kinesis_backend.increase_stream_retention_period(
stream_arn, stream_name, retention_period_hours
)
return ""
def decrease_stream_retention_period(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
retention_period_hours = self._get_param("RetentionPeriodHours")
self.kinesis_backend.decrease_stream_retention_period(
stream_arn, stream_name, retention_period_hours
)
return ""
def add_tags_to_stream(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
tags = self._get_param("Tags")
self.kinesis_backend.add_tags_to_stream(stream_arn, stream_name, tags)
return json.dumps({})
def list_tags_for_stream(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
exclusive_start_tag_key = self._get_param("ExclusiveStartTagKey")
limit = self._get_param("Limit")
response = self.kinesis_backend.list_tags_for_stream(
stream_arn, stream_name, exclusive_start_tag_key, limit
)
return json.dumps(response)
def remove_tags_from_stream(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
tag_keys = self._get_param("TagKeys")
self.kinesis_backend.remove_tags_from_stream(stream_arn, stream_name, tag_keys)
return json.dumps({})
def enable_enhanced_monitoring(self) -> str:
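        """Enable the requested shard-level metrics; the response carries both
        the metrics currently in effect and the desired (new) set."""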
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
shard_level_metrics = self._get_param("ShardLevelMetrics")
arn, name, current, desired = self.kinesis_backend.enable_enhanced_monitoring(
stream_arn=stream_arn,
stream_name=stream_name,
shard_level_metrics=shard_level_metrics,
)
return json.dumps(
dict(
StreamName=name,
CurrentShardLevelMetrics=current,
DesiredShardLevelMetrics=desired,
StreamARN=arn,
)
)
def disable_enhanced_monitoring(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
shard_level_metrics = self._get_param("ShardLevelMetrics")
arn, name, current, desired = self.kinesis_backend.disable_enhanced_monitoring(
stream_arn=stream_arn,
stream_name=stream_name,
to_be_disabled=shard_level_metrics,
)
return json.dumps(
dict(
StreamName=name,
CurrentShardLevelMetrics=current,
DesiredShardLevelMetrics=desired,
StreamARN=arn,
)
)
def list_stream_consumers(self) -> str:
stream_arn = self._get_param("StreamARN")
consumers = self.kinesis_backend.list_stream_consumers(stream_arn=stream_arn)
return json.dumps(dict(Consumers=[c.to_json() for c in consumers]))
def register_stream_consumer(self) -> str:
stream_arn = self._get_param("StreamARN")
consumer_name = self._get_param("ConsumerName")
consumer = self.kinesis_backend.register_stream_consumer(
stream_arn=stream_arn, consumer_name=consumer_name
)
return json.dumps(dict(Consumer=consumer.to_json()))
def describe_stream_consumer(self) -> str:
stream_arn = self._get_param("StreamARN")
consumer_name = self._get_param("ConsumerName")
consumer_arn = self._get_param("ConsumerARN")
consumer = self.kinesis_backend.describe_stream_consumer(
stream_arn=stream_arn,
consumer_name=consumer_name,
consumer_arn=consumer_arn,
)
return json.dumps(
dict(ConsumerDescription=consumer.to_json(include_stream_arn=True))
)
def deregister_stream_consumer(self) -> str:
stream_arn = self._get_param("StreamARN")
consumer_name = self._get_param("ConsumerName")
consumer_arn = self._get_param("ConsumerARN")
self.kinesis_backend.deregister_stream_consumer(
stream_arn=stream_arn,
consumer_name=consumer_name,
consumer_arn=consumer_arn,
)
return json.dumps(dict())
def start_stream_encryption(self) -> str:
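        """Turn on server-side encryption for the stream using the given
        ``EncryptionType`` (typically KMS) and ``KeyId``."""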
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
encryption_type = self._get_param("EncryptionType")
key_id = self._get_param("KeyId")
self.kinesis_backend.start_stream_encryption(
stream_arn=stream_arn,
stream_name=stream_name,
encryption_type=encryption_type,
key_id=key_id,
2022-01-26 18:41:04 -01:00
)
return json.dumps(dict())
def stop_stream_encryption(self) -> str:
stream_arn = self._get_param("StreamARN")
stream_name = self._get_param("StreamName")
self.kinesis_backend.stop_stream_encryption(
stream_arn=stream_arn, stream_name=stream_name
)
return json.dumps(dict())
def update_stream_mode(self) -> str:
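        """Switch the stream between PROVISIONED and ON_DEMAND capacity modes;
        ``StreamModeDetails`` is a dict like {"StreamMode": "ON_DEMAND"}."""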
stream_arn = self._get_param("StreamARN")
stream_mode = self._get_param("StreamModeDetails")
self.kinesis_backend.update_stream_mode(stream_arn, stream_mode)
return "{}"