Feature: Proxy (#6848)
parent 39b9c2f121
commit 438b2b7843
.github/workflows/build.yml (vendored, 5 additions)

@@ -88,6 +88,11 @@ jobs:
     if: "!contains(github.event.pull_request.labels.*.name, 'java')"
     uses: ./.github/workflows/tests_servermode.yml
 
+  testproxy:
+    needs: [lint]
+    if: "!contains(github.event.pull_request.labels.*.name, 'java')"
+    uses: ./.github/workflows/tests_proxymode.yml
+
   release:
     name: Release
     runs-on: ubuntu-latest
.github/workflows/tests_proxymode.yml (vendored, new file, 57 lines)

name: Unit tests in Proxy Mode
on: [workflow_call]

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10", "3.11"]

    steps:
    - uses: actions/checkout@v4
      with:
        fetch-depth: 0
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v4
      with:
        python-version: ${{ matrix.python-version }}
    - name: Get pip cache dir
      id: pip-cache
      run: |
        echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
    - name: pip cache
      uses: actions/cache@v3
      with:
        path: ${{ steps.pip-cache.outputs.dir }}
        key: pip-${{ matrix.python-version }}-${{ hashFiles('**/setup.cfg') }}
    - name: Update pip
      run: |
        python -m pip install --upgrade pip
    - name: Install project dependencies
      run: |
        pip install -r requirements-tests.txt
        pip install .[all,server]
    - name: Start MotoProxy
      run: |
        moto_proxy -h > moto_proxy.log
        moto_proxy -H 0.0.0.0 -v > moto_proxy.log &
    - name: Test ProxyMode
      env:
        TEST_PROXY_MODE: ${{ true }}
      run: |
        pytest -sv tests/test_acmpca tests/test_awslambda tests/test_apigateway tests/test_s3
    - name: "Stop MotoProxy"
      if: always()
      run: |
        pwd
        ls -la
        kill $(lsof -t -i:5005)
    - name: Archive Proxy logs
      if: always()
      uses: actions/upload-artifact@v3
      with:
        name: motoproxy-${{ matrix.python-version }}
        path: |
          moto_proxy.log
.gitignore (vendored, 3 additions)

@@ -29,6 +29,9 @@ htmlcov/
 .coverage*
 docs/_build
 moto_recording
+moto/moto_proxy/certs/*.crt
+moto/moto_proxy/certs/*.csr
+moto/moto_proxy/certs/*.conf
 .hypothesis
 other_langs/tests_java/target
 other_langs/tests_dotnet/ExampleTestProject/bin
MANIFEST.in (3 additions)

@@ -10,6 +10,9 @@ include moto/ec2/resources/ecs/optimized_amis/*.json
 include moto/cognitoidp/resources/*.json
 include moto/dynamodb/parsing/reserved_keywords.txt
 include moto/moto_api/_internal/*
+include moto/moto_proxy/*
+include moto/moto_proxy/certs/__init__.py
+include moto/moto_proxy/certs/req.conf.tmpl
 include moto/rds/resources/cluster_options/*.json
 include moto/servicequotas/resources/*/*.json
 include moto/ssm/resources/*.json
docs/docs/proxy_mode.rst (new file, 126 lines)

.. _proxy_mode:

.. role:: bash(code)
   :language: bash

.. role:: raw-html(raw)
   :format: html

================================
Proxy Mode
================================

Moto can be run as a proxy, intercepting all requests to AWS and mocking them instead. :raw-html:`<br />`
Some of the benefits:

- Easy to configure for all SDKs
- Can be reached by Lambda containers, allowing you to mock service calls inside a Lambda function


Installation
-------------

Install the required dependencies using:

.. code:: bash

    pip install moto[proxy]


You can then start the proxy like this:

.. code:: bash

    $ pip install moto[proxy]
    $ moto_proxy

Note that if you want your Lambda functions to reach this proxy, you need to bind the moto_proxy to an externally reachable interface:

.. code:: bash

    $ moto_proxy -H 0.0.0.0

.. warning:: Be careful not to use this on a public network - this allows all network users access to your server.


Quick usage
--------------

The help command shows a quick guide on how to configure SDKs to use the proxy.

.. code-block:: bash

    $ moto_proxy --help


Extended Configuration
------------------------

To use the MotoProxy while running your tests, the AWS SDK needs to know two things:

- The proxy endpoint
- How to deal with SSL

To set the proxy endpoint, use the `HTTPS_PROXY` environment variable.

Because the proxy does not have an approved SSL certificate, the SDK will not trust the proxy by default. This means that the SDK has to be configured to either:

1. Accept the proxy's custom certificate, by setting the `AWS_CA_BUNDLE` environment variable
2. Allow unverified SSL certificates

The `AWS_CA_BUNDLE` needs to point to the location of the CA certificate that comes with Moto. :raw-html:`<br />`
You can run `moto_proxy --help` to get the exact location of this certificate, depending on where Moto is installed.

Environment Variable Configuration
----------------------------------

.. code-block:: bash

    export HTTPS_PROXY=http://localhost:5005
    aws cloudformation list-stacks --no-verify-ssl

Or by configuring the AWS_CA_BUNDLE:

.. code-block:: bash

    export HTTPS_PROXY=http://localhost:5005
    export AWS_CA_BUNDLE=/location/of/moto/ca/cert.crt
    aws cloudformation list-stacks


Python Configuration
--------------------------

If you're already using Moto's `mock_service` decorators, you can use a custom environment variable that configures everything automatically:

.. code-block:: bash

    TEST_PROXY_MODE=true pytest

To configure this manually:

.. code-block:: python

    import boto3
    from botocore.config import Config

    config = Config(proxies={"https": "http://localhost:5005"})
    client = boto3.client("s3", config=config, verify=False)


Terraform Configuration
------------------------------

.. code-block::

    provider "aws" {
        region           = "us-east-1"
        http_proxy       = "http://localhost:5005"
        custom_ca_bundle = "/location/of/moto/ca/cert.crt"
        # OR
        insecure         = true
    }


Drawbacks
------------

Configuring a proxy means that all requests are intercepted, but the MotoProxy can only handle requests to AWS.

If your test includes a call to `https://www.thirdpartyservice.com`, that call will also be intercepted by `MotoProxy`, which will then throw an error because it does not know how to handle non-AWS requests.
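The documented options can also be combined from Python without a `Config` object at all; a minimal sketch (the CA bundle path is a placeholder, the real location is printed by `moto_proxy --help`):

    # Sketch: pointing boto3 at MotoProxy purely via environment variables.
    # botocore honours HTTPS_PROXY and AWS_CA_BUNDLE on its own.
    import os
    import boto3

    os.environ["HTTPS_PROXY"] = "http://localhost:5005"
    os.environ["AWS_CA_BUNDLE"] = "/location/of/moto/ca/cert.crt"  # placeholder path

    client = boto3.client("s3", region_name="us-east-1")
    print(client.list_buckets()["Buckets"])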
docs/index.rst (1 addition)

@@ -32,6 +32,7 @@ Additional Resources
   docs/getting_started
   docs/server_mode
+  docs/proxy_mode
   docs/faq
   docs/iam
   docs/aws_config
moto/awslambda/models.py

@@ -27,6 +27,7 @@ from moto.awslambda.policy import Policy
 from moto.core import BaseBackend, BackendDict, BaseModel, CloudFormationModel
 from moto.core.exceptions import RESTError
 from moto.core.utils import unix_time_millis, iso_8601_datetime_with_nanoseconds, utcnow
+from moto.utilities.utils import load_resource_as_bytes
 from moto.iam.models import iam_backends
 from moto.iam.exceptions import IAMNotFoundException
 from moto.ecr.exceptions import ImageNotFoundException

@@ -82,6 +83,17 @@ def zip2tar(zip_bytes: bytes) -> io.BytesIO:
     return tarstream
 
 
+def file2tar(file_content: bytes, file_name: str) -> io.BytesIO:
+    tarstream = io.BytesIO()
+    tarf = tarfile.TarFile(fileobj=tarstream, mode="w")
+    tarinfo = tarfile.TarInfo(name=file_name)
+    tarinfo.size = len(file_content)
+    tarf.addfile(tarinfo, io.BytesIO(file_content))
+
+    tarstream.seek(0)
+    return tarstream
+
+
 class _VolumeRefCount:
     __slots__ = "refcount", "volume"

@@ -132,6 +144,10 @@ class _DockerDataVolumeContext:
             try:
                 with zip2tar(self._lambda_func.code_bytes) as stream:
                     container.put_archive(settings.LAMBDA_DATA_DIR, stream)
+                if settings.test_proxy_mode():
+                    ca_cert = load_resource_as_bytes(__name__, "../moto_proxy/ca.crt")
+                    with file2tar(ca_cert, "ca.crt") as cert_stream:
+                        container.put_archive(settings.LAMBDA_DATA_DIR, cert_stream)
             finally:
                 container.remove(force=True)

@@ -862,10 +878,13 @@ class LambdaFunction(CloudFormationModel, DockerModel):
 
         env_vars.update(self.environment_vars)
         env_vars["MOTO_HOST"] = settings.moto_server_host()
-        env_vars["MOTO_PORT"] = settings.moto_server_port()
-        env_vars[
-            "MOTO_HTTP_ENDPOINT"
-        ] = f'{env_vars["MOTO_HOST"]}:{env_vars["MOTO_PORT"]}'
+        moto_port = settings.moto_server_port()
+        env_vars["MOTO_PORT"] = moto_port
+        env_vars["MOTO_HTTP_ENDPOINT"] = f'{env_vars["MOTO_HOST"]}:{moto_port}'
+
+        if settings.test_proxy_mode():
+            env_vars["HTTPS_PROXY"] = env_vars["MOTO_HTTP_ENDPOINT"]
+            env_vars["AWS_CA_BUNDLE"] = "/var/task/ca.crt"
 
         container = exit_code = None
         log_config = docker.types.LogConfig(type=docker.types.LogConfig.types.JSON)

@@ -1614,8 +1633,11 @@ class LambdaBackend(BaseBackend):
     Implementation of the AWS Lambda endpoint.
     Invoking functions is supported - they will run inside a Docker container, emulating the real AWS behaviour as closely as possible.
 
-    It is possible to connect from AWS Lambdas to other services, as long as you are running Moto in ServerMode.
-    The Lambda has access to environment variables `MOTO_HOST` and `MOTO_PORT`, which can be used to build the url that MotoServer runs on:
+    It is possible to connect from AWS Lambdas to other services, as long as you are running MotoProxy or the MotoServer.
+
+    When running the MotoProxy, calls to other AWS services are automatically proxied.
+
+    When running MotoServer, the Lambda has access to environment variables `MOTO_HOST` and `MOTO_PORT`, which can be used to build the url that MotoServer runs on:
 
     .. sourcecode:: python
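Because the container is started with `HTTPS_PROXY` and `AWS_CA_BUNDLE` already set (and `ca.crt` copied to `/var/task`), a function body needs no proxy-specific wiring. A minimal sketch of such a handler, purely for illustration:

    # Sketch: a Lambda handler that transparently talks to MotoProxy.
    # botocore inside the container picks up HTTPS_PROXY and AWS_CA_BUNDLE
    # from the environment variables injected above.
    import boto3

    def lambda_handler(event, context):
        # This call is routed through the proxy and answered by Moto's in-memory backend.
        return [b["Name"] for b in boto3.client("s3").list_buckets()["Buckets"]]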
moto/core/models.py

@@ -427,6 +427,69 @@ class ServerModeMockAWS(BaseMockAWS):
             self._resource_patcher.stop()
 
 
+class ProxyModeMockAWS(BaseMockAWS):
+
+    RESET_IN_PROGRESS = False
+
+    def __init__(self, *args: Any, **kwargs: Any):
+        self.test_proxy_mode_endpoint = settings.test_proxy_mode_endpoint()
+        super().__init__(*args, **kwargs)
+
+    def reset(self) -> None:
+        call_reset_api = os.environ.get("MOTO_CALL_RESET_API")
+        if not call_reset_api or call_reset_api.lower() != "false":
+            if not ProxyModeMockAWS.RESET_IN_PROGRESS:
+                ProxyModeMockAWS.RESET_IN_PROGRESS = True
+                import requests
+
+                requests.post(f"{self.test_proxy_mode_endpoint}/moto-api/reset")
+                ProxyModeMockAWS.RESET_IN_PROGRESS = False
+
+    def enable_patching(self, reset: bool = True) -> None:
+        if self.__class__.nested_count == 1 and reset:
+            # Just started
+            self.reset()
+
+        from boto3 import client as real_boto3_client, resource as real_boto3_resource
+
+        def fake_boto3_client(*args: Any, **kwargs: Any) -> botocore.client.BaseClient:
+            kwargs["verify"] = False
+            proxy_endpoint = (
+                f"http://localhost:{os.environ.get('MOTO_PROXY_PORT', 5005)}"
+            )
+            proxies = {"http": proxy_endpoint, "https": proxy_endpoint}
+            if "config" in kwargs:
+                kwargs["config"].__dict__["proxies"] = proxies
+            else:
+                config = Config(proxies=proxies)
+                kwargs["config"] = config
+
+            return real_boto3_client(*args, **kwargs)
+
+        def fake_boto3_resource(*args: Any, **kwargs: Any) -> Any:
+            kwargs["verify"] = False
+            proxy_endpoint = (
+                f"http://localhost:{os.environ.get('MOTO_PROXY_PORT', 5005)}"
+            )
+            proxies = {"http": proxy_endpoint, "https": proxy_endpoint}
+            if "config" in kwargs:
+                kwargs["config"].__dict__["proxies"] = proxies
+            else:
+                config = Config(proxies=proxies)
+                kwargs["config"] = config
+            return real_boto3_resource(*args, **kwargs)
+
+        self._client_patcher = patch("boto3.client", fake_boto3_client)
+        self._resource_patcher = patch("boto3.resource", fake_boto3_resource)
+        self._client_patcher.start()
+        self._resource_patcher.start()
+
+    def disable_patching(self) -> None:
+        if self._client_patcher:
+            self._client_patcher.stop()
+            self._resource_patcher.stop()
+
+
 class base_decorator:
     mock_backend = MockAWS
 

@@ -436,8 +499,10 @@ class base_decorator:
     def __call__(
         self, func: Optional[Callable[..., Any]] = None
     ) -> Union[BaseMockAWS, Callable[..., BaseMockAWS]]:
-        if settings.TEST_SERVER_MODE:
-            mocked_backend: BaseMockAWS = ServerModeMockAWS(self.backends)
+        if settings.test_proxy_mode():
+            mocked_backend: BaseMockAWS = ProxyModeMockAWS(self.backends)
+        elif settings.TEST_SERVER_MODE:
+            mocked_backend: BaseMockAWS = ServerModeMockAWS(self.backends)  # type: ignore
         else:
             mocked_backend = self.mock_backend(self.backends)
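With this in place, an existing decorator-based test can be pointed at a running proxy just by setting one environment variable; a sketch of what that looks like in practice:

    # Sketch: the same test body runs in decorator mode, server mode, or proxy mode.
    # With TEST_PROXY_MODE=true, base_decorator picks ProxyModeMockAWS, which patches
    # boto3.client/boto3.resource to disable TLS verification and route via the proxy.
    import boto3
    from moto import mock_s3

    @mock_s3
    def test_create_bucket():
        client = boto3.client("s3", region_name="us-east-1")
        client.create_bucket(Bucket="proxied-bucket")
        assert "proxied-bucket" in [b["Name"] for b in client.list_buckets()["Buckets"]]

    # Run against the proxy (assuming `moto_proxy` is already listening on port 5005):
    #   TEST_PROXY_MODE=true pytest test_example.py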
moto/moto_proxy/__init__.py (new file, 24 lines)

import logging
import sys


log_format = "%(levelname)s %(asctime)s - %(message)s"
logging.basicConfig(stream=sys.stdout, format=log_format)
logger = logging.getLogger("MOTO_PROXY")
logger.setLevel(logging.INFO)


def with_color(color: int, text: object) -> str:
    return f"\x1b[{color}m{text}\x1b[0m"


def info(msg: object) -> None:
    logger.info(msg)


def debug(msg: object) -> None:
    logger.debug(msg)


def error(msg: object) -> None:
    logger.error(msg)
moto/moto_proxy/ca.crt (new file, 19 lines)

-----BEGIN CERTIFICATE-----
MIIDCzCCAfOgAwIBAgIUIOBzxLZH8maXw2YsSoQpXEpyqpowDQYJKoZIhvcNAQEL
BQAwFDESMBAGA1UEAwwJcHJveHkyIENBMCAXDTIzMDkyNTA5MzUwMFoYDzIxMjMw
OTAxMDkzNTAwWjAUMRIwEAYDVQQDDAlwcm94eTIgQ0EwggEiMA0GCSqGSIb3DQEB
AQUAA4IBDwAwggEKAoIBAQCt6XhWWVFTEQlC+ktSmL+MFDdOHM0vteOz+9HouBK/
ofo/1q8Zd5z2hYOQPYx4h/EnXb7LFA8ke5XY1HENY3U4k+OWNwuRr95EeV8rrxFk
4vQoqmWGXtQ332TAGY9B5k6uCe2b5dLO/0NR0MiGZw1vGhd3zhHo5utorVmOdAaM
VTI7krqSB+gM4xOfnE2UIeGqS0RVPbzXNTTdVH8PHOHZB9uWlyHbXDyeG/uRJFB7
lCCQSkLzvQ7vmVY852Pke5H60kHJYb994RR2ajVAE9AxJI16qnxPSOMVGoeebm3I
H3ao+VGMq/b1XGZUQq0s7sA2a+DHDPHSl4iwJ/FMEMTbAgMBAAGjUzBRMB0GA1Ud
DgQWBBQsMTVcFGS22i+kRFGEtEBdCHTG5DAfBgNVHSMEGDAWgBQsMTVcFGS22i+k
RFGEtEBdCHTG5DAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAo
908wu8QadGfa3MuQ9vlR18P7pNWFcE5z16cX348IclGRmnkln/9x78CG9RZNAckS
4ch5RzGrJNtHb4s9zDhS5SpyPdx5Ua0pYqVFZm6Vyg1cFVwipRJ78qM/uBcdE/b5
r2DnGKfJCAWIpRpzTZ8uGDGDaoX7NxJ0U9zQ04J+o4GpLeTY0qzI1Y9gFaDYPpGB
M8wBuYUwEYKbOq/cUA++m0n2SzsU1xlXk+01QZcQGokby0bMrorccdi3ZjsXQNSb
eC8btoekt29cxBU/N7v1iR9Hd2DMZtz1xDsX+ihWGGq3D+PeyqewMuaWFQLbDFHM
0pRthQyOKT0c1ZJjusv8
-----END CERTIFICATE-----
moto/moto_proxy/ca.key (new file, 28 lines)

-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCt6XhWWVFTEQlC
+ktSmL+MFDdOHM0vteOz+9HouBK/ofo/1q8Zd5z2hYOQPYx4h/EnXb7LFA8ke5XY
1HENY3U4k+OWNwuRr95EeV8rrxFk4vQoqmWGXtQ332TAGY9B5k6uCe2b5dLO/0NR
0MiGZw1vGhd3zhHo5utorVmOdAaMVTI7krqSB+gM4xOfnE2UIeGqS0RVPbzXNTTd
VH8PHOHZB9uWlyHbXDyeG/uRJFB7lCCQSkLzvQ7vmVY852Pke5H60kHJYb994RR2
ajVAE9AxJI16qnxPSOMVGoeebm3IH3ao+VGMq/b1XGZUQq0s7sA2a+DHDPHSl4iw
J/FMEMTbAgMBAAECggEAMLxL9jq6cQJFq6jTgdZ/WyRxKSkmEPgyUsY/WS14R45/
P/OMByF/cZARwdKVslM6L7N0G5nH8ovVfrlt4vgbqdq7vOU5Dz8PFPZERswdHj4B
eQHjSIf7hZrLM5AWFrwREXGDzhvV+x8KgPt2rj9jwt43dGHhn/hSQPfPMH3wNdPV
vkPjgRVgH99qtXN4duAknpY80qs3T83n8ZCQj628wy0N9tRXMWMp2A0KoIOS0tEd
LsqcCbXY7Z8B89ERGSfHN4qczuqwaeObu1tWionFAKCIzohBFUjHNqOePEF2Qo8q
w3yI3MA5vMn7o4PMfx/h/vLEls1ZFBiS9IVJ9mCarQKBgQDefa24Aode7gGvzltV
vApoEhhh81VWY+UI8+YtNIZjyyzMFS0eZJMc4peQk1AmPgY/GhNOe8lCgBkHurX9
t8Y1ljHRVanAkT56uuG5a/LofBVKUgT8dMA/LspRE3GWmr2qTvcmhladkMM6HN8Q
BpZ7WWSRsMOeYFfJ7sGenlRDLQKBgQDIGspXMVLeM7DohvZhejPuj9uwZBFeIqwG
+vrxgoQWJxaSarzf6nnSG/M5lx15MYhVOlzbo2/sz0rJQmn6vD3swbcF2EMXG5T+
g2fzejBJUySx2xhSYi2G3ZGf2SRSsvLBFitW7BWuoX7bR0771S27XqNpzO6wKBOV
yXI4ZN5NJwKBgQCH9WTivSjb6bU+KWvGyFHTprsfoALV99VN0z0lAqPc95s4Wvhn
Si5byFu2DU89D0nh5Z1GqH4kFQM2pfHwSQzmUhG/SgmhkyAK/4hQNpcJWknoUJab
bvzLn1wijy8qSQT9vaNp902Wm4+xQ1NMB7qNReMe5FWlwlnjG/NVaoszQQKBgBwg
h+iRqlBJe8hzkBZLkxkpZ3v31OkifoPMq5FfAyoJ/IZAMqRW1SDPhPTHZQEwETXJ
qlvFMWpcCOsZRsRTyXCKGivcJjINUngkCGyU9EyaP0Iwxc5utm+KnXmWkCB/vted
QiJJtRKC6M3xzAxh/rejqdypTbO9LmOTmVaL9yNpAoGANTkRHuXzjIoESPGdCfwm
N1ng5Z8RUP9TclRfszPWy6FnMKw50PfIs1l2ZTiEXjTKq7sfPx4BaN/r4sumJNaS
6zhVohY5pteKjdmi7GZlhDPBjaZwjzQjNKTdlCGf7Khif/zNytQFJ8Xz9MP12457
PlZ5dO/E1EW2dEou9Wf77JU=
-----END PRIVATE KEY-----
moto/moto_proxy/cert.key (new file, 28 lines)

-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDk0V6Tomu5xxkU
fzSKWBOW/g11AuX5fWzxhHZj+7lJxdtxv2hsqcRAz1Ixj7yoGeu94k73cbmhYxdz
xuu8DHsxjqRU1GEIzMmrjQT5o7xUH3HR9Rcq4kNm98kMJnCRItqrIkGlQgaUl2kk
3g+PgTaWx9ygcs2jpBaM1YgNfOjtJsFDYVmgCRtzLfUieY2d2NjQ4TIx/Hme2gL6
My0kwL+d6ksk1Al5BOOBRby2L6HXDMks3qXpXUsxxqrEXZs4SjJDZ0LZHo+3Jws7
8r0ukN0u1ZwWDDywAJ9bJfq4zwm8ELt3QulLVy97eggtZSKBHH7s9r8O7u5LFt2h
wnZKwU7jAgMBAAECggEAEyOTqnvQg+dsQlBWt82gxQvp9Ap/Uh965p0V4/+kCQPB
YUgv0Ir6QEUdeS/7Is2ZCZ06Z3l+AkqnpWz5i2I5E07JkWwUOhuBZAjosuxoB8Dh
92+HKQ405UULh9y9Pj6KlZVJC8aA7QClieP/32Wtu1RRsKMs1Sexi5HrjkLcHsQ3
4qzgdQjcVd7T4Y6h2ZFG/idZA3Oqjyec7LJS6212huuzpjB6D9oe89zxafpHzX+x
gYCXhFeOnvQtIxS5ajTlL/LhIMOBsG4opERc/7uxXILAKjb/hLk9/LyK6cZzMNwu
EFev5dzI17m+Lk5L9IntOYjVddoUeuAW8djHFanRHQKBgQDrjhsONoG5D73jMOJC
YskfQ/q+//ghl03eQOPtdW+XxpiEbVl6f1SNtgRdU/0bI52p61fvGaLcS/S1BLEy
13xvbFZIIsMdcJoUq89yr3FCvG1MeLp+MY1y43Hhp/c1F24jkAScv3wxjWINhXro
O5X5g7PsZn6Xrlk9G7NiN1pOhQKBgQD4rZDoMOrRBXHY1EyywwZlVVQ3mIWG9684
rvCRFP1/c9+SG89DGC9tNLJclwQqy2yqWGQc0Fdb7WZbYsEuMmDC+TX7RByAInoG
+ihqaX8mGKSp44y9X0KffPOHgy/o9lD2vzpKSdEMj5rChh5ckSiIYOToYRLJJLwo
4j2a4WnoRwKBgAzYhRUzV8O13g8jvVMNfBZeaLA92VRLog160HNEsj8+r1aZeAW8
J+pKgNZuHCF8wb5gfT0m0sDcy42LofY51illaRcp/iX+3AhAjmGcu7p9+B/xfYog
PayERtOdi1ez3WfHFNlPgABby3sdSmSby0P+MLO1qzWuZmN0vUWf6ybZAoGBAJg9
2irsV7WjebFfN51xHCdJeAeZTpX0aMdxAkIv8YnnrIXMlLTkx5Q54MAijCCO7XXU
K2Ygfnr++d0UtmPL38U9wLiVWEVx1fcTi06qS3dNOvHvJyiAe08cthLOU7Rxp9uH
8u2sB1mDSSGx7kCJdaEYgMtrMo8F+FOnPkPloGrdAoGAYuNpqXeEUlNwf9L6eCHg
aSSPaO927cdvjEnSWuyYaCweqNTwpD9ZrxPtpoVPNDl2kftfJTm4AVxoJI1irdDe
1Z/Txj6AOesM1GdFqp88/CgoeJDSh8yXY5Gctp38JwYJrEVkI3bL1Bc6DjjSAgEf
+swqLap4CEnppbl3Rt1mIWQ=
-----END PRIVATE KEY-----
moto/moto_proxy/certificate_creator.py (new file, 133 lines)

import os
import threading
import time
from subprocess import Popen, PIPE
from uuid import uuid4

from . import debug, info


def join_with_script_dir(path: str) -> str:
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), path)


class CertificateCreator:
    cakey = join_with_script_dir("ca.key")
    cacert = join_with_script_dir("ca.crt")
    certkey = join_with_script_dir("cert.key")
    certdir = join_with_script_dir("certs/")

    lock = threading.Lock()

    def validate(self) -> None:
        # Verify the CertificateAuthority files exist
        if not os.path.isfile(CertificateCreator.cakey):
            raise Exception(f"Cannot find {CertificateCreator.cakey}")
        if not os.path.isfile(CertificateCreator.cacert):
            raise Exception(f"Cannot find {CertificateCreator.cacert}")
        if not os.path.isfile(CertificateCreator.certkey):
            raise Exception(f"Cannot find {CertificateCreator.certkey}")
        if not os.path.isdir(CertificateCreator.certdir):
            raise Exception(f"Cannot find {CertificateCreator.certdir}")
        # Verify the `certs` dir is reachable
        try:
            test_file_location = f"{CertificateCreator.certdir}/{uuid4()}.txt"
            debug(
                f"Writing test file to {test_file_location} to verify the directory is writable..."
            )
            with open(test_file_location, "w") as file:
                file.write("test")
            os.remove(test_file_location)
        except Exception:
            info("Failed to write test file")
            info(
                f"The directory {CertificateCreator.certdir} does not seem to be writable"
            )
            raise

    def create(self, path: str) -> str:
        """
        Create an SSL certificate for the supplied hostname.
        This method will return a path to the certificate.
        """
        full_name = path.split(":")[0]

        with CertificateCreator.lock:
            # We don't want to create certificates for every possible endpoint
            # Especially with randomly named S3-buckets

            # We can create certificates that match wildcards to reduce the total number
            # For example:
            # Hostname: somebucket.s3.amazonaws.com
            # Certificate: *.s3.amazonaws.com
            #
            # All requests that match this wildcard certificate will reuse it

            wildcard_name = f"*.{'.'.join(full_name.split('.')[1:])}"
            server_csr = f"{self.certdir.rstrip('/')}/{wildcard_name}.csr"

            # Verify if the certificate already exists
            certpath = f"{self.certdir.rstrip('/')}/{wildcard_name}.crt"
            if not os.path.isfile(certpath):
                # Create a Config-file that contains the wildcard-name
                with open(f"{self.certdir.rstrip('/')}/req.conf.tmpl", "r") as f:
                    config_template = f.read()
                config_template = config_template.replace("{{full_name}}", full_name)
                config_template = config_template.replace(
                    "{{wildcard_name}}", wildcard_name
                )
                config_template_name = (
                    f"{self.certdir.rstrip('/')}/{wildcard_name}.conf"
                )
                with open(config_template_name, "w") as f:
                    f.write(config_template)

                # Create a Certificate Signing Request
                #
                subject = f"/CN={full_name}"[0:64]
                commands = [
                    "openssl",
                    "req",
                    "-new",
                    "-key",
                    self.certkey,
                    "-out",
                    server_csr,
                ]
                commands.extend(["-subj", subject, "-config", config_template_name])

                p1 = Popen(commands)
                p1.communicate()
                debug(f"Created CSR in {server_csr}")

                # Create the actual certificate used by the requests
                p2 = Popen(
                    [
                        "openssl",
                        "x509",
                        "-req",
                        "-in",
                        server_csr,
                        "-days",
                        "3650",
                        "-CA",
                        self.cacert,
                        "-CAkey",
                        self.cakey,
                        "-set_serial",
                        f"{int(time.time() * 1000)}",
                        "-out",
                        certpath,
                        "-extensions",
                        "req_ext",
                        "-extfile",
                        config_template_name,
                    ],
                    stderr=PIPE,
                )
                p2.communicate()
                debug(f"Created certificate for {path} called {certpath}")
                os.remove(server_csr)
                os.remove(config_template_name)
                debug(f"Removed intermediate certificates for {certpath}")
        return certpath
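A short usage sketch of the class above, showing the wildcard reuse described in the comments (the hostname is only an example):

    # Sketch: generating (or reusing) a wildcard certificate for an S3 host.
    from moto.moto_proxy.certificate_creator import CertificateCreator

    creator = CertificateCreator()
    creator.validate()  # checks ca.key/ca.crt/cert.key and that certs/ is writable
    certpath = creator.create("somebucket.s3.amazonaws.com:443")
    # certpath now points at certs/*.s3.amazonaws.com.crt, shared by all buckets
    print(certpath)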
moto/moto_proxy/certs/__init__.py (new file, 3 lines)

# Folder that will contain SSL certificates
# The file `req.conf.tmpl` must be kept
# Other files (*.crt) act as a cache, and will be recreated if required
moto/moto_proxy/certs/req.conf.tmpl (new file, 13 lines)

[req]
prompt=no
default_md = sha256
distinguished_name = dn
req_extensions = req_ext
[dn]
commonName=amazonaws.com
[req_ext]
subjectAltName=@alt_names
[alt_names]
DNS.1=amazonaws.com
DNS.2={{full_name}}
DNS.3={{wildcard_name}}
moto/moto_proxy/proxy3.py (new file, 239 lines)

# -*- coding: utf-8 -*-
import socket
import ssl
import re
from http.server import BaseHTTPRequestHandler
from subprocess import check_output, CalledProcessError
from threading import Lock
from typing import Any, Dict

from botocore.awsrequest import AWSPreparedRequest
from moto.backends import get_backend
from moto.backend_index import backend_url_patterns
from moto.core import BackendDict, DEFAULT_ACCOUNT_ID
from moto.core.exceptions import RESTError
from . import debug, error, info, with_color
from .utils import get_body_from_form_data
from .certificate_creator import CertificateCreator

# Adapted from https://github.com/xxlv/proxy3


class MotoRequestHandler:
    def __init__(self, port: int):
        self.lock = Lock()
        self.port = port

    def get_backend_for_host(self, host: str) -> Any:
        if host == f"http://localhost:{self.port}":
            return "moto_api"

        for backend, pattern in backend_url_patterns:
            if pattern.match(host):
                return backend

    def get_handler_for_host(self, host: str, path: str) -> Any:
        # We do not match against URL parameters
        path = path.split("?")[0]
        backend_name = self.get_backend_for_host(host)
        backend_dict = get_backend(backend_name)

        # Get an instance of this backend.
        # We'll only use this backend to resolve the URL's, so the exact region/account_id is irrelevant
        if isinstance(backend_dict, BackendDict):
            if "us-east-1" in backend_dict[DEFAULT_ACCOUNT_ID]:
                backend = backend_dict[DEFAULT_ACCOUNT_ID]["us-east-1"]
            else:
                backend = backend_dict[DEFAULT_ACCOUNT_ID]["global"]
        else:
            backend = backend_dict["global"]

        for url_path, handler in backend.url_paths.items():
            if re.match(url_path, path):
                return handler

        return None

    def parse_request(
        self,
        method: str,
        host: str,
        path: str,
        headers: Any,
        body: bytes,
        form_data: Dict[str, Any],
    ) -> Any:
        handler = self.get_handler_for_host(host=host, path=path)
        full_url = host + path
        request = AWSPreparedRequest(
            method, full_url, headers, body, stream_output=False
        )
        request.form_data = form_data
        return handler(request, full_url, headers)


class ProxyRequestHandler(BaseHTTPRequestHandler):
    timeout = 5

    def __init__(self, *args: Any, **kwargs: Any):
        sock = [a for a in args if isinstance(a, socket.socket)][0]
        _, port = sock.getsockname()
        self.protocol_version = "HTTP/1.1"
        self.moto_request_handler = MotoRequestHandler(port)
        self.cert_creator = CertificateCreator()
        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    @staticmethod
    def validate() -> None:
        debug("Starting initial validation...")
        CertificateCreator().validate()
        # Validate the openssl command is available
        try:
            debug("Verifying SSL version...")
            svn_output = check_output(["openssl", "version"])
            debug(svn_output)
        except CalledProcessError as e:
            info(e.output)
            raise

    def do_CONNECT(self) -> None:
        certpath = self.cert_creator.create(self.path)

        self.wfile.write(
            f"{self.protocol_version} 200 Connection Established\r\n".encode("utf-8")
        )
        self.send_header("k", "v")
        self.end_headers()

        ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        ssl_context.load_cert_chain(
            keyfile=CertificateCreator.certkey,
            certfile=certpath,
        )
        ssl_context.check_hostname = False
        self.connection = ssl_context.wrap_socket(
            self.connection,
            server_side=True,
        )
        self.rfile = self.connection.makefile("rb", self.rbufsize)  # type: ignore
        self.wfile = self.connection.makefile("wb", self.wbufsize)  # type: ignore

        conntype = self.headers.get("Proxy-Connection", "")
        if self.protocol_version == "HTTP/1.1" and conntype.lower() != "close":
            self.close_connection = 0  # type: ignore
        else:
            self.close_connection = 1  # type: ignore

    def do_GET(self) -> None:
        req = self
        req_body = b""
        if "Content-Length" in req.headers:
            content_length = int(req.headers["Content-Length"])
            req_body = self.rfile.read(content_length)
        elif "chunked" in self.headers.get("Transfer-Encoding", ""):
            # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Transfer-Encoding
            req_body = self.read_chunked_body(self.rfile)
        if self.headers.get("Content-Type", "").startswith("multipart/form-data"):
            boundary = self.headers["Content-Type"].split("boundary=")[-1]
            req_body, form_data = get_body_from_form_data(req_body, boundary)  # type: ignore
            for key, val in form_data.items():
                self.headers[key] = [val]
        else:
            form_data = {}

        req_body = self.decode_request_body(req.headers, req_body)  # type: ignore
        if isinstance(self.connection, ssl.SSLSocket):
            host = "https://" + req.headers["Host"]
        else:
            host = "http://" + req.headers["Host"]
        path = req.path

        try:
            info(f"{with_color(33, req.command.upper())} {host}{path}")  # noqa
            if req_body is not None:
                debug("\tbody\t" + with_color(31, text=req_body))
            debug(f"\theaders\t{with_color(31, text=dict(req.headers))}")
            response = self.moto_request_handler.parse_request(
                method=req.command,
                host=host,
                path=path,
                headers=req.headers,
                body=req_body,
                form_data=form_data,
            )
            debug("\t=====RESPONSE========")
            debug("\t" + with_color(color=33, text=response))
            debug("\n")

            if isinstance(response, tuple):
                res_status, res_headers, res_body = response
            else:
                res_status, res_headers, res_body = (200, {}, response)

        except RESTError as e:
            if isinstance(e.get_headers(), list):
                res_headers = dict(e.get_headers())
            else:
                res_headers = e.get_headers()
            res_status = e.code
            res_body = e.get_body()

        except Exception as e:
            error(e)
            self.send_error(502)
            return

        res_reason = "OK"
        if isinstance(res_body, str):
            res_body = res_body.encode("utf-8")

        if "content-length" not in res_headers and res_body:
            res_headers["Content-Length"] = str(len(res_body))

        self.wfile.write(
            f"{self.protocol_version} {res_status} {res_reason}\r\n".encode("utf-8")
        )
        if res_headers:
            for k, v in res_headers.items():
                if isinstance(v, bytes):
                    self.send_header(k, v.decode("utf-8"))
                else:
                    self.send_header(k, v)
            self.end_headers()
        if res_body:
            self.wfile.write(res_body)
        self.close_connection = True

    def read_chunked_body(self, reader: Any) -> bytes:
        chunked_body = b""
        while True:
            line = reader.readline().strip()
            chunk_length = int(line, 16)
            if chunk_length != 0:
                chunked_body += reader.read(chunk_length)

            # Each chunk is followed by an additional empty newline
            reader.readline()

            # a chunk size of 0 is an end indication
            if chunk_length == 0:
                # AWS does send additional (checksum-)headers, but we can ignore them
                break
        return chunked_body

    def decode_request_body(self, headers: Dict[str, str], body: Any) -> Any:
        if body is None:
            return body
        if headers.get("Content-Type", "") in [
            "application/x-amz-json-1.1",
            "application/x-www-form-urlencoded; charset=utf-8",
        ]:
            return body.decode("utf-8")
        return body

    do_HEAD = do_GET
    do_POST = do_GET
    do_PUT = do_GET
    do_PATCH = do_GET
    do_DELETE = do_GET
    do_OPTIONS = do_GET
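The CONNECT/MITM flow implemented above can be exercised end-to-end with plain `requests`, provided the client trusts Moto's CA; a rough sketch (port and paths are the defaults assumed elsewhere in this PR):

    # Sketch: sending an HTTPS request through the proxy and trusting its CA.
    # do_CONNECT generates a *.s3.amazonaws.com certificate signed by ca.crt,
    # so verification succeeds when that CA is passed as the bundle.
    import requests

    resp = requests.get(
        "https://s3.amazonaws.com/",
        proxies={"https": "http://localhost:5005"},
        verify="moto/moto_proxy/ca.crt",  # path relative to a moto checkout
    )
    print(resp.status_code, resp.text[:200])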
moto/moto_proxy/setup_https_intercept.sh (new executable file, 9 lines)

#!/bin/sh

# The certificate key is valid until 25 September 2123
# To our AI overlords maintaining this system in that year:
# Please run this script to refresh the certificate to last another 100 years.

openssl genrsa -out ca.key 2048
openssl req -new -x509 -days 36500 -key ca.key -out ca.crt -subj "/CN=proxy2 CA"
openssl genrsa -out cert.key 2048
moto/moto_proxy/utils.py (new file, 24 lines)

import io
import multipart
from typing import Dict, Tuple, Optional


def get_body_from_form_data(
    body: bytes, boundary: str
) -> Tuple[Optional[bytes], Dict[str, str]]:
    body_stream = io.BytesIO(body)
    parser = multipart.MultipartParser(body_stream, boundary=boundary)

    data = None
    headers: Dict[str, str] = {}
    for prt in parser.parts():
        if prt.name == "upload_file":
            headers["key"] = prt.name
            data = prt.file.read()
        else:
            val = prt.file.read()
            if prt.name == "file":
                data = val
            else:
                headers[prt.name] = val.decode("utf-8")
    return data, headers
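A rough illustration of what `get_body_from_form_data` returns for a minimal multipart payload (the body here is hand-built purely for the example):

    # Sketch: parsing a hand-built multipart/form-data body.
    body = (
        b"--frontier\r\n"
        b'Content-Disposition: form-data; name="key"\r\n\r\n'
        b"some/object-key\r\n"
        b"--frontier\r\n"
        b'Content-Disposition: form-data; name="file"; filename="f.txt"\r\n\r\n'
        b"hello\r\n"
        b"--frontier--\r\n"
    )
    data, headers = get_body_from_form_data(body, boundary="frontier")
    # data == b"hello"; headers == {"key": "some/object-key"}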
moto/proxy.py (new file, 97 lines)

import argparse
import logging
import os
import signal
import sys
from http.server import ThreadingHTTPServer
from typing import Any

from moto.moto_proxy import logger
from moto.moto_proxy.proxy3 import ProxyRequestHandler, with_color, CertificateCreator


def signal_handler(signum: Any, frame: Any) -> None:  # pylint: disable=unused-argument
    sys.exit(0)


def get_help_msg() -> str:
    msg = """
###################################################################################
$$___$$_ __$$$___ $$$$$$_ __$$$___\t__$$$$$$__ $$$$$$__ __$$$___ $$___$$_ $$____$$_
$$$_$$$_ _$$_$$__ __$$___ _$$_$$__\t__$$___$$_ $$___$$_ _$$_$$__ $$$_$$$_ _$$__$$__
$$$$$$$_ $$___$$_ __$$___ $$___$$_\t__$$___$$_ $$___$$_ $$___$$_ _$$$$$__ __$$$$___
$$_$_$$_ $$___$$_ __$$___ $$___$$_\t__$$$$$$__ $$$$$$__ $$___$$_ _$$$$$__ ___$$____
$$___$$_ _$$_$$__ __$$___ _$$_$$__\t__$$______ $$___$$_ _$$_$$__ $$$_$$$_ ___$$____
$$___$$_ __$$$___ __$$___ __$$$___\t__$$______ $$___$$_ __$$$___ $$___$$_ ___$$____
###################################################################################"""
    msg += "\n"
    msg += "Using the CLI:"
    msg += "\n"
    msg += with_color(37, text="\texport HTTPS_PROXY=http://localhost:5005")
    msg += "\n"
    msg += with_color(37, text="\taws cloudformation list-stacks --no-verify-ssl\n")
    msg += "\n"
    msg += "Using pytest:"
    msg += "\n"
    msg += with_color(37, text=f"\texport AWS_CA_BUNDLE={CertificateCreator.cacert}")
    msg += "\n"
    msg += with_color(
        37,
        text="\tHTTPS_PROXY=http://localhost:5005 MOTO_PROXY_PORT=5005 pytest tests_dir\n",
    )
    return msg


def main(argv: Any = None) -> None:
    argv = argv or sys.argv[1:]
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, description=get_help_msg()
    )

    parser.add_argument(
        "-H", "--host", type=str, help="Which host to bind", default="127.0.0.1"
    )
    parser.add_argument(
        "-p",
        "--port",
        type=int,
        help="Port number to use for connection",
        default=int(os.environ.get("MOTO_PROXY_PORT", 5005)),
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Add verbose logging",
    )

    args = parser.parse_args(argv)

    if args.verbose:
        logger.setLevel(logging.DEBUG)

    ProxyRequestHandler.validate()

    if "MOTO_PORT" not in os.environ:
        os.environ["MOTO_PORT"] = f"{args.port}"
    os.environ["TEST_PROXY_MODE"] = "true"

    try:
        signal.signal(signal.SIGINT, signal_handler)
        signal.signal(signal.SIGTERM, signal_handler)
    except Exception:
        pass  # ignore "ValueError: signal only works in main thread"

    server_address = (args.host, args.port)

    httpd = ThreadingHTTPServer(server_address, ProxyRequestHandler)

    sa = httpd.socket.getsockname()

    print("Call `moto_proxy -h` for example invocations")
    print(f"Serving HTTP Proxy on {sa[0]}:{sa[1]} ...")  # noqa
    httpd.serve_forever()


if __name__ == "__main__":
    main()
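Besides the `moto_proxy` console script registered in `setup.cfg` below, the entry point can be invoked programmatically; a small sketch:

    # Sketch: starting the proxy from Python instead of the console script.
    # Equivalent to `moto_proxy -H 0.0.0.0 -p 5005 -v`; this call blocks in serve_forever().
    from moto.proxy import main

    main(["-H", "0.0.0.0", "-p", "5005", "-v"])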
moto/s3/responses.py

@@ -955,7 +955,6 @@ class S3Response(BaseResponse):
         if self.body:
             if self._create_bucket_configuration_is_empty(self.body):
                 raise MalformedXML()
-
             try:
                 forced_region = xmltodict.parse(self.body)[
                     "CreateBucketConfiguration"

@@ -1519,6 +1518,7 @@ class S3Response(BaseResponse):
             )
             response = ""
         response_headers.update(key.response_dict)
+        response_headers["content-length"] = len(response)
         return 200, response_headers, response
 
         storage_class = request.headers.get("x-amz-storage-class", "STANDARD")

@@ -1700,7 +1700,9 @@ class S3Response(BaseResponse):
 
             template = self.response_template(S3_OBJECT_COPY_RESPONSE)
             response_headers.update(new_key.response_dict)
-            return 200, response_headers, template.render(key=new_key)
+            response = template.render(key=new_key)
+            response_headers["content-length"] = len(response)
+            return 200, response_headers, response
 
         # Initial data
         new_key = self.backend.put_object(

@@ -1729,6 +1731,8 @@ class S3Response(BaseResponse):
             self.backend.set_key_tags(new_key, tagging)
 
         response_headers.update(new_key.response_dict)
+        # Remove content-length - the response body is empty for this request
+        response_headers.pop("content-length", None)
         return 200, response_headers, ""
 
     def _key_response_head(
moto/settings.py

@@ -6,8 +6,12 @@ from functools import lru_cache
 from typing import List, Optional
 
 
+def test_proxy_mode() -> bool:
+    return os.environ.get("TEST_PROXY_MODE", "0").lower() == "true"
+
+
 TEST_SERVER_MODE = os.environ.get("TEST_SERVER_MODE", "0").lower() == "true"
-TEST_DECORATOR_MODE = not TEST_SERVER_MODE
+TEST_DECORATOR_MODE = not TEST_SERVER_MODE and not test_proxy_mode()
 
 INITIAL_NO_AUTH_ACTION_COUNT = float(
     os.environ.get("INITIAL_NO_AUTH_ACTION_COUNT", float("inf"))

@@ -100,6 +104,10 @@ def moto_server_port() -> str:
     return os.environ.get("MOTO_PORT") or "5000"
 
 
+def moto_proxy_port() -> str:
+    return os.environ.get("MOTO_PROXY_PORT") or "5005"
+
+
 @lru_cache()
 def moto_server_host() -> str:
     if is_docker():

@@ -126,6 +134,12 @@ def test_server_mode_endpoint() -> str:
     )
 
 
+def test_proxy_mode_endpoint() -> str:
+    return os.environ.get(
+        "TEST_PROXY_MODE_ENDPOINT", f"http://localhost:{moto_proxy_port()}"
+    )
+
+
 def is_docker() -> bool:
     path = pathlib.Path("/proc/self/cgroup")
     return (
moto/utilities/utils.py

@@ -23,7 +23,11 @@ def load_resource(package: str, resource: str) -> Any:
 
 
 def load_resource_as_str(package: str, resource: str) -> str:
-    return pkgutil.get_data(package, resource).decode("utf-8")  # type: ignore
+    return load_resource_as_bytes(package, resource).decode("utf-8")  # type: ignore
+
+
+def load_resource_as_bytes(package: str, resource: str) -> bytes:
+    return pkgutil.get_data(package, resource)  # type: ignore
 
 
 def merge_multiple_dicts(*args: Any) -> Dict[str, Any]:
setup.cfg (17 additions)

@@ -55,6 +55,22 @@ all =
     py-partiql-parser==0.3.7
     aws-xray-sdk!=0.96,>=0.93
     setuptools
+    multipart
+proxy =
+    python-jose[cryptography]>=3.1.0,<4.0.0
+    ecdsa!=0.15
+    docker>=2.5.1
+    graphql-core
+    PyYAML>=5.1
+    cfn-lint>=0.40.0
+    sshpubkeys>=3.1.0
+    openapi-spec-validator>=0.2.8
+    pyparsing>=3.0.7
+    jsondiff>=1.1.2
+    py-partiql-parser==0.3.7
+    aws-xray-sdk!=0.96,>=0.93
+    setuptools
+    multipart
 server =
     python-jose[cryptography]>=3.1.0,<4.0.0
     ecdsa!=0.15

@@ -229,6 +245,7 @@ xray =
 [options.entry_points]
 console_scripts =
     moto_server = moto.server:main
+    moto_proxy = moto.proxy:main
 
 [bdist_wheel]
 universal=1
tests/test_acm

@@ -581,7 +581,7 @@ def test_request_certificate_issued_status():
     assert resp["Certificate"]["CertificateArn"] == arn
     assert resp["Certificate"]["Status"] == "PENDING_VALIDATION"
 
-    if not settings.TEST_SERVER_MODE:
+    if settings.TEST_DECORATOR_MODE:
         # Move time to get it issued.
         with freeze_time("2012-01-01 12:02:00"):
             resp = client.describe_certificate(CertificateArn=arn)

@@ -593,7 +593,7 @@ def test_request_certificate_issued_status():
 @mock_acm
 def test_request_certificate_issued_status_with_wait_in_envvar():
     # After requesting a certificate, it should then auto-validate after 3 seconds
-    if settings.TEST_SERVER_MODE:
+    if not settings.TEST_DECORATOR_MODE:
         raise SkipTest("Cant manipulate time in server mode")
 
     client = boto3.client("acm", region_name="eu-central-1")

@@ -621,7 +621,7 @@ def test_request_certificate_issued_status_with_wait_in_envvar():
 
 @mock_acm
 def test_request_certificate_with_mutiple_times():
-    if settings.TEST_SERVER_MODE:
+    if not settings.TEST_DECORATOR_MODE:
         raise SkipTest("Cant manipulate time in server mode")
 
     # After requesting a certificate, it should then auto-validate after 1 minute
tests/test_athena

@@ -323,7 +323,7 @@ def test_get_query_results_queue():
     assert result["ResultSet"]["Rows"] == []
     assert result["ResultSet"]["ResultSetMetadata"]["ColumnInfo"] == []
 
-    if not settings.TEST_SERVER_MODE:
+    if settings.TEST_DECORATOR_MODE:
         backend = athena_backends[DEFAULT_ACCOUNT_ID]["us-east-1"]
         rows = [{"Data": [{"VarCharValue": ".."}]}]
         column_info = [
tests/test_awslambda (several files)

@@ -30,6 +30,8 @@ boto3.setup_default_session(region_name=_lambda_region)
 @pytest.mark.parametrize("region", ["us-west-2", "cn-northwest-1", "us-isob-east-1"])
 @mock_lambda
 def test_lambda_regions(region):
+    if not settings.TEST_DECORATOR_MODE:
+        raise SkipTest("Can only set EnvironVars in DecoratorMode")
     client = boto3.client("lambda", region_name=region)
     resp = client.list_functions()
     assert resp["ResponseMetadata"]["HTTPStatusCode"] == 200

@@ -16,6 +16,7 @@ from .utilities import (
     get_lambda_using_environment_port,
     get_lambda_using_network_mode,
     get_test_zip_largeresponse,
+    get_proxy_zip_file,
 )
 from ..markers import requires_docker

@@ -339,3 +340,39 @@ def test_invoke_function_large_response():
     # Absolutely fine when invoking async
     resp = conn.invoke(FunctionName=fxn["FunctionArn"], InvocationType="Event")
     assert "FunctionError" not in resp
+
+
+@mock_lambda
+def test_invoke_lambda_with_proxy():
+    if not settings.test_proxy_mode():
+        raise SkipTest("We only want to test this in ProxyMode")
+
+    conn = boto3.resource("ec2", _lambda_region)
+    vol = conn.create_volume(Size=99, AvailabilityZone=_lambda_region)
+    vol = conn.Volume(vol.id)
+
+    conn = boto3.client("lambda", _lambda_region)
+    function_name = str(uuid4())[0:6]
+    conn.create_function(
+        FunctionName=function_name,
+        Runtime=PYTHON_VERSION,
+        Role=get_role_name(),
+        Handler="lambda_function.lambda_handler",
+        Code={"ZipFile": get_proxy_zip_file()},
+        Description="test lambda function",
+        Timeout=3,
+        MemorySize=128,
+        Publish=True,
+    )
+
+    in_data = {"volume_id": vol.id}
+    result = conn.invoke(
+        FunctionName=function_name,
+        InvocationType="RequestResponse",
+        Payload=json.dumps(in_data),
+    )
+    assert result["StatusCode"] == 200
+    payload = result["Payload"].read().decode("utf-8")
+
+    expected_payload = {"id": vol.id, "state": vol.state, "size": vol.size}
+    assert json.loads(payload) == expected_payload

@@ -85,6 +85,21 @@ def lambda_handler(event, context):
     return _process_lambda(func_str)
 
 
+def get_proxy_zip_file():
+    func_str = """
+import boto3
+
+def lambda_handler(event, context):
+    ec2 = boto3.resource('ec2', region_name='us-west-2')
+
+    volume_id = event.get('volume_id')
+    vol = ec2.Volume(volume_id)
+
+    return {'id': vol.id, 'state': vol.state, 'size': vol.size}
+"""
+    return _process_lambda(func_str)
+
+
 def get_test_zip_file3():
     pfunc = """
 def lambda_handler(event, context):
tests/test_s3

@@ -21,6 +21,7 @@ import requests
 from moto import settings, mock_s3, mock_config
 from moto.moto_api import state_manager
 from moto.core.utils import utcnow
+from moto import moto_proxy
 from moto.s3.responses import DEFAULT_REGION_NAME
 import moto.s3.models as s3model

@@ -113,7 +114,7 @@ def test_key_save_to_missing_bucket():
 @mock_s3
 def test_missing_key_request():
     if not settings.TEST_DECORATOR_MODE:
-        raise SkipTest("Only test status code in non-ServerMode")
+        raise SkipTest("Only test status code in DecoratorMode")
     s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
     s3_client.create_bucket(Bucket="foobar")

@@ -460,6 +461,8 @@ def test_bucket_name_with_special_chars(name):
 )
 @mock_s3
 def test_key_with_special_characters(key):
+    if settings.test_proxy_mode():
+        raise SkipTest("Keys starting with a / don't work well in ProxyMode")
     s3_resource = boto3.resource("s3", region_name=DEFAULT_REGION_NAME)
     client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
     bucket = s3_resource.Bucket("testname")

@@ -774,7 +777,10 @@ def test_streaming_upload_from_file_to_presigned_url():
         "put_object", params, ExpiresIn=900
     )
     with open(__file__, "rb") as fhandle:
-        response = requests.get(presigned_url, data=fhandle)
+        get_kwargs = {"data": fhandle}
+        if settings.test_proxy_mode():
+            add_proxy_details(get_kwargs)
+        response = requests.get(presigned_url, **get_kwargs)
     assert response.status_code == 200

@@ -793,7 +799,10 @@ def test_upload_from_file_to_presigned_url():
     file.close()
     files = {"upload_file": open("text.txt", "rb")}
 
-    requests.put(presigned_url, files=files)
+    put_kwargs = {"files": files}
+    if settings.test_proxy_mode():
+        add_proxy_details(put_kwargs)
+    requests.put(presigned_url, **put_kwargs)
     resp = s3_client.get_object(Bucket="mybucket", Key="file_upload")
     data = resp["Body"].read()
     assert data == b"test"

@@ -2837,6 +2846,8 @@ def test_root_dir_with_empty_name_works():
 @mock_s3
 def test_leading_slashes_not_removed(bucket_name):
     """Make sure that leading slashes are not removed internally."""
+    if settings.test_proxy_mode():
+        raise SkipTest("Doesn't quite work right with the Proxy")
     s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
s3_client = boto3.client("s3", region_name=DEFAULT_REGION_NAME)
|
||||||
s3_client.create_bucket(Bucket=bucket_name)
|
s3_client.create_bucket(Bucket=bucket_name)
|
||||||
|
|
||||||
@ -3020,9 +3031,14 @@ def test_creating_presigned_post():
|
|||||||
Conditions=conditions,
|
Conditions=conditions,
|
||||||
ExpiresIn=1000,
|
ExpiresIn=1000,
|
||||||
)
|
)
|
||||||
resp = requests.post(
|
kwargs = {
|
||||||
data["url"], data=data["fields"], files={"file": fdata}, allow_redirects=False
|
"data": data["fields"],
|
||||||
)
|
"files": {"file": fdata},
|
||||||
|
"allow_redirects": False,
|
||||||
|
}
|
||||||
|
if settings.test_proxy_mode():
|
||||||
|
add_proxy_details(kwargs)
|
||||||
|
resp = requests.post(data["url"], **kwargs)
|
||||||
assert resp.status_code == 303
|
assert resp.status_code == 303
|
||||||
redirect = resp.headers["Location"]
|
redirect = resp.headers["Location"]
|
||||||
assert redirect.startswith(success_url)
|
assert redirect.startswith(success_url)
|
||||||
@ -3051,7 +3067,10 @@ def test_presigned_put_url_with_approved_headers():
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Verify S3 throws an error when the header is not provided
|
# Verify S3 throws an error when the header is not provided
|
||||||
response = requests.put(url, data=content)
|
kwargs = {"data": content}
|
||||||
|
if settings.test_proxy_mode():
|
||||||
|
add_proxy_details(kwargs)
|
||||||
|
response = requests.put(url, **kwargs)
|
||||||
assert response.status_code == 403
|
assert response.status_code == 403
|
||||||
assert "<Code>SignatureDoesNotMatch</Code>" in str(response.content)
|
assert "<Code>SignatureDoesNotMatch</Code>" in str(response.content)
|
||||||
assert (
|
assert (
|
||||||
@ -3060,9 +3079,10 @@ def test_presigned_put_url_with_approved_headers():
|
|||||||
) in str(response.content)
|
) in str(response.content)
|
||||||
|
|
||||||
# Verify S3 throws an error when the header has the wrong value
|
# Verify S3 throws an error when the header has the wrong value
|
||||||
response = requests.put(
|
kwargs = {"data": content, "headers": {"Content-Type": "application/unknown"}}
|
||||||
url, data=content, headers={"Content-Type": "application/unknown"}
|
if settings.test_proxy_mode():
|
||||||
)
|
add_proxy_details(kwargs)
|
||||||
|
response = requests.put(url, **kwargs)
|
||||||
assert response.status_code == 403
|
assert response.status_code == 403
|
||||||
assert "<Code>SignatureDoesNotMatch</Code>" in str(response.content)
|
assert "<Code>SignatureDoesNotMatch</Code>" in str(response.content)
|
||||||
assert (
|
assert (
|
||||||
@ -3071,9 +3091,10 @@ def test_presigned_put_url_with_approved_headers():
|
|||||||
) in str(response.content)
|
) in str(response.content)
|
||||||
|
|
||||||
# Verify S3 uploads correctly when providing the meta data
|
# Verify S3 uploads correctly when providing the meta data
|
||||||
response = requests.put(
|
kwargs = {"data": content, "headers": {"Content-Type": expected_contenttype}}
|
||||||
url, data=content, headers={"Content-Type": expected_contenttype}
|
if settings.test_proxy_mode():
|
||||||
)
|
add_proxy_details(kwargs)
|
||||||
|
response = requests.put(url, **kwargs)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
# Assert the object exists
|
# Assert the object exists
|
||||||
@ -3103,7 +3124,10 @@ def test_presigned_put_url_with_custom_headers():
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Verify S3 uploads correctly when providing the meta data
|
# Verify S3 uploads correctly when providing the meta data
|
||||||
response = requests.put(url, data=content)
|
kwargs = {"data": content}
|
||||||
|
if settings.test_proxy_mode():
|
||||||
|
add_proxy_details(kwargs)
|
||||||
|
response = requests.put(url, **kwargs)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
# Assert the object exists
|
# Assert the object exists
|
||||||
@ -3430,3 +3454,8 @@ def test_checksum_response(algorithm):
|
|||||||
ChecksumAlgorithm=algorithm,
|
ChecksumAlgorithm=algorithm,
|
||||||
)
|
)
|
||||||
assert f"Checksum{algorithm}" in response
|
assert f"Checksum{algorithm}" in response
|
||||||
|
|
||||||
|
|
||||||
|
def add_proxy_details(kwargs):
|
||||||
|
kwargs["proxies"] = {"https": "http://localhost:5005"}
|
||||||
|
kwargs["verify"] = moto_proxy.__file__.replace("__init__.py", "ca.crt")
|
||||||
|
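The two keys this helper injects map onto the standard proxies/verify arguments of requests, which the presigned-URL tests below pass per call. A minimal sketch of the equivalent one-time setup on a requests.Session, assuming the proxy from this change is listening on localhost:5005; presigned_url is a hypothetical placeholder obtained elsewhere:

import requests
from moto import moto_proxy

session = requests.Session()
# Same values add_proxy_details() sets per call, configured once for the whole session.
session.proxies = {"https": "http://localhost:5005"}
session.verify = moto_proxy.__file__.replace("__init__.py", "ca.crt")

# response = session.get(presigned_url)  # presigned_url generated elsewhere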
@@ -7,7 +7,8 @@ import requests

 from botocore.exceptions import ClientError
 from botocore.handlers import disable_signing
-from moto import mock_s3
+from moto import mock_s3, settings
+from .test_s3 import add_proxy_details

 DEFAULT_REGION_NAME = "us-east-1"

@@ -116,8 +117,11 @@ def test_s3_object_in_public_bucket_using_multiple_presigned_urls():
     presigned_url = boto3.client("s3").generate_presigned_url(
         "get_object", params, ExpiresIn=900
     )
+    kwargs = {}
+    if settings.test_proxy_mode():
+        add_proxy_details(kwargs)
     for i in range(1, 10):
-        response = requests.get(presigned_url)
+        response = requests.get(presigned_url, **kwargs)
         assert response.status_code == 200, f"Failed on req number {i}"


@@ -263,8 +267,10 @@ def test_object_acl_with_presigned_post():
     )

     with open(object_name, "rb") as fhandle:
-        files = {"file": (object_name, fhandle)}
-        requests.post(response["url"], data=response["fields"], files=files)
+        kwargs = {"files": {"file": (object_name, fhandle)}}
+        if settings.test_proxy_mode():
+            add_proxy_details(kwargs)
+        requests.post(response["url"], data=response["fields"], **kwargs)

     response = s3_client.get_object_acl(Bucket=bucket_name, Key=object_name)

@@ -5,12 +5,16 @@ import requests
 import pytest

 from botocore.exceptions import ClientError
+from moto import settings
 from moto.moto_server.threaded_moto_server import ThreadedMotoServer
+from unittest import SkipTest


 class TestBucketPolicy:
     @classmethod
     def setup_class(cls):
+        if not settings.TEST_DECORATOR_MODE:
+            raise SkipTest("No point testing the ThreadedServer in Server/Proxy-mode")
         cls.server = ThreadedMotoServer(port="6000", verbose=False)
         cls.server.start()

@@ -230,6 +230,8 @@ class TestS3FileHandleClosures(TestCase):

 class TestS3FileHandleClosuresUsingMocks(TestCase):
     def setUp(self) -> None:
+        if not settings.TEST_DECORATOR_MODE:
+            raise SkipTest("No point in testing ServerMode, we're not using boto3")
         self.s3_client = boto3.client("s3", "us-east-1")

     @verify_zero_warnings

@@ -11,7 +11,12 @@ import requests
 from moto import settings, mock_s3
 import moto.s3.models as s3model
 from moto.s3.responses import DEFAULT_REGION_NAME
-from moto.settings import get_s3_default_key_buffer_size, S3_UPLOAD_PART_MIN_SIZE
+from moto.settings import (
+    get_s3_default_key_buffer_size,
+    S3_UPLOAD_PART_MIN_SIZE,
+    test_proxy_mode,
+)
+from .test_s3 import add_proxy_details

 if settings.TEST_DECORATOR_MODE:
     REDUCED_PART_SIZE = 256

@@ -977,7 +982,10 @@ def test_generate_presigned_url_on_multipart_upload_without_acl():
     url = client.generate_presigned_url(
         "head_object", Params={"Bucket": bucket_name, "Key": object_key}
     )
-    res = requests.get(url)
+    kwargs = {}
+    if test_proxy_mode():
+        add_proxy_details(kwargs)
+    res = requests.get(url, **kwargs)
     assert res.status_code == 200


@@ -3,8 +3,9 @@ from botocore.client import ClientError
 import pytest
 import requests

-from moto import mock_s3
+from moto import mock_s3, settings
 from moto.s3.responses import DEFAULT_REGION_NAME
+from .test_s3 import add_proxy_details


 @mock_s3

@@ -470,6 +471,9 @@ def test_generate_url_for_tagged_object():
     url = s3_client.generate_presigned_url(
         "get_object", Params={"Bucket": "my-bucket", "Key": "test.txt"}
     )
-    response = requests.get(url)
+    kwargs = {}
+    if settings.test_proxy_mode():
+        add_proxy_details(kwargs)
+    response = requests.get(url, **kwargs)
     assert response.content == b"abc"
     assert response.headers["x-amz-tagging-count"] == "1"