
Moto Fixtures

aws_credentials(aws_region, monkeypatch)

Mocked AWS Credentials for moto.

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def aws_credentials(aws_region, monkeypatch):
    """Mocked AWS Credentials for moto."""
    try:
        clean_aws_credentials(monkeypatch)

        monkeypatch.setenv("AWS_ACCESS_KEY_ID", AWS_ACCESS_KEY_ID)
        monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", AWS_SECRET_ACCESS_KEY)
        monkeypatch.setenv("AWS_SECURITY_TOKEN", "testing")
        monkeypatch.setenv("AWS_SESSION_TOKEN", "testing")

        yield
        clean_aws_credentials(monkeypatch)

    finally:
        clean_aws_credentials(monkeypatch)
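
A minimal usage sketch, assuming a test module where moto's mock_s3 decorator is available; the test name and client calls are illustrative, not part of the package:

    import boto3
    from moto import mock_s3

    def test_list_buckets_with_mock_credentials(aws_credentials, aws_region):
        # The fixture has already exported fake AWS_* variables via monkeypatch,
        # so the client below signs requests with harmless test credentials.
        with mock_s3():
            client = boto3.client("s3", region_name=aws_region)
            assert client.list_buckets()["Buckets"] == []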

aws_profile_credentials(monkeypatch)

This fixture returns a callable (a partial of patch_aws_profile_credentials). Calling it with a profile name yields a context manager that applies awscli credentials for that named profile - it assumes the named profile is available - and uses monkeypatch to set awscli environment variables with those credentials. It cleans up the environment variables.

The context manager returned is used like so:

    with mock_aws_credentials(profile_name, aws_region) as profile_session:
        assert isinstance(profile_session, boto3.session.Session)

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def aws_profile_credentials(monkeypatch) -> Callable:
    """
    This returns a context manager that applies awscli credentials for
    a named profile - it assumes the named profile is available - and it
    will use monkeypatch to set awscli environment variables with those
    credentials.  It cleans up the environment variables.

    The context manager returned is used like so:

        with mock_aws_credentials(profile_name, aws_region) as profile_session:
            assert isinstance(profile_session, boto3.session.Session)

    """
    return partial(patch_aws_profile_credentials, monkeypatch=monkeypatch)
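
A hedged usage sketch; the "default" profile name and region are assumptions about the local AWS configuration (the context manager skips the test when the profile is missing):

    import boto3

    def test_named_profile_session(aws_profile_credentials):
        # The fixture returns a partial of patch_aws_profile_credentials,
        # so calling it yields a context manager around a boto3 Session.
        with aws_profile_credentials("default", aws_region="us-east-1") as session:
            assert isinstance(session, boto3.session.Session)
            assert session.profile_name == "default"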

clean_aws_credentials(monkeypatch)

This uses monkeypatch to clear awscli environment variables and applies:

    boto3.DEFAULT_SESSION = None
    S3FileSystem.clear_instance_cache()

Source code in pytest_aiomoto/aws_credentials.py
def clean_aws_credentials(monkeypatch):
    """
    This uses monkeypatch to clear awscli environment variables and applies:

        boto3.DEFAULT_SESSION = None
        S3FileSystem.clear_instance_cache()

    """
    # See https://github.com/dask/s3fs/issues/461 for details about
    # s3fs using an instance cache with stored credentials.
    boto3.DEFAULT_SESSION = None
    S3FileSystem.clear_instance_cache()
    monkeypatch.delenv("AWS_CONFIG_FILE", raising=False)
    monkeypatch.delenv("AWS_SHARED_CREDENTIALS_FILE", raising=False)
    monkeypatch.delenv("AWS_PROFILE", raising=False)
    monkeypatch.delenv("AWS_DEFAULT_PROFILE", raising=False)
    monkeypatch.delenv("AWS_ACCOUNT", raising=False)
    monkeypatch.delenv("AWS_ACCESS_KEY_ID", raising=False)
    monkeypatch.delenv("AWS_SECRET_ACCESS_KEY", raising=False)
    monkeypatch.delenv("AWS_SECURITY_TOKEN", raising=False)
    monkeypatch.delenv("AWS_SESSION_TOKEN", raising=False)

ctx_aws_default_credentials(monkeypatch)

This returns a context manager that applies mocked awscli credentials for the default profile, writing them to a temporary credentials file. It uses monkeypatch to set awscli environment variables with those credentials and cleans them up on exit.

Source code in pytest_aiomoto/aws_credentials.py
@contextmanager
def ctx_aws_default_credentials(monkeypatch) -> boto3.session.Session:
    """
    This returns a context manager that applies awscli credentials for
    a named profile - it assumes the named profile is available - and it
    will use monkeypatch to set awscli environment variables with those
    credentials.  It cleans up the environment variables.
    """
    profile_name = "default"
    default_profile = get_default_profile()
    aws_region = default_profile["region"]

    with ctx_aws_default_credentials_file(monkeypatch) as profile_path:
        assert profile_path.exists()
        profile_file = str(profile_path.absolute())
        try:
            session = botocore.session.Session()
            profiles = session.full_config.get("profiles")
            profile = profiles.get("default")
            assert profile == default_profile

            session = setup_aws_credentials(
                profile_name,
                aws_region=aws_region,
                profile_file=profile_file,
                monkeypatch=monkeypatch,
            )
            yield session
            clean_aws_credentials(monkeypatch)

        except ProfileNotFound:
            # Skip for missing credentials
            clean_aws_credentials(monkeypatch)
            pytest.skip(f"Missing AWS credentials ({profile_name}), skipping test")

        finally:
            clean_aws_credentials(monkeypatch)

ctx_aws_default_credentials_file(monkeypatch)

This returns a context manager that mocks [default] profile credentials in a temporary credentials file.

See also:

- https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#shared-credentials-file
- https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html#envvars-list

Source code in pytest_aiomoto/aws_credentials.py
@contextmanager
def ctx_aws_default_credentials_file(monkeypatch) -> Path:
    """
    This returns a context manager that mocks [default] profile credentials
    in a temporary credentials file.

    ... seealso::
        - https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#shared-credentials-file
        - https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html#envvars-list

    """
    credentials_ini = get_default_profile_credentials_content()
    config_ini = get_default_profile_config_content()

    clean_aws_credentials(monkeypatch)
    with TemporaryDirectory(prefix="pytest_aiomoto_") as dirname:
        try:
            tmp_path = Path(dirname)

            credentials_path = tmp_path / ".aws" / "credentials"
            # credentials_path = Path(credentials_file)
            if not credentials_path.exists():
                credentials_path.parent.mkdir(parents=True, exist_ok=True)
                credentials_path.touch(exist_ok=True)
            credentials_path.write_text(credentials_ini)
            credentials_mock_data = credentials_path.read_text()
            assert credentials_mock_data == credentials_ini

            config_path = tmp_path / ".aws" / "config"
            if not config_path.exists():
                config_path.parent.mkdir(parents=True, exist_ok=True)
                config_path.touch(exist_ok=True)
            config_path.write_text(config_ini)
            config_mock_data = config_path.read_text()
            assert config_mock_data == config_ini

            # Note: boto3 seems to read the credentials file, not the config file;
            #       whereas the awscli might prefer the config file; and they use
            #       different env-vars for custom file locations.
            credentials_file = str(credentials_path.absolute())
            monkeypatch.setenv("AWS_CONFIG_FILE", credentials_file)
            monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE",  credentials_file)
            yield credentials_path
            clean_aws_credentials(monkeypatch)

        finally:
            clean_aws_credentials(monkeypatch)
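
A short sketch of using the context manager directly; the import path and the "[default]" section check are assumptions based on the default profile helpers in this module:

    import os
    from pytest_aiomoto.aws_credentials import ctx_aws_default_credentials_file  # assumed import path

    def test_default_credentials_file(monkeypatch):
        with ctx_aws_default_credentials_file(monkeypatch) as credentials_path:
            assert credentials_path.exists()
            # Both env-vars point at the same temporary credentials file.
            assert os.environ["AWS_SHARED_CREDENTIALS_FILE"] == str(credentials_path.absolute())
            assert "[default]" in credentials_path.read_text()  # assumed section header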

default_profile()

A dict of default profile key:value pairs

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def default_profile() -> Dict:
    """
    a default profile key:value pairs
    """
    return get_default_profile()

default_profile_credentials_content(default_profile)

A string for the [default] entry in a ~/.aws/credentials file

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def default_profile_credentials_content(default_profile) -> str:
    """
    A string for the [default] entry in ~/.aws/credentials file
    """
    return get_default_profile_credentials_content()

get_default_profile()

A dict of default profile key:value pairs

Source code in pytest_aiomoto/aws_credentials.py
def get_default_profile() -> Dict:
    """
    a default profile key:value pairs
    """
    return deepcopy({
        "aws_access_key_id": AWS_ACCESS_KEY_ID,
        "aws_secret_access_key": AWS_SECRET_ACCESS_KEY,
        "region": AWS_REGION
    })

mock_default_credentials_file(monkeypatch)

Mocks a default profile in a temporary file like ~/.aws/credentials and yields the path to that file

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def mock_default_credentials_file(monkeypatch) -> Path:
    """
    A context manager to mock a default profile in a
    mocked temporary file like ~/.aws/credentials
    """
    with ctx_aws_default_credentials_file(monkeypatch) as profile_path:
        yield profile_path

mock_default_profile(default_profile, mocker, monkeypatch)

Mock a default profile by patching botocore.session.Session.get_scoped_config

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def mock_default_profile(default_profile, mocker, monkeypatch) -> Dict:
    """
    mock a default profile by patching botocore.session.Session.get_scoped_config
    """
    mock_config = mocker.patch(
        "botocore.session.Session.get_scoped_config"
    )
    mock_config.return_value = default_profile
    yield default_profile

mock_default_session(monkeypatch)

Mocks a boto3 session with a default profile by creating a temporary credentials file like ~/.aws/credentials

Source code in pytest_aiomoto/aws_credentials.py
@pytest.fixture
def mock_default_session(monkeypatch) -> boto3.session.Session:
    """
    A context manager to mock a session with a default profile by
    creating a mocked temporary file like ~/.aws/credentials
    """
    with ctx_aws_default_credentials(monkeypatch) as m_session:
        yield m_session

patch_aws_profile_credentials(profile_name, aws_region=None, profile_file=None, monkeypatch=None)

This returns a context manager that applies awscli credentials for a named profile - it assumes the named profile is available - and it will use monkeypatch to set awscli environment variables with those credentials. It cleans up the environment variables.

Source code in pytest_aiomoto/aws_credentials.py
@contextmanager
def patch_aws_profile_credentials(profile_name, aws_region=None, profile_file=None, monkeypatch=None) -> boto3.session.Session:
    """
    This returns a context manager that applies awscli credentials for
    a named profile - it assumes the named profile is available - and it
    will use monkeypatch to set awscli environment variables with those
    credentials.  It cleans up the environment variables.
    """
    try:
        clean_aws_credentials(monkeypatch)
        session = setup_aws_credentials(
            profile_name,
            aws_region=aws_region,
            profile_file=profile_file,
            monkeypatch=monkeypatch,
        )
        yield session
        clean_aws_credentials(monkeypatch)

    except ProfileNotFound:
        # Skip for missing credentials
        clean_aws_credentials(monkeypatch)
        pytest.skip(f"Missing AWS credentials ({profile_name}), skipping test")

    finally:
        clean_aws_credentials(monkeypatch)

setup_aws_credentials(profile_name, aws_region=None, profile_file=None, monkeypatch=None)

Any clients created from this session will use credentials from the [profile_name] section of ~/.aws/credentials.

This assumes the profile_name is available in user credentials, it does not mock the credentials.

It returns a boto3.Session for the named profile, and it uses monkeypatch to set awscli environment variables from the profile credentials.

Source code in pytest_aiomoto/aws_credentials.py
def setup_aws_credentials(profile_name, aws_region=None, profile_file=None, monkeypatch=None) -> boto3.session.Session:
    """
    Any clients created from this session will use credentials
    from the [profile_name] section of ~/.aws/credentials.

    This assumes the profile_name is available in user credentials,
    it does not mock the credentials.

    It returns a `boto3.Session` for the named profile, and it uses
    monkeypatch to set awscli environment variables with the profile credentials.
    """
    monkeypatch.setenv("AWS_DEFAULT_PROFILE", profile_name)
    monkeypatch.setenv("AWS_DEFAULT_REGION", aws_region)
    if profile_file:
        monkeypatch.setenv("AWS_CONFIG_FILE", profile_file)
        monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", profile_file)
    session = boto3.Session(profile_name=profile_name, region_name=aws_region)
    credentials = session.get_credentials().get_frozen_credentials()
    monkeypatch.setenv("AWS_PROFILE", session.profile_name)
    monkeypatch.setenv("AWS_DEFAULT_PROFILE", session.profile_name)
    monkeypatch.setenv("AWS_DEFAULT_REGION", session.region_name)
    monkeypatch.setenv("AWS_ACCESS_KEY_ID", credentials.access_key)
    monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", credentials.secret_key)
    return session
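
A sketch of calling the helper directly inside a test; it assumes a "default" profile exists in the local AWS configuration (otherwise boto3 raises ProfileNotFound), and the import path is assumed from the source location above:

    from pytest_aiomoto.aws_credentials import setup_aws_credentials  # assumed import path

    def test_setup_aws_credentials(monkeypatch):
        session = setup_aws_credentials("default", aws_region="us-east-1", monkeypatch=monkeypatch)
        assert session.profile_name == "default"
        assert session.region_name == "us-east-1"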

batch_infrastructure(aws_clients, compute_env_name, job_queue_name, job_definition_name, iam_role_name)

Create AWS Batch infrastructure, including:

- VPC with subnet
- Security group and IAM role
- Batch compute environment and job queue
- Batch job definition

This function is not a fixture so that tests can pass the AWS clients to it and then continue to use the infrastructure it creates while the client fixtures remain intact for the duration of a test.

Source code in pytest_aiomoto/aws_batch.py
@contextmanager
def batch_infrastructure(
    aws_clients: AwsBatchClients,
    compute_env_name: str,
    job_queue_name: str,
    job_definition_name: str,
    iam_role_name: str,
) -> AwsBatchInfrastructure:
    """
    Create AWS Batch infrastructure, including:
    - VPC with subnet
    - Security group and IAM role
    - Batch compute environment and job queue
    - Batch job definition

    This function is not a fixture so that tests can pass the AWS clients to it and then
    continue to use the infrastructure created by it while the client fixtures are intact for
    the duration of a test.
    """

    infrastructure = AwsBatchInfrastructure()

    try:

        infrastructure.aws_region = aws_clients.region
        infrastructure.aws_clients = aws_clients

        resp = aws_clients.ec2.create_vpc(CidrBlock="172.30.0.0/24")
        vpc_id = resp["Vpc"]["VpcId"]

        resp = aws_clients.ec2.create_subnet(
            AvailabilityZone=f"{aws_clients.region}a",
            CidrBlock="172.30.0.0/25",
            VpcId=vpc_id,
        )
        subnet_id = resp["Subnet"]["SubnetId"]

        resp = aws_clients.ec2.create_security_group(
            Description="moto_test_sg_desc", GroupName="moto_test_sg", VpcId=vpc_id
        )
        sg_id = resp["GroupId"]

        resp = aws_clients.iam.create_role(
            RoleName=iam_role_name, AssumeRolePolicyDocument="moto_test_policy"
        )
        iam_arn = resp["Role"]["Arn"]
        iam_name = resp["Role"]["RoleName"]

        resp = aws_clients.batch.create_compute_environment(
            computeEnvironmentName=compute_env_name,
            type="UNMANAGED",
            state="ENABLED",
            serviceRole=iam_arn,
        )
        compute_env_arn = resp["computeEnvironmentArn"]

        resp = aws_clients.batch.create_job_queue(
            jobQueueName=job_queue_name,
            state="ENABLED",
            priority=123,
            computeEnvironmentOrder=[{"order": 123, "computeEnvironment": compute_env_arn}],
        )
        assert resp["jobQueueName"] == job_queue_name
        assert resp["jobQueueArn"]
        job_queue_arn = resp["jobQueueArn"]

        resp = aws_clients.batch.register_job_definition(
            jobDefinitionName=job_definition_name,
            type="container",
            containerProperties={
                "image": "busybox",
                "vcpus": 2,
                "memory": 8,
                "command": ["sleep", "10"],  # NOTE: job runs for 10 sec without overrides
            },
        )
        assert resp["jobDefinitionName"] == job_definition_name
        assert resp["jobDefinitionArn"]
        job_definition_arn = resp["jobDefinitionArn"]
        assert resp["revision"]
        assert resp["jobDefinitionArn"].endswith(
            "{0}:{1}".format(resp["jobDefinitionName"], resp["revision"])
        )

        infrastructure.vpc_id = vpc_id
        infrastructure.subnet_id = subnet_id
        infrastructure.security_group_id = sg_id
        infrastructure.iam_role_arn = iam_arn
        infrastructure.iam_role_name = iam_name
        infrastructure.compute_env_name = compute_env_name
        infrastructure.compute_env_arn = compute_env_arn
        infrastructure.job_queue_name = job_queue_name
        infrastructure.job_queue_arn = job_queue_arn
        infrastructure.job_definition_name = job_definition_name
        infrastructure.job_definition_arn = job_definition_arn

        yield infrastructure

    finally:
        # TODO: break all services into separate fixtures that clean up
        aws_clients.batch.deregister_job_definition(jobDefinition=infrastructure.job_definition_arn)
        aws_clients.batch.delete_job_queue(jobQueue=infrastructure.job_queue_arn)
        aws_clients.batch.delete_compute_environment(computeEnvironment=infrastructure.compute_env_arn)
        aws_clients.iam.delete_role(RoleName=infrastructure.iam_role_name)
        aws_clients.ec2.delete_security_group(GroupId=infrastructure.security_group_id)
        aws_clients.ec2.delete_subnet(SubnetId=infrastructure.subnet_id)
        aws_clients.ec2.delete_vpc(VpcId=infrastructure.vpc_id)
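
A hedged usage sketch; the aws_batch_clients fixture name and the import path are assumptions, since only the AwsBatchClients type is shown above:

    from pytest_aiomoto.aws_batch import batch_infrastructure  # assumed import path

    def test_batch_infrastructure(
        aws_batch_clients,  # hypothetical fixture providing AwsBatchClients
        compute_env_name,
        job_queue_name,
        job_definition_name,
        iam_role_name,
    ):
        with batch_infrastructure(
            aws_batch_clients, compute_env_name, job_queue_name, job_definition_name, iam_role_name
        ) as infra:
            # The context manager yields a populated AwsBatchInfrastructure
            # and tears everything down on exit.
            assert infra.job_queue_arn
            assert infra.job_definition_arn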

compute_env_name(moto_uuid)

A unique batch compute environment name

Source code in pytest_aiomoto/aws_batch.py
@pytest.fixture
def compute_env_name(moto_uuid) -> str:
    """A unique batch compute environment name"""
    return f"moto-batch-compute-env-{moto_uuid}"

iam_role_name(moto_uuid)

A unique IAM role name

Source code in pytest_aiomoto/aws_batch.py
@pytest.fixture
def iam_role_name(moto_uuid) -> str:
    """A unique IAM role name"""
    return f"moto-iam-role-name-{moto_uuid}"

job_definition_name(moto_uuid)

A unique batch job definition name

Source code in pytest_aiomoto/aws_batch.py
@pytest.fixture
def job_definition_name(moto_uuid) -> str:
    """A unique batch job definition name"""
    return f"moto-batch-job-definition-{moto_uuid}"

job_queue_name(moto_uuid)

A unique batch queue name

Source code in pytest_aiomoto/aws_batch.py
@pytest.fixture
def job_queue_name(moto_uuid) -> str:
    """A unique batch queue name"""
    return f"moto-batch-job-queue-{moto_uuid}"

moto_uuid()

A uuid.uuid4() for unique moto mock artifacts

Source code in pytest_aiomoto/aws_batch.py
@pytest.fixture
def moto_uuid() -> uuid.UUID:
    """A uuid.uuid4() for unique moto mock artifacts"""
    return uuid.uuid4()

AWS Lambda Utilities for Testing

lambda_handler(event, context)

A lambda handler for test purposes. This function must be self-contained, including any required imports.

Source code in pytest_aiomoto/aws_lambda.py
def lambda_handler(event, context):
    """
    A lambda handler for test purposes.
    This function must be self-contained, including imports required.
    """
    import sys

    print("event: %s" % event)
    action = event.get("action")
    if action == "too-large":
        x = ["xxx" for x in range(10 ** 6)]
        assert sys.getsizeof(x) > 6291556
        return {"statusCode": 200, "body": x}
    if action == "runtime-error":
        raise RuntimeError(action)
    return {"statusCode": 200, "body": event}

MotoService

Creates a MotoService. The service is ref-counted so there is only one per process; the actual service is returned by __enter__.

Source code in pytest_aiomoto/moto_services.py
class MotoService:
    """Will Create MotoService.
    Service is ref-counted so there will only be one per process. Real Service will
    be returned by `__enter__`."""

    _services = dict()  # {name: instance}

    def __init__(self, service_name: str, port: int = None):
        self._service_name = service_name

        if port:
            self._socket = None
            self._port = port
        else:
            self._socket, self._port = get_free_tcp_port()

        self._thread = None
        self._logger = logging.getLogger(self.__class__.__name__)
        self._refcount = 0
        self._ip_address = AWS_HOST
        self._server = None

    @property
    def endpoint_url(self):
        return "http://{}:{}".format(self._ip_address, self._port)

    def reset(self):
        moto_service_reset(service_name=self._service_name)

    def __call__(self, func):
        def wrapper(*args, **kwargs):
            self._start()
            try:
                result = func(*args, **kwargs)
            finally:
                self._stop()
            return result

        functools.update_wrapper(wrapper, func)
        wrapper.__wrapped__ = func
        return wrapper

    def __enter__(self):
        svc = self._services.get(self._service_name)
        if svc is None:
            self._services[self._service_name] = self
            self._refcount = 1
            self._start()
            return self
        else:
            svc._refcount += 1
            return svc

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._refcount -= 1

        if self._socket:
            self._socket.close()
            self._socket = None

        if self._refcount == 0:
            del self._services[self._service_name]
            self._stop()

    def _server_entry(self):
        self._main_app = moto_service_app(service_name=self._service_name)

        if self._socket:
            self._socket.close()  # release right before we use it
            self._socket = None

        self._server = werkzeug.serving.make_server(
            self._ip_address, self._port, self._main_app, True
        )
        self._server.serve_forever()

    def _start(self):
        self._thread = threading.Thread(target=self._server_entry, daemon=True)
        self._thread.start()

        http = urllib3.PoolManager()

        start = time.time()

        while time.time() - start < 10:
            if not self._thread.is_alive():
                break

            try:
                resp = http.request(
                    "GET", self.endpoint_url + "/static", timeout=CONNECT_TIMEOUT
                )
                break
            except (
                urllib3.exceptions.NewConnectionError,
                urllib3.exceptions.MaxRetryError,
            ):
                time.sleep(0.2)
        else:
            self._stop()  # pytest.fail doesn't call stop_process
            raise Exception("Cannot start {}: {}".format(self.__class__.__name__, self._service_name))

    def _stop(self):
        if self._server:
            self._server.shutdown()

        self._thread.join()

moto_service_reset(service_name)

Reset a moto service backend, for all regions. Each service can have multiple regional backends.

Source code in pytest_aiomoto/moto_services.py
def moto_service_reset(service_name: str):
    """
    Reset a moto service backend, for all regions.
    Each service can have multiple regional backends.
    """
    service_backends = moto.backends.get_backend(service_name)
    if service_backends:
        for region_name, backend in service_backends.items():
            backend.reset()

AWS test fixtures

This test suite uses a large set of moto mocks for the AWS Batch infrastructure. These infrastructure mocks are derived from the moto test suite for testing the batch client, and the test infrastructure should be used according to the moto license.

.. seealso::

- https://github.com/spulec/moto/pull/1197/files
- https://github.com/spulec/moto/blob/master/tests/test_batch/test_batch.py

s3_bucket(s3_bucket_name, aws_s3_client, aws_region)

The s3_bucket fixture provides a moto-bucket for the s3_bucket_name fixture, where the moto-bucket is cleaned up on exit.

Returns: the s3_bucket_name

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_bucket(s3_bucket_name, aws_s3_client, aws_region) -> str:
    """
    The s3_bucket fixture provides a moto-bucket for the s3_bucket_name
    fixture, where the moto-bucket is cleaned up on exit.
    :return: the s3_bucket_name
    """
    with mock_s3():
        create_s3_bucket(s3_bucket_name, aws_s3_client, aws_region)
        yield s3_bucket_name
        delete_s3_bucket(s3_bucket_name, aws_s3_client)
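
A minimal usage sketch; head_bucket simply confirms the mocked bucket exists:

    def test_s3_bucket_exists(s3_bucket, aws_s3_client):
        # s3_bucket is the bucket name; the bucket itself already exists in the moto mock.
        response = aws_s3_client.head_bucket(Bucket=s3_bucket)
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200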

s3_bucket_name(s3_uuid)

A valid S3 bucket name with a UUID suffix.

This bucket may not exist yet; for a bucket that exists, use the s3_bucket fixture instead. It creates a moto-bucket for this bucket name.

Returns: str for the bucket component of 's3://{bucket}/{key}'

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_bucket_name(s3_uuid) -> str:
    """
    A valid S3 bucket name with a UUID suffix.

    This bucket may not exist yet; for a bucket that exists, use
    the `s3_bucket` fixture instead.  It creates a moto-bucket
    for this bucket name.

    :return: str for the bucket component of 's3://{bucket}/{key}'
    """
    return f"moto-bucket-{s3_uuid}"

s3_bucket_resource(s3_bucket_name, aws_s3_resource, aws_region)

The s3_bucket_resource fixture provides a moto-bucket for the s3_bucket_name fixture, where the moto-bucket is cleaned up on exit.

Returns: the s3.Bucket resource

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_bucket_resource(s3_bucket_name, aws_s3_resource, aws_region) -> "s3.Bucket":
    """
    The s3_bucket fixture provides a moto-bucket for the s3_bucket_name
    fixture, where the moto-bucket is cleaned up on exit.
    :return: the s3.Bucket resource
    """
    with mock_s3():
        bucket = create_s3_bucket_resource(s3_bucket_name, aws_s3_resource, aws_region)
        yield bucket
        delete_s3_bucket_resource(s3_bucket_name, aws_s3_resource)

s3_buckets(s3_bucket_name, aws_s3_client, aws_region)

The s3_buckets fixture creates moto-buckets for the s3_bucket_name fixture, with a numeric suffix for each bucket, where the moto-buckets are cleaned up on exit.

Returns: a list of bucket names

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_buckets(s3_bucket_name, aws_s3_client, aws_region) -> List[str]:
    """
    The s3_buckets fixture creates moto-buckets for the s3_bucket_name
    fixture, with a numeric suffix for each bucket, where the
    moto-buckets are cleaned up on exit.
    :return: a list of bucket names
    """
    with mock_s3():
        bucket_names = []
        for i in range(10):
            bucket_name = f"{s3_bucket_name}-{i:02d}"
            create_s3_bucket(bucket_name, aws_s3_client, aws_region)
            bucket_names.append(bucket_name)

        yield bucket_names

        for bucket_name in bucket_names:
            delete_s3_bucket(bucket_name, aws_s3_client)

s3_key(s3_key_path, s3_key_file)

A valid S3 key composed of a key_path and a key_file.

The key component of 's3://{bucket}/{key}' is composed of '{key_path}/{key_file}'; the key does not begin or end with any delimiters (e.g. '/').

Returns: str for the key component of 's3://{bucket}/{key}'

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture(scope="session")
def s3_key(s3_key_path, s3_key_file) -> str:
    """A valid S3 key composed of a key_path and a key_file
    The key component of 's3://{bucket}/{key}' that is composed of '{key_path}/{key_file}';
    the key does not begin or end with any delimiters (e.g. '/')
    :return: str for the key component of 's3://{bucket}/{key}'
    """
    return f"{s3_key_path}/{s3_key_file}"

s3_key_file()

A valid S3 key name that is also a file name.

The key component of 's3://{bucket}/{key}' is composed of '{key_file}'; the key does not begin or end with any delimiters (e.g. '/').

Returns: str for the key component of 's3://{bucket}/{key}'

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture(scope="session")
def s3_key_file() -> str:
    """A valid S3 key name that is also a file name
    The key component of 's3://{bucket}/{key}' that is composed of '{key_file}';
    the key does not begin or end with any delimiters (e.g. '/')
    :return: str for the key component of 's3://{bucket}/{key}'
    """
    return "s3_file_test.txt"

s3_key_path()

A valid S3 key name that is not a file name; it behaves like a directory prefix.

The key component of 's3://{bucket}/{key}' is composed of '{key_path}'; the key does not begin or end with any delimiters (e.g. '/').

Returns: str for the key component of 's3://{bucket}/{key}'

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture(scope="session")
def s3_key_path() -> str:
    """A valid S3 key name that is not a file name, it's like a directory
    The key component of 's3://{bucket}/{key}' that is composed of '{key_path}';
    the key does not begin or end with any delimiters (e.g. '/')
    :return: str for the key component of 's3://{bucket}/{key}'
    """
    return "s3_key_path"

s3_object_text()

s3 object data: 's3 test object text\n'

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture(scope="session")
def s3_object_text() -> str:
    """s3 object data: 's3 test object text\n'"""
    return "s3 test object text\n"

s3_protocol()

An s3:// protocol prefix

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_protocol() -> str:
    """An s3:// protocol prefix"""
    return "s3://"

s3_temp_1000s_objects(aws_s3_client, aws_s3_resource, s3_bucket, s3_temp_dir, s3_uuid)

This creates a seed file plus 1010 files with alternating .txt and .tif extensions below the s3://s3_bucket/s3_temp_dir path; the default page limit for s3 object listings is usually 1000, so this should exceed one page.

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_temp_1000s_objects(
    aws_s3_client, aws_s3_resource, s3_bucket, s3_temp_dir, s3_uuid
) -> List[S3Object]:
    """
    This creates 1010 files, half with .txt and others with .tif file extensions,
    below the s3://s3_bucket/s3_temp_dir path; the default page limit for s3
    object listings is usually 1000, so this should exceed 1 page.
    """
    # Since a mock_s3 context is created by the s3_bucket
    # and aws_s3_client fixtures, it is not required here.
    file_key = f"{s3_temp_dir}/{s3_uuid}.txt"
    s3_file = create_s3_object(
        s3_bucket, file_key, s3_uuid, aws_s3_resource, aws_s3_client
    )
    s3_objects = [s3_file]

    for i in range(1010):
        if i % 2 > 0:
            key = f"{s3_temp_dir}/{s3_uuid}_{i:04d}.txt"
        else:
            key = f"{s3_temp_dir}/{s3_uuid}_{i:04d}.tif"
        body = f"{s3_uuid}-{i:04d}".encode()
        s3_obj = create_s3_object(s3_bucket, key, body, aws_s3_resource, aws_s3_client)
        s3_objects.append(s3_obj)

    s3_objects = [
        S3Object(bucket=s3_obj.bucket_name, key=s3_obj.key) for s3_obj in s3_objects
    ]

    yield s3_objects

    delete_s3_prefix(aws_s3_client, s3_bucket, s3_temp_dir)
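
A hedged sketch of paging through the generated objects; it only uses fixtures that this fixture itself depends on:

    def test_list_many_temp_objects(s3_temp_1000s_objects, aws_s3_client, s3_bucket, s3_temp_dir):
        paginator = aws_s3_client.get_paginator("list_objects_v2")
        keys = [
            obj["Key"]
            for page in paginator.paginate(Bucket=s3_bucket, Prefix=s3_temp_dir)
            for obj in page.get("Contents", [])
        ]
        # More objects than a single 1000-item listing page can return.
        assert len(keys) == len(s3_temp_1000s_objects) > 1000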

s3_temp_objects(aws_s3_client, aws_s3_resource, s3_bucket, s3_temp_dir, s3_uuid)

This creates temporary objects below the s3://s3_bucket/s3_temp_dir path: a seed file, 10 files (5 with .txt and 5 with .tif extensions), and 10 more under a nested sub-key path for derivative files.

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_temp_objects(
    aws_s3_client, aws_s3_resource, s3_bucket, s3_temp_dir, s3_uuid
) -> List[S3Object]:
    """
    This creates 10 files, 5 with .txt and 5 with .tif file extensions,
    below the s3://s3_bucket/s3_temp_dir path
    """
    # Since a mock_s3 context is created by the s3_bucket
    # and aws_s3_client fixtures, it is not required here.

    file_key = f"{s3_temp_dir}/{s3_uuid}.txt"
    s3_file = create_s3_object(
        s3_bucket, file_key, s3_uuid, aws_s3_resource, aws_s3_client
    )
    s3_objects = [s3_file]

    files_prefix = f"{s3_temp_dir}/{s3_uuid}"
    for i in range(10):
        file_stem = f"{files_prefix}_{i:04d}"
        if i % 2 > 0:
            key = f"{file_stem}.txt"
        else:
            key = f"{file_stem}.tif"
        body = file_stem.encode()
        s3_obj = create_s3_object(s3_bucket, key, body, aws_s3_resource, aws_s3_client)
        s3_objects.append(s3_obj)

    # create a sub-key path for derivative files
    derivative_path = str(uuid.uuid4())
    derivative_prefix = f"{s3_temp_dir}/{derivative_path}/{s3_uuid}"
    for i in range(10):
        file_stem = f"{derivative_prefix}_{i:04d}"
        if i % 2 > 0:
            key = f"{file_stem}.txt"
        else:
            key = f"{file_stem}.tif"
        body = file_stem.encode()
        s3_obj = create_s3_object(s3_bucket, key, body, aws_s3_resource, aws_s3_client)
        s3_objects.append(s3_obj)

    s3_objects = [
        S3Object(bucket=s3_obj.bucket_name, key=s3_obj.key) for s3_obj in s3_objects
    ]

    yield s3_objects

    delete_s3_prefix(aws_s3_client, s3_bucket, s3_temp_dir)

s3_uri_object(aws_s3_client, aws_s3_resource, aws_region, s3_bucket_name, s3_key, s3_object_text)

The s3_uri_object fixture creates a moto-bucket and moto-object for the s3_uri_str fixture, where the moto-bucket is cleaned up on exit.

Returns: an S3Object(bucket=s3_bucket_name, key=s3_key)

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_uri_object(
    aws_s3_client, aws_s3_resource, aws_region, s3_bucket_name, s3_key, s3_object_text
) -> S3Object:
    """
    The s3_uri_object fixture creates a moto-bucket and moto-object for
    the s3_uri_str fixture, where the moto-bucket is cleaned up on exit.
    :return: an S3Object(bucket=s3_bucket_name, key=s3_key)
    """
    with mock_s3():
        create_s3_bucket(s3_bucket_name, aws_s3_client, aws_region)
        create_s3_object(
            s3_bucket_name, s3_key, s3_object_text, aws_s3_resource, aws_s3_client
        )
        yield S3Object(bucket=s3_bucket_name, key=s3_key)
        delete_s3_bucket(s3_bucket_name, aws_s3_client)
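
A minimal usage sketch reading back the mocked object:

    def test_s3_uri_object_content(s3_uri_object, aws_s3_client, s3_object_text):
        response = aws_s3_client.get_object(Bucket=s3_uri_object.bucket, Key=s3_uri_object.key)
        assert response["Body"].read().decode() == s3_object_text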

s3_uri_str(s3_protocol, s3_bucket_name, s3_key)

A valid S3 URI comprised of 's3://{bucket}/{key}'

This s3_uri_str may not exist yet; for an object that exists, use the s3_uri_object fixture instead.

Returns: str

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_uri_str(s3_protocol, s3_bucket_name, s3_key) -> str:
    """A valid S3 URI comprised of 's3://{bucket}/{key}'

    This s3_uri_str may not exist yet; for an object that exists, use
    the `s3_uri_object` fixture instead.

    :return: str
    """
    return f"{s3_protocol}{s3_bucket_name}/{s3_key}"

s3_uuid()

A UUID for S3 artifacts

Source code in pytest_aiomoto/aws_s3.py
@pytest.fixture
def s3_uuid() -> str:
    """A UUID for S3 artifacts"""
    return str(uuid.uuid4())

S3Object

Bases: NamedTuple

Just the bucket_name and key for an s3.ObjectSummary. This simple named tuple should work around problems with Pickle for an s3.ObjectSummary.

Source code in pytest_aiomoto/s3_object.py
class S3Object(NamedTuple):
    """
    Just the bucket_name and key for an :code:`s3.ObjectSummary`.
    This simple named tuple should work around problems with :code:`Pickle`
    for an :code:`s3.ObjectSummary`
    """

    bucket: str
    key: str

    @property
    def bucket_name(self) -> str:
        return self.bucket

    @property
    def s3_uri(self) -> str:
        return f"s3://{self.bucket}/{self.key}"

response_success(response)

Parse a response from a request issued by any botocore.client.BaseClient to determine whether the request was successful or not.

Returns: boolean

Raises: KeyError if the response is not an AWS response

Source code in pytest_aiomoto/utils.py
def response_success(response: Dict) -> bool:
    """
    Parse a response from a request issued by any botocore.client.BaseClient
    to determine whether the request was successful or not.
    :param response:
    :return: boolean
    :raises: KeyError if the response is not an AWS response
    """
    # If the response dict is not constructed as expected for an AWS response,
    # this should raise a KeyError to indicate something is very wrong.
    status_code = int(response["ResponseMetadata"]["HTTPStatusCode"])
    if status_code:
        # consider 300+ responses to be unsuccessful
        return 200 <= status_code < 300
    else:
        return False
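
A minimal illustration with hand-built response dicts (no AWS call involved); the import path is assumed from the source location above:

    from pytest_aiomoto.utils import response_success  # assumed import path

    assert response_success({"ResponseMetadata": {"HTTPStatusCode": 200}}) is True
    assert response_success({"ResponseMetadata": {"HTTPStatusCode": 404}}) is False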