diff --git a/.github/workflows/ci-community.yml b/.github/workflows/ci-community.yml
index 337b90661..92e58e518 100644
--- a/.github/workflows/ci-community.yml
+++ b/.github/workflows/ci-community.yml
@@ -70,7 +70,5 @@ jobs:
cache: poetry
- name: Install Python dependencies
run: poetry install -E ${{ matrix.module }}
- - name: Run linter
- run: make modules/${{ matrix.module }}/lint
- name: Run tests
- run: make modules/${{ matrix.module }}/tests
\ No newline at end of file
+ run: make modules/${{ matrix.module }}/tests
diff --git a/.github/workflows/ci-core.yml b/.github/workflows/ci-core.yml
index 56a45cd33..c96619868 100644
--- a/.github/workflows/ci-core.yml
+++ b/.github/workflows/ci-core.yml
@@ -25,9 +25,7 @@ jobs:
python-version: ${{ matrix.python-version }}
cache: poetry
- name: Install Python dependencies
- run: poetry install
- - name: Run linter
- run: make core/lint
+ run: poetry install --all-extras
- name: Run twine check
run: poetry build && poetry run twine check dist/*.tar.gz
- name: Run tests
diff --git a/.github/workflows/ci-lint.yml b/.github/workflows/ci-lint.yml
new file mode 100644
index 000000000..6261002b3
--- /dev/null
+++ b/.github/workflows/ci-lint.yml
@@ -0,0 +1,28 @@
+# Continuous Integration linting for the repository
+
+name: lint
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+jobs:
+ all:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Setup Poetry
+ run: pipx install poetry
+ - name: Setup python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.9
+ cache: poetry
+ - name: Install Python dependencies
+ run: poetry install
+ - name: Install pre-commit
+ run: pip install pre-commit
+ - name: Run linter
+ run: pre-commit run -a
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..0d2a53b63
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,34 @@
+default_language_version:
+ python: python3.9
+
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: 'v4.5.0'
+ hooks:
+ - id: check-toml
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+
+ - repo: https://github.com/psf/black-pre-commit-mirror
+ rev: '24.1.1'
+ hooks:
+ - id: black
+ args: [ '--config', 'pyproject.toml' ]
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: 'v0.1.14'
+ hooks:
+ - id: ruff
+ # Explicitly setting config to prevent Ruff from using `pyproject.toml` in sub packages.
+ args: [ '--fix', '--exit-non-zero-on-fix', '--config', 'pyproject.toml' ]
+
+# - repo: local
+# hooks:
+# - id: mypy
+# name: mypy
+# entry: poetry run mypy
+# args: ["--config-file", "pyproject.toml"]
+# files: "core" # start with the core being type checked
+# language: system
+# types: [ python ]
+# require_serial: true
diff --git a/Dockerfile b/Dockerfile
index fb4ad5d24..4172f86fe 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,8 +7,7 @@ RUN pip install --upgrade pip \
&& apt-get install -y \
freetds-dev \
&& rm -rf /var/lib/apt/lists/*
-ARG version=3.8
-COPY requirements/${version}.txt requirements.txt
+COPY build/requirements.txt requirements.txt
COPY setup.py README.rst ./
RUN pip install -r requirements.txt
COPY . .
diff --git a/Dockerfile.diagnostics b/Dockerfile.diagnostics
index 9d1bba2cd..687b447c5 100644
--- a/Dockerfile.diagnostics
+++ b/Dockerfile.diagnostics
@@ -4,4 +4,4 @@ FROM python:${version}
WORKDIR /workspace
COPY core core
RUN pip install --no-cache-dir -e core
-COPY diagnostics.py .
+COPY scripts/diagnostics.py .
diff --git a/INDEX.rst b/INDEX.rst
index 87c413355..be5e3d1cd 100644
--- a/INDEX.rst
+++ b/INDEX.rst
@@ -58,11 +58,12 @@ The snippet above will spin up a postgres database in a container. The :code:`ge
Installation
------------
-The suite of testcontainers packages is available on `PyPI `_, and individual packages can be installed using :code:`pip`. We recommend installing the package you need by running :code:`pip install testcontainers-`, e.g., :code:`pip install testcontainers-postgres`.
+The suite of testcontainers packages is available on `PyPI `_,
+and individual packages can be installed using :code:`pip`.
-.. note::
+From version `4.0.0` onwards, we no longer support the `testcontainers-*` packages, as maintaining ownership of them is unsustainable.
- For backwards compatibility, packages can also be installed by specifying `extras `__, e.g., :code:`pip install testcontainers[postgres]`.
+Instead, packages can be installed by specifying `extras `__, e.g., :code:`pip install testcontainers[postgres]`.
Docker in Docker (DinD)
@@ -80,8 +81,8 @@ We recommend you use a `virtual environment /tests
Package Structure
^^^^^^^^^^^^^^^^^
@@ -90,23 +91,24 @@ Testcontainers is a collection of `implicit namespace packages
diff --git a/core/testcontainers/core/container.py b/core/testcontainers/core/container.py
- def with_env(self, key: str, value: str) -> 'DockerContainer':
+ def with_env(self, key: str, value: str) -> "DockerContainer":
self.env[key] = value
return self
- def with_bind_ports(self, container: int, host: int = None) -> 'DockerContainer':
+ def with_bind_ports(self, container: int, host: Optional[int] = None) -> "DockerContainer":
self.ports[container] = host
return self
- def with_exposed_ports(self, *ports: int) -> 'DockerContainer':
+ def with_exposed_ports(self, *ports: int) -> "DockerContainer":
for port in ports:
self.ports[port] = None
return self
- def with_kwargs(self, **kwargs) -> 'DockerContainer':
+ def with_kwargs(self, **kwargs) -> "DockerContainer":
self._kwargs = kwargs
return self
- def maybe_emulate_amd64(self) -> 'DockerContainer':
+ def maybe_emulate_amd64(self) -> "DockerContainer":
if is_arm():
- return self.with_kwargs(platform='linux/amd64')
+ return self.with_kwargs(platform="linux/amd64")
return self
- def start(self) -> 'DockerContainer':
+ def start(self) -> "DockerContainer":
logger.info("Pulling image %s", self.image)
docker_client = self.get_docker_client()
self._container = docker_client.run(
- self.image, command=self._command, detach=True, environment=self.env, ports=self.ports,
- name=self._name, volumes=self.volumes, **self._kwargs
+ self.image,
+ command=self._command,
+ detach=True,
+ environment=self.env,
+ ports=self.ports,
+ name=self._name,
+ volumes=self.volumes,
+ **self._kwargs
)
logger.info("Container started: %s", self._container.short_id)
return self
@@ -71,7 +78,7 @@ def start(self) -> 'DockerContainer':
def stop(self, force=True, delete_volume=True) -> None:
self._container.remove(force=force, v=delete_volume)
- def __enter__(self) -> 'DockerContainer':
+ def __enter__(self) -> "DockerContainer":
return self.start()
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
@@ -82,11 +89,9 @@ def __del__(self) -> None:
__del__ runs when Python attempts to garbage collect the object.
In case of leaky test design, we still attempt to clean up the container.
"""
- try:
+ with contextlib.suppress(Exception):
if self._container is not None:
self.stop()
- finally:
- pass
def get_container_host_ip(self) -> str:
# infer from docker host
@@ -94,7 +99,7 @@ def get_container_host_ip(self) -> str:
if not host:
return "localhost"
# see https://github.com/testcontainers/testcontainers-python/issues/415
- if host == "localnpipe" and "Windows" == system():
+ if host == "localnpipe" and system() == "Windows":
return "localhost"
# check testcontainers itself runs inside docker container
@@ -122,16 +127,16 @@ def get_exposed_port(self, port: int) -> str:
return port
return mapped_port
- def with_command(self, command: str) -> 'DockerContainer':
+ def with_command(self, command: str) -> "DockerContainer":
self._command = command
return self
- def with_name(self, name: str) -> 'DockerContainer':
+ def with_name(self, name: str) -> "DockerContainer":
self._name = name
return self
- def with_volume_mapping(self, host: str, container: str, mode: str = 'ro') -> 'DockerContainer':
- mapping = {'bind': container, 'mode': mode}
+ def with_volume_mapping(self, host: str, container: str, mode: str = "ro") -> "DockerContainer":
+ mapping = {"bind": container, "mode": mode}
self.volumes[host] = mapping
return self
@@ -141,12 +146,12 @@ def get_wrapped_container(self) -> Container:
def get_docker_client(self) -> DockerClient:
return self._docker
- def get_logs(self) -> Tuple[str, str]:
+ def get_logs(self) -> tuple[str, str]:
if not self._container:
raise ContainerStartException("Container should be started before getting logs")
return self._container.logs(stderr=False), self._container.logs(stdout=False)
- def exec(self, command) -> Tuple[int, str]:
+ def exec(self, command) -> tuple[int, str]:
if not self._container:
raise ContainerStartException("Container should be started before executing a command")
return self._container.exec_run(command)
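A minimal usage sketch of the fluent DockerContainer API reformatted above (the image, command, and values are illustrative; every call shown is defined in this file):

    from testcontainers.core.container import DockerContainer

    container = (
        DockerContainer("alpine:latest")
        .with_command("sleep 60")
        .with_env("EXAMPLE", "value")
        .with_exposed_ports(80)
        .maybe_emulate_amd64()
    )
    with container:
        # __enter__ calls start(); exposed ports are only mapped to host ports after that.
        host = container.get_container_host_ip()
        port = container.get_exposed_port(80)
        exit_code, output = container.exec("echo hello")
        stdout, stderr = container.get_logs()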
diff --git a/core/testcontainers/core/docker_client.py b/core/testcontainers/core/docker_client.py
index fb54838fa..3c724ac3c 100644
--- a/core/testcontainers/core/docker_client.py
+++ b/core/testcontainers/core/docker_client.py
@@ -14,7 +14,7 @@
import functools as ft
import os
import urllib
-from typing import List, Optional, Union
+from typing import Optional, Union
import docker
from docker.errors import NotFound
@@ -31,8 +31,7 @@ def _stop_container(container: Container) -> None:
except NotFound:
pass
except Exception as ex:
- LOGGER.warning("failed to shut down container %s with image %s: %s", container.id,
- container.image, ex)
+ LOGGER.warning("failed to shut down container %s with image %s: %s", container.id, container.image, ex)
class DockerClient:
@@ -45,19 +44,27 @@ def __init__(self, **kwargs) -> None:
@ft.wraps(ContainerCollection.run)
def run(
- self, image: str,
- command: Union[str, List[str]] = None,
- environment: Optional[dict] = None,
- ports: Optional[dict] = None,
- detach: bool = False,
- stdout: bool = True,
- stderr: bool = False,
- remove: bool = False,
- **kwargs
+ self,
+ image: str,
+ command: Optional[Union[str, list[str]]] = None,
+ environment: Optional[dict] = None,
+ ports: Optional[dict] = None,
+ detach: bool = False,
+ stdout: bool = True,
+ stderr: bool = False,
+ remove: bool = False,
+ **kwargs,
) -> Container:
container = self.client.containers.run(
- image, command=command, stdout=stdout, stderr=stderr, remove=remove, detach=detach,
- environment=environment, ports=ports, **kwargs
+ image,
+ command=command,
+ stdout=stdout,
+ stderr=stderr,
+ remove=remove,
+ detach=detach,
+ environment=environment,
+ ports=ports,
+ **kwargs,
)
if detach:
atexit.register(_stop_container, container)
@@ -69,17 +76,16 @@ def port(self, container_id: str, port: int) -> int:
"""
port_mappings = self.client.api.port(container_id, port)
if not port_mappings:
- raise ConnectionError(f'Port mapping for container {container_id} and port {port} is '
- 'not available')
+ raise ConnectionError(f"Port mapping for container {container_id} and port {port} is " "not available")
return port_mappings[0]["HostPort"]
def get_container(self, container_id: str) -> Container:
"""
Get the container with a given identifier.
"""
- containers = self.client.api.containers(filters={'id': container_id})
+ containers = self.client.api.containers(filters={"id": container_id})
if not containers:
- raise RuntimeError(f'Could not get container with id {container_id}')
+ raise RuntimeError(f"Could not get container with id {container_id}")
return containers[0]
def bridge_ip(self, container_id: str) -> str:
@@ -87,14 +93,14 @@ def bridge_ip(self, container_id: str) -> str:
Get the bridge ip address for a container.
"""
container = self.get_container(container_id)
- return container['NetworkSettings']['Networks']['bridge']['IPAddress']
+ return container["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]
def gateway_ip(self, container_id: str) -> str:
"""
Get the gateway ip address for a container.
"""
container = self.get_container(container_id)
- return container['NetworkSettings']['Networks']['bridge']['Gateway']
+ return container["NetworkSettings"]["Networks"]["bridge"]["Gateway"]
def host(self) -> str:
"""
@@ -102,7 +108,7 @@ def host(self) -> str:
"""
# https://github.com/testcontainers/testcontainers-go/blob/dd76d1e39c654433a3d80429690d07abcec04424/docker.go#L644
# if os env TC_HOST is set, use it
- host = os.environ.get('TC_HOST')
+ host = os.environ.get("TC_HOST")
if host:
return host
try:
@@ -110,11 +116,10 @@ def host(self) -> str:
except ValueError:
return None
- if 'http' in url.scheme or 'tcp' in url.scheme:
+ if "http" in url.scheme or "tcp" in url.scheme:
return url.hostname
- if 'unix' in url.scheme or 'npipe' in url.scheme:
- if inside_container():
- ip_address = default_gateway_ip()
- if ip_address:
- return ip_address
+ if inside_container() and ("unix" in url.scheme or "npipe" in url.scheme):
+ ip_address = default_gateway_ip()
+ if ip_address:
+ return ip_address
return "localhost"
diff --git a/core/testcontainers/core/generic.py b/core/testcontainers/core/generic.py
index e63478064..21bf9d7e4 100644
--- a/core/testcontainers/core/generic.py
+++ b/core/testcontainers/core/generic.py
@@ -20,6 +20,7 @@
ADDITIONAL_TRANSIENT_ERRORS = []
try:
from sqlalchemy.exc import DBAPIError
+
ADDITIONAL_TRANSIENT_ERRORS.append(DBAPIError)
except ImportError:
pass
@@ -29,18 +30,27 @@ class DbContainer(DockerContainer):
"""
Generic database container.
"""
+
@wait_container_is_ready(*ADDITIONAL_TRANSIENT_ERRORS)
def _connect(self) -> None:
import sqlalchemy
+
engine = sqlalchemy.create_engine(self.get_connection_url())
engine.connect()
def get_connection_url(self) -> str:
raise NotImplementedError
- def _create_connection_url(self, dialect: str, username: str, password: str,
- host: Optional[str] = None, port: Optional[int] = None,
- dbname: Optional[str] = None, **kwargs) -> str:
+ def _create_connection_url(
+ self,
+ dialect: str,
+ username: str,
+ password: str,
+ host: Optional[str] = None,
+ port: Optional[int] = None,
+ dbname: Optional[str] = None,
+ **kwargs,
+ ) -> str:
if raise_for_deprecated_parameter(kwargs, "db_name", "dbname"):
raise ValueError(f"Unexpected arguments: {','.join(kwargs)}")
if self._container is None:
@@ -52,7 +62,7 @@ def _create_connection_url(self, dialect: str, username: str, password: str,
url = f"{url}/{dbname}"
return url
- def start(self) -> 'DbContainer':
+ def start(self) -> "DbContainer":
self._configure()
super().start()
self._connect()
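To illustrate how these hooks compose, a hypothetical subclass in the style of the shipped database modules (class name, image, and credentials are made up; only the methods visible above are used):

    from testcontainers.core.generic import DbContainer


    class ExamplePostgresContainer(DbContainer):
        """Illustrative only; the real database modules follow the same shape."""

        def __init__(self, image: str = "postgres:alpine", port: int = 5432, **kwargs) -> None:
            super().__init__(image, **kwargs)
            self.port = port
            self.with_exposed_ports(self.port)

        def _configure(self) -> None:
            # start() runs _configure() before creating the container.
            self.with_env("POSTGRES_USER", "test")
            self.with_env("POSTGRES_PASSWORD", "test")
            self.with_env("POSTGRES_DB", "test")

        def get_connection_url(self) -> str:
            # _connect() opens a SQLAlchemy connection against this URL until it succeeds.
            return self._create_connection_url(
                dialect="postgresql+psycopg2",
                username="test",
                password="test",
                port=self.port,
                dbname="test",
            )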
diff --git a/core/testcontainers/core/utils.py b/core/testcontainers/core/utils.py
index 9a02747b0..5ca1c2f7d 100644
--- a/core/testcontainers/core/utils.py
+++ b/core/testcontainers/core/utils.py
@@ -29,19 +29,19 @@ def os_name() -> str:
def is_mac() -> bool:
- return MAC == os_name()
+ return os_name() == MAC
def is_linux() -> bool:
- return LINUX == os_name()
+ return os_name() == LINUX
def is_windows() -> bool:
- return WIN == os_name()
+ return os_name() == WIN
def is_arm() -> bool:
- return platform.machine() in ('arm64', 'aarch64')
+ return platform.machine() in ("arm64", "aarch64")
def inside_container() -> bool:
@@ -50,7 +50,7 @@ def inside_container() -> bool:
https://github.com/docker/docker/blob/a9fa38b1edf30b23cae3eade0be48b3d4b1de14b/daemon/initlayer/setup_unix.go#L25
"""
- return os.path.exists('/.dockerenv')
+ return os.path.exists("/.dockerenv")
def default_gateway_ip() -> str:
@@ -62,11 +62,10 @@ def default_gateway_ip() -> str:
"""
cmd = ["sh", "-c", "ip route|awk '/default/ { print $3 }'"]
try:
- process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
ip_address = process.communicate()[0]
if ip_address and process.returncode == 0:
- return ip_address.decode('utf-8').strip().strip('\n')
+ return ip_address.decode("utf-8").strip().strip("\n")
except subprocess.SubprocessError:
return None
diff --git a/core/testcontainers/core/waiting_utils.py b/core/testcontainers/core/waiting_utils.py
index 5e9aa33c1..ea52683d5 100644
--- a/core/testcontainers/core/waiting_utils.py
+++ b/core/testcontainers/core/waiting_utils.py
@@ -15,7 +15,7 @@
import re
import time
import traceback
-from typing import Any, Callable, Iterable, Mapping, Optional, TYPE_CHECKING, Union
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union
import wrapt
@@ -45,12 +45,11 @@ def wait_container_is_ready(*transient_exceptions) -> Callable:
transient_exceptions = TRANSIENT_EXCEPTIONS + tuple(transient_exceptions)
@wrapt.decorator
- def wrapper(wrapped: Callable, instance: Any, args: Iterable, kwargs: Mapping) -> Any:
+ def wrapper(wrapped: Callable, instance: Any, args: list, kwargs: dict) -> Any:
from testcontainers.core.container import DockerContainer
if isinstance(instance, DockerContainer):
- logger.info("Waiting for container %s with image %s to be ready ...",
- instance._container, instance.image)
+ logger.info("Waiting for container %s with image %s to be ready ...", instance._container, instance.image)
else:
logger.info("Waiting for %s to be ready ...", instance)
@@ -59,13 +58,15 @@ def wrapper(wrapped: Callable, instance: Any, args: Iterable, kwargs: Mapping) -
try:
return wrapped(*args, **kwargs)
except transient_exceptions as e:
- logger.debug(f"Connection attempt '{attempt_no + 1}' of '{config.MAX_TRIES + 1}' "
- f"failed: {traceback.format_exc()}")
+ logger.debug(
+ f"Connection attempt '{attempt_no + 1}' of '{config.MAX_TRIES + 1}' "
+ f"failed: {traceback.format_exc()}"
+ )
time.sleep(config.SLEEP_TIME)
exception = e
raise TimeoutError(
- f'Wait time ({config.TIMEOUT}s) exceeded for {wrapped.__name__}(args: {args}, kwargs: '
- f'{kwargs}). Exception: {exception}'
+ f"Wait time ({config.TIMEOUT}s) exceeded for {wrapped.__name__}(args: {args}, kwargs: "
+ f"{kwargs}). Exception: {exception}"
)
return wrapper
@@ -76,8 +77,9 @@ def wait_for(condition: Callable[..., bool]) -> bool:
return condition()
-def wait_for_logs(container: "DockerContainer", predicate: Union[Callable, str],
- timeout: Optional[float] = None, interval: float = 1) -> float:
+def wait_for_logs(
+ container: "DockerContainer", predicate: Union[Callable, str], timeout: Optional[float] = None, interval: float = 1
+) -> float:
"""
Wait for the container to emit logs satisfying the predicate.
@@ -102,6 +104,5 @@ def wait_for_logs(container: "DockerContainer", predicate: Union[Callable, str],
if predicate(stdout) or predicate(stderr):
return duration
if timeout and duration > timeout:
- raise TimeoutError(f"Container did not emit logs satisfying predicate in {timeout:.3f} "
- "seconds")
+ raise TimeoutError(f"Container did not emit logs satisfying predicate in {timeout:.3f} " "seconds")
time.sleep(interval)
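wait_for_logs also accepts a callable predicate rather than a regex string; a small sketch of the callable form, assuming the predicate receives the decoded log text (the tests below exercise the string form):

    from testcontainers.core.container import DockerContainer
    from testcontainers.core.waiting_utils import wait_for_logs

    with DockerContainer("hello-world") as container:
        # Polls stdout and stderr every `interval` seconds until the predicate matches,
        # raising TimeoutError once `timeout` seconds have elapsed.
        duration = wait_for_logs(container, lambda logs: "Hello from Docker!" in logs, timeout=30)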
diff --git a/core/tests/test_core.py b/core/tests/test_core.py
index 01e9c97c8..a00be1f02 100644
--- a/core/tests/test_core.py
+++ b/core/tests/test_core.py
@@ -5,9 +5,8 @@
def test_timeout_is_raised_when_waiting_for_logs():
- with pytest.raises(TimeoutError):
- with DockerContainer("alpine").with_command("sleep 2") as container:
- wait_for_logs(container, "Hello from Docker!", timeout=1e-3)
+ with pytest.raises(TimeoutError), DockerContainer("alpine").with_command("sleep 2") as container:
+ wait_for_logs(container, "Hello from Docker!", timeout=1e-3)
def test_garbage_collection_is_defensive():
@@ -29,4 +28,4 @@ def test_can_get_logs():
with DockerContainer("hello-world") as container:
wait_for_logs(container, "Hello from Docker!")
stdout, stderr = container.get_logs()
- assert stdout, 'There should be something on stdout'
+ assert stdout, "There should be something on stdout"
diff --git a/core/tests/test_docker_client.py b/core/tests/test_docker_client.py
index ccd640e22..23f92e9e5 100644
--- a/core/tests/test_docker_client.py
+++ b/core/tests/test_docker_client.py
@@ -1,14 +1,13 @@
from unittest.mock import MagicMock, patch
+
import docker
-from testcontainers.core.docker_client import DockerClient
from testcontainers.core.container import DockerContainer
+from testcontainers.core.docker_client import DockerClient
def test_docker_client_from_env():
- test_kwargs = dict(
- test_kw="test_value"
- )
+ test_kwargs = {"test_kw": "test_value"}
mock_docker = MagicMock(spec=docker)
with patch("testcontainers.core.docker_client.docker", mock_docker):
DockerClient(**test_kwargs)
@@ -17,9 +16,7 @@ def test_docker_client_from_env():
def test_container_docker_client_kw():
- test_kwargs = dict(
- test_kw="test_value"
- )
+ test_kwargs = {"test_kw": "test_value"}
mock_docker = MagicMock(spec=docker)
with patch("testcontainers.core.docker_client.docker", mock_docker):
DockerContainer(image="", docker_client_kw=test_kwargs)
diff --git a/core/tests/test_docker_in_docker.py b/core/tests/test_docker_in_docker.py
index b048a78ab..95392408d 100644
--- a/core/tests/test_docker_in_docker.py
+++ b/core/tests/test_docker_in_docker.py
@@ -1,4 +1,5 @@
import pytest
+
from testcontainers.core.container import DockerContainer
from testcontainers.core.docker_client import DockerClient
from testcontainers.core.waiting_utils import wait_for_logs
@@ -12,7 +13,7 @@ def test_wait_for_logs_docker_in_docker():
not_really_dind = client.run(
image="alpine/socat",
command="tcp-listen:2375,fork,reuseaddr unix-connect:/var/run/docker.sock",
- volumes={'/var/run/docker.sock': {'bind': '/var/run/docker.sock'}},
+ volumes={"/var/run/docker.sock": {"bind": "/var/run/docker.sock"}},
detach=True,
)
@@ -21,22 +22,17 @@ def test_wait_for_logs_docker_in_docker():
# get ip address for DOCKER_HOST
# avoiding DockerContainer class here to prevent code changes affecting the test
specs = client.get_container(not_really_dind.id)
- docker_host_ip = specs['NetworkSettings']['Networks']['bridge']['IPAddress']
+ docker_host_ip = specs["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]
docker_host = f"tcp://{docker_host_ip}:2375"
with DockerContainer(
- image="hello-world",
- docker_client_kw={
- "environment": {
- "DOCKER_HOST": docker_host,
- "DOCKER_CERT_PATH": "",
- "DOCKER_TLS_VERIFY": ""
- }
- }) as container:
+ image="hello-world",
+ docker_client_kw={"environment": {"DOCKER_HOST": docker_host, "DOCKER_CERT_PATH": "", "DOCKER_TLS_VERIFY": ""}},
+ ) as container:
assert container.get_container_host_ip() == docker_host_ip
wait_for_logs(container, "Hello from Docker!")
stdout, stderr = container.get_logs()
- assert stdout, 'There should be something on stdout'
+ assert stdout, "There should be something on stdout"
not_really_dind.stop()
not_really_dind.remove()
diff --git a/core/tests/test_new_docker_api.py b/core/tests/test_new_docker_api.py
index 22e69d19d..936efc82b 100644
--- a/core/tests/test_new_docker_api.py
+++ b/core/tests/test_new_docker_api.py
@@ -16,13 +16,13 @@ def test_docker_custom_image():
def test_docker_kwargs():
code_dir = Path(__file__).parent
container_first = DockerContainer("nginx:latest")
- container_first.with_volume_mapping(code_dir, '/code')
+ container_first.with_volume_mapping(code_dir, "/code")
container_second = DockerContainer("nginx:latest")
with container_first:
container_second.with_kwargs(volumes_from=[container_first._container.short_id])
with container_second:
- files_first = container_first.exec('ls /code').output.decode('utf-8').strip()
- files_second = container_second.exec('ls /code').output.decode('utf-8').strip()
+ files_first = container_first.exec("ls /code").output.decode("utf-8").strip()
+ files_second = container_second.exec("ls /code").output.decode("utf-8").strip()
assert files_first == files_second
diff --git a/diagnostics.py b/diagnostics.py
deleted file mode 100644
index 79d306fba..000000000
--- a/diagnostics.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import json
-from testcontainers.core import utils
-from testcontainers.core.container import DockerContainer
-
-
-result = {
- 'is_linux': utils.is_linux(),
- 'is_mac': utils.is_mac(),
- 'is_windows': utils.is_windows(),
- 'inside_container': utils.inside_container(),
- 'default_gateway_ip': utils.default_gateway_ip(),
-}
-
-with DockerContainer('alpine:latest') as container:
- client = container.get_docker_client()
- result.update({
- 'container_host_ip': container.get_container_host_ip(),
- 'docker_client_gateway_ip': client.gateway_ip(container._container.id),
- 'docker_client_bridge_ip': client.bridge_ip(container._container.id),
- 'docker_client_host': client.host(),
- })
-
-print(json.dumps(result, indent=2))
diff --git a/get_requirements.py b/get_requirements.py
deleted file mode 100644
index 549f05efd..000000000
--- a/get_requirements.py
+++ /dev/null
@@ -1,98 +0,0 @@
-import argparse
-import io
-import pathlib
-import requests
-import shutil
-import tempfile
-import zipfile
-
-
-def __main__() -> None:
- parser = argparse.ArgumentParser()
- parser.add_argument("--owner", default="testcontainers")
- parser.add_argument("--repo", default="testcontainers-python")
- parser.add_argument("--run", help="GitHub Action run id")
- parser.add_argument("--pr", help="GitHub PR number")
- parser.add_argument("--branch", default="main")
- parser.add_argument("--token", help="GitHub autentication token")
- args = parser.parse_args()
-
- # Get an access token.
- if args.token:
- token = args.token
- elif (path := pathlib.Path(".github-token")).is_file():
- token = path.read_text().strip()
- else:
- token = input("We need a GitHub access token to fetch the requirements. Please visit "
- "https://github.com/settings/tokens/new, create a token with `public_repo` "
- "scope, and paste it here: ").strip()
- cache = input("Do you want to cache the token in a `.github-token` file [Ny]? ")
- if cache.lower().startswith("y"):
- path.write_text(token)
-
- headers = {
- "Authorization": f"Bearer {token}",
- }
- base_url = f"https://api.github.com/repos/{args.owner}/{args.repo}"
-
- if args.run: # Run id was specified.
- run = args.run
- elif args.pr: # PR was specified, let's get the most recent run id.
- print(f"Fetching most recent commit for PR #{args.pr}.")
- response = requests.get(f"{base_url}/pulls/{args.pr}", headers=headers)
- response.raise_for_status()
- response = response.json()
- head_sha = response["head"]["sha"]
- else: # Nothing was specified, let's get the most recent run id on the main branch.
- print(f"Fetching most recent commit for branch `{args.branch}`.")
- response = requests.get(f"{base_url}/branches/{args.branch}", headers=headers)
- response.raise_for_status()
- response = response.json()
- head_sha = response["commit"]["sha"]
-
- # List all completed runs and find the one that generated the requirements.
- response = requests.get(f"{base_url}/actions/runs", headers=headers, params={
- "head_sha": head_sha,
- "status": "success",
- })
- response.raise_for_status()
- response = response.json()
-
- # Get the requirements run.
- runs = [run for run in response["workflow_runs"] if
- run["path"].endswith("requirements.yml")]
- if not runs:
- raise RuntimeError("Could not find a workflow. Has the GitHub Action run completed? If you"
- "are a first-time contributor, a contributor has to approve your changes"
- "before Actions can run.")
- if len(runs) != 1:
- raise RuntimeError(f"Could not identify unique workflow run: {runs}")
- run = runs[0]["id"]
-
- # Get all the artifacts.
- print(f"fetching artifacts for run {run} ...")
- url = f"{base_url}/actions/runs/{run}/artifacts"
- response = requests.get(url, headers=headers)
- response.raise_for_status()
- response = response.json()
- artifacts = response["artifacts"]
- print(f"Discovered {len(artifacts)} artifacts.")
-
- # Get the content for each artifact and save it.
- for artifact in artifacts:
- name: str = artifact["name"]
- name = name.removeprefix("requirements-")
- print(f"Fetching artifact {name} ...")
- response = requests.get(artifact["archive_download_url"], headers=headers)
- response.raise_for_status()
- with zipfile.ZipFile(io.BytesIO(response.content)) as zip, \
- tempfile.TemporaryDirectory() as tempdir:
- zip.extract("requirements.txt", tempdir)
- shutil.move(pathlib.Path(tempdir) / "requirements.txt",
- pathlib.Path("requirements") / name)
-
- print("Done.")
-
-
-if __name__ == "__main__":
- __main__()
diff --git a/modules/arangodb/testcontainers/arangodb/__init__.py b/modules/arangodb/testcontainers/arangodb/__init__.py
index f56f1eab7..4977e79ef 100644
--- a/modules/arangodb/testcontainers/arangodb/__init__.py
+++ b/modules/arangodb/testcontainers/arangodb/__init__.py
@@ -1,12 +1,14 @@
"""
ArangoDB container support.
"""
+
+import typing
from os import environ
+
from testcontainers.core.config import TIMEOUT
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_for_logs
-import typing
class ArangoDbContainer(DbContainer):
@@ -35,13 +37,15 @@ class ArangoDbContainer(DbContainer):
True
"""
- def __init__(self,
- image: str = "arangodb:latest",
- port: int = 8529,
- arango_root_password: str = "passwd",
- arango_no_auth: typing.Optional[bool] = None,
- arango_random_root_password: typing.Optional[bool] = None,
- **kwargs) -> None:
+ def __init__(
+ self,
+ image: str = "arangodb:latest",
+ port: int = 8529,
+ arango_root_password: str = "passwd",
+ arango_no_auth: typing.Optional[bool] = None,
+ arango_random_root_password: typing.Optional[bool] = None,
+ **kwargs,
+ ) -> None:
"""
Args:
image: Actual docker image/tag to pull.
@@ -62,14 +66,17 @@ def __init__(self,
# See https://www.arangodb.com/docs/stable/deployment-single-instance-manual-start.html for
# details. We convert to int then to bool because Arango uses the string literal "1" to
# indicate flags.
- self.arango_no_auth = bool(int(environ.get("ARANGO_NO_AUTH", 0) if arango_no_auth is None
- else arango_no_auth))
- self.arango_root_password = environ.get("ARANGO_ROOT_PASSWORD") if arango_root_password is \
- None else arango_root_password
- self.arango_random_root_password = bool(int(
- environ.get("ARANGO_RANDOM_ROOT_PASSWORD", 0) if arango_random_root_password is None
- else arango_random_root_password
- ))
+ self.arango_no_auth = bool(int(environ.get("ARANGO_NO_AUTH", 0) if arango_no_auth is None else arango_no_auth))
+ self.arango_root_password = (
+ environ.get("ARANGO_ROOT_PASSWORD") if arango_root_password is None else arango_root_password
+ )
+ self.arango_random_root_password = bool(
+ int(
+ environ.get("ARANGO_RANDOM_ROOT_PASSWORD", 0)
+ if arango_random_root_password is None
+ else arango_random_root_password
+ )
+ )
def _configure(self) -> None:
self.with_env("ARANGO_ROOT_PASSWORD", self.arango_root_password)
diff --git a/modules/arangodb/tests/test_arangodb.py b/modules/arangodb/tests/test_arangodb.py
index 7933c2fd1..6c06b4ca3 100644
--- a/modules/arangodb/tests/test_arangodb.py
+++ b/modules/arangodb/tests/test_arangodb.py
@@ -1,15 +1,17 @@
"""
ArangoDB Container Tests
"""
+
import pytest
from arango import ArangoClient
from arango.exceptions import DatabaseCreateError, ServerVersionError
+
from testcontainers.arangodb import ArangoDbContainer
-ARANGODB_IMAGE_NAME = 'arangodb'
+ARANGODB_IMAGE_NAME = "arangodb"
-def arango_test_ops(arango_client, expeced_version, username='root', password=''):
+def arango_test_ops(arango_client, expeced_version, username="root", password=""):
"""
Basic ArangoDB operations to test DB really up and running.
"""
@@ -48,38 +50,32 @@ def test_docker_run_arango():
"""
Test ArangoDB container with default settings.
"""
- image_version = '3.9.1'
- image = f'{ARANGODB_IMAGE_NAME}:{image_version}'
- arango_root_password = 'passwd'
+ image_version = "3.9.1"
+ image = f"{ARANGODB_IMAGE_NAME}:{image_version}"
+ arango_root_password = "passwd"
with ArangoDbContainer(image) as arango:
client = ArangoClient(hosts=arango.get_connection_url())
# Test invalid auth
+ sys_db = client.db("_system", username="root", password="notTheRightPass")
with pytest.raises(DatabaseCreateError):
- sys_db = client.db("_system", username="root", password='notTheRightPass')
sys_db.create_database("test")
- arango_test_ops(
- arango_client=client,
- expeced_version=image_version,
- password=arango_root_password)
+ arango_test_ops(arango_client=client, expeced_version=image_version, password=arango_root_password)
def test_docker_run_arango_without_auth():
"""
Test ArangoDB container with ARANGO_NO_AUTH var set.
"""
- image_version = '3.9.1'
- image = f'{ARANGODB_IMAGE_NAME}:{image_version}'
+ image_version = "3.9.1"
+ image = f"{ARANGODB_IMAGE_NAME}:{image_version}"
with ArangoDbContainer(image, arango_no_auth=True) as arango:
client = ArangoClient(hosts=arango.get_connection_url())
- arango_test_ops(
- arango_client=client,
- expeced_version=image_version,
- password='')
+ arango_test_ops(arango_client=client, expeced_version=image_version, password="")
def test_docker_run_arango_older_version():
@@ -91,30 +87,27 @@ def test_docker_run_arango_older_version():
we must verify older image tags still supported. (without that logic - we'll face race issues
where we try to create & populate DB when ArangoDB not really ready.
"""
- image_version = '3.1.7'
- image = f'{ARANGODB_IMAGE_NAME}:{image_version}'
+ image_version = "3.1.7"
+ image = f"{ARANGODB_IMAGE_NAME}:{image_version}"
with ArangoDbContainer(image, arango_no_auth=True) as arango:
client = ArangoClient(hosts=arango.get_connection_url())
- arango_test_ops(
- arango_client=client,
- expeced_version=image_version,
- password='')
+ arango_test_ops(arango_client=client, expeced_version=image_version, password="")
def test_docker_run_arango_random_root_password():
"""
Test ArangoDB container with ARANGO_RANDOM_ROOT_PASSWORD var set.
"""
- image_version = '3.9.1'
- image = f'{ARANGODB_IMAGE_NAME}:{image_version}'
- arango_root_password = 'passwd'
+ image_version = "3.9.1"
+ image = f"{ARANGODB_IMAGE_NAME}:{image_version}"
+ arango_root_password = "passwd"
with ArangoDbContainer(image, arango_random_root_password=True) as arango:
client = ArangoClient(hosts=arango.get_connection_url())
# Test invalid auth (we don't know the password in random mode)
+ sys_db = client.db("_system", username="root", password=arango_root_password)
with pytest.raises(ServerVersionError):
- sys_db = client.db("_system", username='root', password=arango_root_password)
assert sys_db.version() == image_version
diff --git a/modules/azurite/testcontainers/azurite/__init__.py b/modules/azurite/testcontainers/azurite/__init__.py
index 847775c63..969fcf35d 100644
--- a/modules/azurite/testcontainers/azurite/__init__.py
+++ b/modules/azurite/testcontainers/azurite/__init__.py
@@ -40,23 +40,30 @@ class AzuriteContainer(DockerContainer):
... api_version="2019-12-12"
... )
"""
- def __init__(self, image: str = "mcr.microsoft.com/azure-storage/azurite:latest", *,
- blob_service_port: int = 10_000, queue_service_port: int = 10_001,
- table_service_port: int = 10_002, account_name: Optional[str] = None,
- account_key: Optional[str] = None, **kwargs) \
- -> None:
- """ Constructs an AzuriteContainer.
+
+ def __init__(
+ self,
+ image: str = "mcr.microsoft.com/azure-storage/azurite:latest",
+ *,
+ blob_service_port: int = 10_000,
+ queue_service_port: int = 10_001,
+ table_service_port: int = 10_002,
+ account_name: Optional[str] = None,
+ account_key: Optional[str] = None,
+ **kwargs,
+ ) -> None:
+ """Constructs an AzuriteContainer.
Args:
image: Expects an image with tag.
**kwargs: Keyword arguments passed to super class.
"""
super().__init__(image=image, **kwargs)
- self.account_name = account_name or os.environ.get(
- "AZURITE_ACCOUNT_NAME", "devstoreaccount1")
+ self.account_name = account_name or os.environ.get("AZURITE_ACCOUNT_NAME", "devstoreaccount1")
self.account_key = account_key or os.environ.get(
- "AZURITE_ACCOUNT_KEY", "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/"
- "K1SZFPTOtr/KBHBeksoGMGw==")
+ "AZURITE_ACCOUNT_KEY",
+ "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/" "K1SZFPTOtr/KBHBeksoGMGw==",
+ )
raise_for_deprecated_parameter(kwargs, "ports_to_expose", "container.with_exposed_ports")
self.blob_service_port = blob_service_port
@@ -68,28 +75,34 @@ def __init__(self, image: str = "mcr.microsoft.com/azure-storage/azurite:latest"
def get_connection_string(self) -> str:
host_ip = self.get_container_host_ip()
- connection_string = f"DefaultEndpointsProtocol=http;" \
- f"AccountName={self.account_name};" \
- f"AccountKey={self.account_key};"
+ connection_string = (
+ f"DefaultEndpointsProtocol=http;" f"AccountName={self.account_name};" f"AccountKey={self.account_key};"
+ )
if self.blob_service_port in self.ports:
- connection_string += f"BlobEndpoint=http://{host_ip}:" \
- f"{self.get_exposed_port(self.blob_service_port)}" \
- f"/{self.account_name};"
+ connection_string += (
+ f"BlobEndpoint=http://{host_ip}:"
+ f"{self.get_exposed_port(self.blob_service_port)}"
+ f"/{self.account_name};"
+ )
if self.queue_service_port in self.ports:
- connection_string += f"QueueEndpoint=http://{host_ip}:" \
- f"{self.get_exposed_port(self.queue_service_port)}" \
- f"/{self.account_name};"
+ connection_string += (
+ f"QueueEndpoint=http://{host_ip}:"
+ f"{self.get_exposed_port(self.queue_service_port)}"
+ f"/{self.account_name};"
+ )
if self.table_service_port in self.ports:
- connection_string += f"TableEndpoint=http://{host_ip}:" \
- f"{self.get_exposed_port(self.table_service_port)}" \
- f"/{self.account_name};"
+ connection_string += (
+ f"TableEndpoint=http://{host_ip}:"
+ f"{self.get_exposed_port(self.table_service_port)}"
+ f"/{self.account_name};"
+ )
return connection_string
- def start(self) -> 'AzuriteContainer':
+ def start(self) -> "AzuriteContainer":
super().start()
self._connect()
return self
@@ -97,5 +110,4 @@ def start(self) -> 'AzuriteContainer':
@wait_container_is_ready(OSError)
def _connect(self) -> None:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
- s.connect((self.get_container_host_ip(),
- int(self.get_exposed_port(next(iter(self.ports))))))
+ s.connect((self.get_container_host_ip(), int(self.get_exposed_port(next(iter(self.ports))))))
diff --git a/modules/azurite/tests/test_azurite.py b/modules/azurite/tests/test_azurite.py
index 5c92e48ed..74230ab14 100644
--- a/modules/azurite/tests/test_azurite.py
+++ b/modules/azurite/tests/test_azurite.py
@@ -1,12 +1,12 @@
-from testcontainers.azurite import AzuriteContainer
from azure.storage.blob import BlobServiceClient
+from testcontainers.azurite import AzuriteContainer
+
def test_docker_run_azurite():
with AzuriteContainer() as azurite_container:
blob_service_client = BlobServiceClient.from_connection_string(
- azurite_container.get_connection_string(),
- api_version="2019-12-12"
+ azurite_container.get_connection_string(), api_version="2019-12-12"
)
blob_service_client.create_container("test-container")
diff --git a/modules/clickhouse/testcontainers/clickhouse/__init__.py b/modules/clickhouse/testcontainers/clickhouse/__init__.py
index b78c509cd..147940199 100644
--- a/modules/clickhouse/testcontainers/clickhouse/__init__.py
+++ b/modules/clickhouse/testcontainers/clickhouse/__init__.py
@@ -40,9 +40,16 @@ class ClickHouseContainer(DbContainer):
... client.execute("select 'working'")
[('working',)]
"""
- def __init__(self, image: str = "clickhouse/clickhouse-server:latest", port: int = 9000,
- username: Optional[str] = None, password: Optional[str] = None,
- dbname: Optional[str] = None, **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "clickhouse/clickhouse-server:latest",
+ port: int = 9000,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ dbname: Optional[str] = None,
+ **kwargs
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "user", "username")
super().__init__(image=image, **kwargs)
self.username = username or os.environ.get("CLICKHOUSE_USER", "test")
diff --git a/modules/clickhouse/tests/test_clickhouse.py b/modules/clickhouse/tests/test_clickhouse.py
index 32ac046f7..23e5f4686 100644
--- a/modules/clickhouse/tests/test_clickhouse.py
+++ b/modules/clickhouse/tests/test_clickhouse.py
@@ -1,4 +1,5 @@
import clickhouse_driver
+
from testcontainers.clickhouse import ClickHouseContainer
@@ -8,4 +9,4 @@ def test_docker_run_clickhouse():
client = clickhouse_driver.Client.from_url(clickhouse.get_connection_url())
result = client.execute("select 'working'")
- assert result == [('working',)]
+ assert result == [("working",)]
diff --git a/modules/elasticsearch/testcontainers/elasticsearch/__init__.py b/modules/elasticsearch/testcontainers/elasticsearch/__init__.py
index 546dd9df1..1b943916a 100644
--- a/modules/elasticsearch/testcontainers/elasticsearch/__init__.py
+++ b/modules/elasticsearch/testcontainers/elasticsearch/__init__.py
@@ -13,7 +13,6 @@
import logging
import re
import urllib
-from typing import Dict
from urllib.error import URLError
from testcontainers.core.container import DockerContainer
@@ -32,14 +31,15 @@ def _major_version_from_image_name(image_name: str) -> int:
version_string = image_name.split(":")[-1]
regex_match = re.compile(r"(\d+)\.\d+\.\d+").match(version_string)
if not regex_match:
- logging.warning("Could not determine major version from image name '%s'. Will use %s",
- image_name, _FALLBACK_VERSION)
+ logging.warning(
+ "Could not determine major version from image name '%s'. Will use %s", image_name, _FALLBACK_VERSION
+ )
return _FALLBACK_VERSION
else:
return int(regex_match.group(1))
-def _environment_by_version(version: int) -> Dict[str, str]:
+def _environment_by_version(version: int) -> dict[str, str]:
"""Returns environment variables required for each major version to work."""
if version == 6:
# This setting is needed to avoid the check for the kernel parameter
@@ -76,11 +76,11 @@ class ElasticSearchContainer(DockerContainer):
def __init__(self, image: str = "elasticsearch", port: int = 9200, **kwargs) -> None:
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(ElasticSearchContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.with_exposed_ports(self.port)
- self.with_env('transport.host', '127.0.0.1')
- self.with_env('http.host', '0.0.0.0')
+ self.with_env("transport.host", "127.0.0.1")
+ self.with_env("http.host", "0.0.0.0")
major_version = _major_version_from_image_name(image)
for key, value in _environment_by_version(major_version).items():
@@ -95,7 +95,7 @@ def _connect(self) -> None:
def get_url(self) -> str:
host = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
- return f'http://{host}:{port}'
+ return f"http://{host}:{port}"
def start(self) -> "ElasticSearchContainer":
super().start()
diff --git a/modules/elasticsearch/tests/test_elasticsearch.py b/modules/elasticsearch/tests/test_elasticsearch.py
index 924dfeb88..e174ec47b 100644
--- a/modules/elasticsearch/tests/test_elasticsearch.py
+++ b/modules/elasticsearch/tests/test_elasticsearch.py
@@ -1,13 +1,14 @@
import json
import urllib.request
+
import pytest
from testcontainers.elasticsearch import ElasticSearchContainer
# The versions below were the current supported versions at time of writing (2022-08-11)
-@pytest.mark.parametrize('version', ['6.8.23', '7.17.5', '8.3.3'])
+@pytest.mark.parametrize("version", ["6.8.23", "7.17.5", "8.3.3"])
def test_docker_run_elasticsearch(version):
- with ElasticSearchContainer(f'elasticsearch:{version}', mem_limit='3G') as es:
+ with ElasticSearchContainer(f"elasticsearch:{version}", mem_limit="3G") as es:
resp = urllib.request.urlopen(es.get_url())
- assert json.loads(resp.read().decode())['version']['number'] == version
+ assert json.loads(resp.read().decode())["version"]["number"] == version
diff --git a/modules/google/testcontainers/google/__init__.py b/modules/google/testcontainers/google/__init__.py
index 71665bea6..b28f2ed48 100644
--- a/modules/google/testcontainers/google/__init__.py
+++ b/modules/google/testcontainers/google/__init__.py
@@ -1 +1 @@
-from .pubsub import PubSubContainer # noqa
+from .pubsub import PubSubContainer # noqa: F401
diff --git a/modules/google/testcontainers/google/pubsub.py b/modules/google/testcontainers/google/pubsub.py
index bdce91c64..78c6929e2 100644
--- a/modules/google/testcontainers/google/pubsub.py
+++ b/modules/google/testcontainers/google/pubsub.py
@@ -10,12 +10,12 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-from google.cloud import pubsub
import os
-from testcontainers.core.container import DockerContainer
-from typing import Type
from unittest.mock import patch
+from google.cloud import pubsub
+from testcontainers.core.container import DockerContainer
+
class PubSubContainer(DockerContainer):
"""
@@ -38,20 +38,20 @@ class PubSubContainer(DockerContainer):
... topic_path = publisher.topic_path(pubsub.project, "my-topic")
... topic = publisher.create_topic(name=topic_path)
"""
- def __init__(self, image: str = "google/cloud-sdk:emulators", project: str = "test-project",
- port: int = 8432, **kwargs) -> None:
- super(PubSubContainer, self).__init__(image=image, **kwargs)
+
+ def __init__(
+ self, image: str = "google/cloud-sdk:emulators", project: str = "test-project", port: int = 8432, **kwargs
+ ) -> None:
+ super().__init__(image=image, **kwargs)
self.project = project
self.port = port
self.with_exposed_ports(self.port)
- self.with_command(
- f"gcloud beta emulators pubsub start --project={project} --host-port=0.0.0.0:{port}"
- )
+ self.with_command(f"gcloud beta emulators pubsub start --project={project} --host-port=0.0.0.0:{port}")
def get_pubsub_emulator_host(self) -> str:
return f"{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}"
- def _get_client(self, cls: Type, **kwargs) -> dict:
+ def _get_client(self, cls: type, **kwargs) -> dict:
with patch.dict(os.environ, PUBSUB_EMULATOR_HOST=self.get_pubsub_emulator_host()):
return cls(**kwargs)
diff --git a/modules/google/tests/test_google.py b/modules/google/tests/test_google.py
index 6fa506e26..780f5fdd6 100644
--- a/modules/google/tests/test_google.py
+++ b/modules/google/tests/test_google.py
@@ -1,7 +1,8 @@
-from testcontainers.google import PubSubContainer
-from testcontainers.core.waiting_utils import wait_for_logs
from queue import Queue
+from testcontainers.core.waiting_utils import wait_for_logs
+from testcontainers.google import PubSubContainer
+
def test_pubsub_container():
pubsub: PubSubContainer
@@ -14,8 +15,7 @@ def test_pubsub_container():
# Create a subscription
subscriber = pubsub.get_subscriber_client()
- subscription_path = subscriber.subscription_path(pubsub.project,
- "my-subscription")
+ subscription_path = subscriber.subscription_path(pubsub.project, "my-subscription")
subscriber.create_subscription(name=subscription_path, topic=topic_path)
# Publish a message
diff --git a/modules/k3s/testcontainers/k3s/__init__.py b/modules/k3s/testcontainers/k3s/__init__.py
index 48c9d0959..045e2eb5d 100644
--- a/modules/k3s/testcontainers/k3s/__init__.py
+++ b/modules/k3s/testcontainers/k3s/__init__.py
@@ -38,9 +38,9 @@ class K3SContainer(DockerContainer):
RANCHER_WEBHOOK_PORT = 8443
def __init__(self, image="rancher/k3s:latest", **kwargs) -> None:
- super(K3SContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.with_exposed_ports(self.KUBE_SECURE_PORT, self.RANCHER_WEBHOOK_PORT)
- self.with_env("K3S_URL", f'https://{self.get_container_host_ip()}:{self.KUBE_SECURE_PORT}')
+ self.with_env("K3S_URL", f"https://{self.get_container_host_ip()}:{self.KUBE_SECURE_PORT}")
self.with_command("server --disable traefik --tls-san=" + self.get_container_host_ip())
self.with_kwargs(privileged=True, tmpfs={"/run": "", "/var/run": ""})
self.with_volume_mapping("/sys/fs/cgroup", "/sys/fs/cgroup", "rw")
@@ -57,9 +57,9 @@ def config_yaml(self) -> str:
"""This function returns the kubernetes config yaml which can be used
to initialise k8s client
"""
- execution = self.get_wrapped_container().exec_run(['cat', '/etc/rancher/k3s/k3s.yaml'])
- config_yaml = execution.output.decode('utf-8') \
- .replace(f'https://127.0.0.1:{self.KUBE_SECURE_PORT}',
- f'https://{self.get_container_host_ip()}:'
- f'{self.get_exposed_port(self.KUBE_SECURE_PORT)}')
+ execution = self.get_wrapped_container().exec_run(["cat", "/etc/rancher/k3s/k3s.yaml"])
+ config_yaml = execution.output.decode("utf-8").replace(
+ f"https://127.0.0.1:{self.KUBE_SECURE_PORT}",
+ f"https://{self.get_container_host_ip()}:" f"{self.get_exposed_port(self.KUBE_SECURE_PORT)}",
+ )
return config_yaml
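config_yaml() is intended to bootstrap a Kubernetes client; a sketch under the assumption that the official kubernetes and PyYAML packages are installed:

    import yaml
    from kubernetes import client, config

    from testcontainers.k3s import K3SContainer

    with K3SContainer() as k3s:
        # The returned kubeconfig already points at the host-mapped API server port.
        config.load_kube_config_from_dict(yaml.safe_load(k3s.config_yaml()))
        nodes = client.CoreV1Api().list_node()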
diff --git a/modules/kafka/testcontainers/kafka/__init__.py b/modules/kafka/testcontainers/kafka/__init__.py
index 49c362c20..399839433 100644
--- a/modules/kafka/testcontainers/kafka/__init__.py
+++ b/modules/kafka/testcontainers/kafka/__init__.py
@@ -4,8 +4,7 @@
from textwrap import dedent
from kafka import KafkaConsumer
-from kafka.errors import KafkaError, UnrecognizedBrokerVersion, NoBrokersAvailable
-
+from kafka.errors import KafkaError, NoBrokersAvailable, UnrecognizedBrokerVersion
from testcontainers.core.container import DockerContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_container_is_ready
@@ -24,42 +23,41 @@ class KafkaContainer(DockerContainer):
>>> with KafkaContainer() as kafka:
... connection = kafka.get_bootstrap_server()
"""
- TC_START_SCRIPT = '/tc-start.sh'
- def __init__(self, image: str = "confluentinc/cp-kafka:5.4.3", port: int = 9093, **kwargs) \
- -> None:
+ TC_START_SCRIPT = "/tc-start.sh"
+
+ def __init__(self, image: str = "confluentinc/cp-kafka:5.4.3", port: int = 9093, **kwargs) -> None:
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(KafkaContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.with_exposed_ports(self.port)
- listeners = f'PLAINTEXT://0.0.0.0:{self.port},BROKER://0.0.0.0:9092'
- self.with_env('KAFKA_LISTENERS', listeners)
- self.with_env('KAFKA_LISTENER_SECURITY_PROTOCOL_MAP',
- 'BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT')
- self.with_env('KAFKA_INTER_BROKER_LISTENER_NAME', 'BROKER')
+ listeners = f"PLAINTEXT://0.0.0.0:{self.port},BROKER://0.0.0.0:9092"
+ self.with_env("KAFKA_LISTENERS", listeners)
+ self.with_env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT")
+ self.with_env("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER")
- self.with_env('KAFKA_BROKER_ID', '1')
- self.with_env('KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR', '1')
- self.with_env('KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS', '1')
- self.with_env('KAFKA_LOG_FLUSH_INTERVAL_MESSAGES', '10000000')
- self.with_env('KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS', '0')
+ self.with_env("KAFKA_BROKER_ID", "1")
+ self.with_env("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1")
+ self.with_env("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", "1")
+ self.with_env("KAFKA_LOG_FLUSH_INTERVAL_MESSAGES", "10000000")
+ self.with_env("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0")
def get_bootstrap_server(self) -> str:
host = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
- return f'{host}:{port}'
+ return f"{host}:{port}"
@wait_container_is_ready(UnrecognizedBrokerVersion, NoBrokersAvailable, KafkaError, ValueError)
def _connect(self) -> None:
bootstrap_server = self.get_bootstrap_server()
- consumer = KafkaConsumer(group_id='test', bootstrap_servers=[bootstrap_server])
+ consumer = KafkaConsumer(group_id="test", bootstrap_servers=[bootstrap_server])
if not consumer.bootstrap_connected():
raise KafkaError("Unable to connect with kafka container!")
def tc_start(self) -> None:
host = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
- listeners = f'PLAINTEXT://{host}:{port},BROKER://$(hostname -i):9092'
+ listeners = f"PLAINTEXT://{host}:{port},BROKER://$(hostname -i):9092"
data = (
dedent(
f"""
@@ -76,7 +74,7 @@ def tc_start(self) -> None:
"""
)
.strip()
- .encode('utf-8')
+ .encode("utf-8")
)
self.create_file(data, KafkaContainer.TC_START_SCRIPT)
diff --git a/modules/kafka/tests/test_kafka.py b/modules/kafka/tests/test_kafka.py
index 5ebe99296..c47aa111d 100644
--- a/modules/kafka/tests/test_kafka.py
+++ b/modules/kafka/tests/test_kafka.py
@@ -1,4 +1,5 @@
from kafka import KafkaConsumer, KafkaProducer, TopicPartition
+
from testcontainers.kafka import KafkaContainer
@@ -14,12 +15,12 @@ def test_kafka_producer_consumer_custom_port():
def test_kafka_confluent_7_1_3():
- with KafkaContainer(image='confluentinc/cp-kafka:7.1.3') as container:
+ with KafkaContainer(image="confluentinc/cp-kafka:7.1.3") as container:
produce_and_consume_kafka_message(container)
def produce_and_consume_kafka_message(container):
- topic = 'test-topic'
+ topic = "test-topic"
bootstrap_server = container.get_bootstrap_server()
producer = KafkaProducer(bootstrap_servers=[bootstrap_server])
@@ -31,5 +32,4 @@ def produce_and_consume_kafka_message(container):
tp = TopicPartition(topic, 0)
consumer.assign([tp])
consumer.seek_to_beginning()
- assert consumer.end_offsets([tp])[tp] == 1, \
- "Expected exactly one test message to be present on test topic !"
+ assert consumer.end_offsets([tp])[tp] == 1, "Expected exactly one test message to be present on test topic !"
diff --git a/modules/keycloak/testcontainers/keycloak/__init__.py b/modules/keycloak/testcontainers/keycloak/__init__.py
index aeb9a4c78..2e8f77383 100644
--- a/modules/keycloak/testcontainers/keycloak/__init__.py
+++ b/modules/keycloak/testcontainers/keycloak/__init__.py
@@ -11,13 +11,13 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
+from typing import Optional
+
import requests
from keycloak import KeycloakAdmin
-
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_container_is_ready
-from typing import Optional
class KeycloakContainer(DockerContainer):
@@ -33,9 +33,15 @@ class KeycloakContainer(DockerContainer):
>>> with KeycloakContainer() as kc:
... keycloak = kc.get_client()
"""
- def __init__(self, image="jboss/keycloak:latest", username: Optional[str] = None,
- password: Optional[str] = None, port: int = 8080) -> None:
- super(KeycloakContainer, self).__init__(image=image)
+
+ def __init__(
+ self,
+ image="jboss/keycloak:latest",
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ port: int = 8080,
+ ) -> None:
+ super().__init__(image=image)
self.username = username or os.environ.get("KEYCLOAK_USER", "test")
self.password = password or os.environ.get("KEYCLOAK_PASSWORD", "test")
self.port = port
@@ -63,15 +69,12 @@ def start(self) -> "KeycloakContainer":
return self
def get_client(self, **kwargs) -> KeycloakAdmin:
- default_kwargs = dict(
- server_url=f"{self.get_url()}/auth/",
- username=self.username,
- password=self.password,
- realm_name="master",
- verify=True,
- )
- kwargs = {
- **default_kwargs,
- **kwargs
+ default_kwargs = {
+ "server_url": f"{self.get_url()}/auth/",
+ "username": self.username,
+ "password": self.password,
+ "realm_name": "master",
+ "verify": True,
}
+ kwargs = {**default_kwargs, **kwargs}
return KeycloakAdmin(**kwargs)
diff --git a/modules/keycloak/tests/test_keycloak.py b/modules/keycloak/tests/test_keycloak.py
index 900ee0ddf..70eff57cb 100644
--- a/modules/keycloak/tests/test_keycloak.py
+++ b/modules/keycloak/tests/test_keycloak.py
@@ -5,5 +5,5 @@
@pytest.mark.parametrize("version", ["16.1.1"])
def test_docker_run_keycloak(version: str):
- with KeycloakContainer(f'jboss/keycloak:{version}') as kc:
+ with KeycloakContainer(f"jboss/keycloak:{version}") as kc:
kc.get_client().users_count()
diff --git a/modules/localstack/testcontainers/localstack/__init__.py b/modules/localstack/testcontainers/localstack/__init__.py
index 470d78a00..15cabeab6 100644
--- a/modules/localstack/testcontainers/localstack/__init__.py
+++ b/modules/localstack/testcontainers/localstack/__init__.py
@@ -10,13 +10,15 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-import boto3
import functools as ft
import os
-from testcontainers.core.waiting_utils import wait_for_logs
-from testcontainers.core.container import DockerContainer
from typing import Any, Optional
+import boto3
+
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.waiting_utils import wait_for_logs
+
class LocalStackContainer(DockerContainer):
"""
@@ -34,9 +36,15 @@ class LocalStackContainer(DockerContainer):
>>> tables
{'TableNames': [], ...}
"""
- def __init__(self, image: str = 'localstack/localstack:2.0.1', edge_port: int = 4566,
- region_name: Optional[str] = None, **kwargs) -> None:
- super(LocalStackContainer, self).__init__(image, **kwargs)
+
+ def __init__(
+ self,
+ image: str = "localstack/localstack:2.0.1",
+ edge_port: int = 4566,
+ region_name: Optional[str] = None,
+ **kwargs,
+ ) -> None:
+ super().__init__(image, **kwargs)
self.edge_port = edge_port
self.region_name = region_name or os.environ.get("AWS_DEFAULT_REGION", "us-west-1")
self.with_exposed_ports(self.edge_port)
@@ -54,7 +62,7 @@ def with_services(self, *services) -> "LocalStackContainer":
Returns:
self: Container to allow chaining of 'with_*' calls.
"""
- return self.with_env('SERVICES', ','.join(services))
+ return self.with_env("SERVICES", ",".join(services))
def get_url(self) -> str:
"""
@@ -64,7 +72,7 @@ def get_url(self) -> str:
"""
host = self.get_container_host_ip()
port = self.get_exposed_port(self.edge_port)
- return f'http://{host}:{port}'
+ return f"http://{host}:{port}"
@ft.wraps(boto3.client)
def get_client(self, name, **kwargs) -> Any:
@@ -79,5 +87,5 @@ def get_client(self, name, **kwargs) -> Any:
def start(self, timeout: float = 60) -> "LocalStackContainer":
super().start()
- wait_for_logs(self, r'Ready\.\n', timeout=timeout)
+ wait_for_logs(self, r"Ready\.\n", timeout=timeout)
return self
diff --git a/modules/localstack/tests/test_localstack.py b/modules/localstack/tests/test_localstack.py
index f587c41db..6801aefdb 100644
--- a/modules/localstack/tests/test_localstack.py
+++ b/modules/localstack/tests/test_localstack.py
@@ -6,13 +6,13 @@
def test_docker_run_localstack():
with LocalStackContainer() as localstack:
- resp = urllib.request.urlopen(f'{localstack.get_url()}/health')
- services = json.loads(resp.read().decode())['services']
+ resp = urllib.request.urlopen(f"{localstack.get_url()}/health")
+ services = json.loads(resp.read().decode())["services"]
# Check that all services are running
- assert all(value == 'available' for value in services.values())
+ assert all(value == "available" for value in services.values())
         # Check that some of the expected service keys are present
- assert all(test_service in services for test_service in ['dynamodb', 'sns', 'sqs'])
+ assert all(test_service in services for test_service in ["dynamodb", "sns", "sqs"])
def test_localstack_boto3():
diff --git a/modules/minio/testcontainers/minio/__init__.py b/modules/minio/testcontainers/minio/__init__.py
index 87b91d942..51a7094e3 100644
--- a/modules/minio/testcontainers/minio/__init__.py
+++ b/modules/minio/testcontainers/minio/__init__.py
@@ -1,10 +1,15 @@
-from minio import Minio
-from requests import ConnectionError, Response, get
+from typing import TYPE_CHECKING
+
+from requests import ConnectionError, get
+from minio import Minio
from testcontainers.core.container import DockerContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_container_is_ready
+if TYPE_CHECKING:
+ from requests import Response
+
class MinioContainer(DockerContainer):
"""
@@ -35,9 +40,14 @@ class MinioContainer(DockerContainer):
... retrieved_content = client.get_object("test", "testfile.txt").data
"""
- def __init__(self, image: str = "minio/minio:RELEASE.2022-12-02T19-19-22Z",
- port: int = 9000, access_key: str = "minioadmin",
- secret_key: str = "minioadmin", **kwargs) -> None:
+ def __init__(
+ self,
+ image: str = "minio/minio:RELEASE.2022-12-02T19-19-22Z",
+ port: int = 9000,
+ access_key: str = "minioadmin",
+ secret_key: str = "minioadmin",
+ **kwargs,
+ ) -> None:
"""
Args:
image: Docker image to use for the MinIO container.
@@ -46,7 +56,7 @@ def __init__(self, image: str = "minio/minio:RELEASE.2022-12-02T19-19-22Z",
secret_key: Secret key for client connections.
"""
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(MinioContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.access_key = access_key
self.secret_key = secret_key
diff --git a/modules/mongodb/testcontainers/mongodb/__init__.py b/modules/mongodb/testcontainers/mongodb/__init__.py
index 97db1a3e2..1ff029258 100644
--- a/modules/mongodb/testcontainers/mongodb/__init__.py
+++ b/modules/mongodb/testcontainers/mongodb/__init__.py
@@ -11,11 +11,13 @@
# License for the specific language governing permissions and limitations
# under the License.
import os
+from typing import Optional
+
from pymongo import MongoClient
+
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_container_is_ready
-from typing import Optional
class MongoDbContainer(DbContainer):
@@ -48,11 +50,18 @@ class MongoDbContainer(DbContainer):
... # Find the restaurant document
... cursor = db.restaurants.find({"borough": "Manhattan"})
"""
- def __init__(self, image: str = "mongo:latest", port: int = 27017,
- username: Optional[str] = None, password: Optional[str] = None,
- dbname: Optional[str] = None, **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "mongo:latest",
+ port: int = 27017,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ dbname: Optional[str] = None,
+ **kwargs
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(MongoDbContainer, self).__init__(image=image, **kwargs)
+ super().__init__(image=image, **kwargs)
self.username = username or os.environ.get("MONGO_INITDB_ROOT_USERNAME", "test")
self.password = password or os.environ.get("MONGO_INITDB_ROOT_PASSWORD", "test")
self.dbname = dbname or os.environ.get("MONGO_DB", "test")
@@ -66,7 +75,7 @@ def _configure(self) -> None:
def get_connection_url(self) -> str:
return self._create_connection_url(
- dialect='mongodb',
+ dialect="mongodb",
username=self.username,
password=self.password,
port=self.port,
diff --git a/modules/mongodb/tests/test_mongodb.py b/modules/mongodb/tests/test_mongodb.py
index c778a0100..5b9d6be21 100644
--- a/modules/mongodb/tests/test_mongodb.py
+++ b/modules/mongodb/tests/test_mongodb.py
@@ -1,6 +1,7 @@
+import pytest
from pymongo import MongoClient
from pymongo.errors import OperationFailure
-import pytest
+
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_for
from testcontainers.mongodb import MongoDbContainer
@@ -8,6 +9,7 @@
def test_docker_generic_db():
with DockerContainer("mongo:latest").with_bind_ports(27017, 27017) as mongo_container:
+
def connect():
host = mongo_container.get_container_host_ip()
port = mongo_container.get_exposed_port(27017)
@@ -20,12 +22,12 @@ def connect():
"street": "2 Avenue",
"zipcode": "10075",
"building": "1480",
- "coord": [-73.9557413, 40.7720266]
+ "coord": [-73.9557413, 40.7720266],
},
"borough": "Manhattan",
"cuisine": "Italian",
"name": "Vella",
- "restaurant_id": "41704620"
+ "restaurant_id": "41704620",
}
)
assert result.inserted_id
@@ -42,22 +44,21 @@ def test_docker_run_mongodb():
"street": "2 Avenue",
"zipcode": "10075",
"building": "1480",
- "coord": [-73.9557413, 40.7720266]
+ "coord": [-73.9557413, 40.7720266],
},
"borough": "Manhattan",
"cuisine": "Italian",
"name": "Vella",
- "restaurant_id": "41704620"
+ "restaurant_id": "41704620",
}
db.restaurants.insert_one(doc)
cursor = db.restaurants.find({"borough": "Manhattan"})
- assert cursor.next()['restaurant_id'] == doc['restaurant_id']
+ assert cursor.next()["restaurant_id"] == doc["restaurant_id"]
def test_docker_run_mongodb_connect_without_credentials():
with MongoDbContainer() as mongo:
- connection_url = f"mongodb://{mongo.get_container_host_ip()}:" \
- f"{mongo.get_exposed_port(mongo.port)}"
+ connection_url = f"mongodb://{mongo.get_container_host_ip()}:" f"{mongo.get_exposed_port(mongo.port)}"
db = MongoClient(connection_url).test
with pytest.raises(OperationFailure):
db.restaurants.insert_one({})
diff --git a/modules/mssql/testcontainers/mssql/__init__.py b/modules/mssql/testcontainers/mssql/__init__.py
index 9de6edf00..98b668269 100644
--- a/modules/mssql/testcontainers/mssql/__init__.py
+++ b/modules/mssql/testcontainers/mssql/__init__.py
@@ -1,5 +1,6 @@
from os import environ
from typing import Optional
+
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
@@ -21,11 +22,18 @@ class SqlServerContainer(DbContainer):
... result = connection.execute(sqlalchemy.text("select @@VERSION"))
"""
- def __init__(self, image: str = "mcr.microsoft.com/mssql/server:2019-latest",
- username: str = "SA", password: Optional[str] = None, port: int = 1433,
- dbname: str = "tempdb", dialect: str = 'mssql+pymssql', **kwargs) -> None:
+ def __init__(
+ self,
+ image: str = "mcr.microsoft.com/mssql/server:2019-latest",
+ username: str = "SA",
+ password: Optional[str] = None,
+ port: int = 1433,
+ dbname: str = "tempdb",
+ dialect: str = "mssql+pymssql",
+ **kwargs
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "user", "username")
- super(SqlServerContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.with_exposed_ports(self.port)
@@ -39,10 +47,9 @@ def _configure(self) -> None:
self.with_env("SA_PASSWORD", self.password)
self.with_env("SQLSERVER_USER", self.username)
self.with_env("SQLSERVER_DBNAME", self.dbname)
- self.with_env("ACCEPT_EULA", 'Y')
+ self.with_env("ACCEPT_EULA", "Y")
def get_connection_url(self) -> str:
return super()._create_connection_url(
- dialect=self.dialect, username=self.username, password=self.password,
- dbname=self.dbname, port=self.port
+ dialect=self.dialect, username=self.username, password=self.password, dbname=self.dbname, port=self.port
)
diff --git a/modules/mssql/tests/test_mssql.py b/modules/mssql/tests/test_mssql.py
index b615f1fff..6f48f0a13 100644
--- a/modules/mssql/tests/test_mssql.py
+++ b/modules/mssql/tests/test_mssql.py
@@ -1,20 +1,21 @@
import sqlalchemy
+
from testcontainers.mssql import SqlServerContainer
def test_docker_run_mssql():
- image = 'mcr.microsoft.com/azure-sql-edge'
- dialect = 'mssql+pymssql'
+ image = "mcr.microsoft.com/azure-sql-edge"
+ dialect = "mssql+pymssql"
with SqlServerContainer(image, dialect=dialect) as mssql:
engine = sqlalchemy.create_engine(mssql.get_connection_url())
with engine.begin() as connection:
- result = connection.execute(sqlalchemy.text('select @@servicename'))
+ result = connection.execute(sqlalchemy.text("select @@servicename"))
for row in result:
- assert row[0] == 'MSSQLSERVER'
+ assert row[0] == "MSSQLSERVER"
with SqlServerContainer(image, password="1Secure*Password2", dialect=dialect) as mssql:
engine = sqlalchemy.create_engine(mssql.get_connection_url())
with engine.begin() as connection:
- result = connection.execute(sqlalchemy.text('select @@servicename'))
+ result = connection.execute(sqlalchemy.text("select @@servicename"))
for row in result:
- assert row[0] == 'MSSQLSERVER'
+ assert row[0] == "MSSQLSERVER"
diff --git a/modules/mysql/testcontainers/mysql/__init__.py b/modules/mysql/testcontainers/mysql/__init__.py
index 6234540bc..65b317b0c 100644
--- a/modules/mysql/testcontainers/mysql/__init__.py
+++ b/modules/mysql/testcontainers/mysql/__init__.py
@@ -12,6 +12,7 @@
# under the License.
from os import environ
from typing import Optional
+
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
@@ -38,23 +39,31 @@ class MySqlContainer(DbContainer):
... result = connection.execute(sqlalchemy.text("select version()"))
... version, = result.fetchone()
"""
- def __init__(self, image: str = "mysql:latest", username: Optional[str] = None,
- root_password: Optional[str] = None, password: Optional[str] = None,
- dbname: Optional[str] = None, port: int = 3306, **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "mysql:latest",
+ username: Optional[str] = None,
+ root_password: Optional[str] = None,
+ password: Optional[str] = None,
+ dbname: Optional[str] = None,
+ port: int = 3306,
+ **kwargs
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "MYSQL_USER", "username")
raise_for_deprecated_parameter(kwargs, "MYSQL_ROOT_PASSWORD", "root_password")
raise_for_deprecated_parameter(kwargs, "MYSQL_PASSWORD", "password")
raise_for_deprecated_parameter(kwargs, "MYSQL_DATABASE", "dbname")
- super(MySqlContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.with_exposed_ports(self.port)
- self.username = username or environ.get('MYSQL_USER', 'test')
- self.root_password = root_password or environ.get('MYSQL_ROOT_PASSWORD', 'test')
- self.password = password or environ.get('MYSQL_PASSWORD', 'test')
- self.dbname = dbname or environ.get('MYSQL_DATABASE', 'test')
+ self.username = username or environ.get("MYSQL_USER", "test")
+ self.root_password = root_password or environ.get("MYSQL_ROOT_PASSWORD", "test")
+ self.password = password or environ.get("MYSQL_PASSWORD", "test")
+ self.dbname = dbname or environ.get("MYSQL_DATABASE", "test")
- if self.username == 'root':
+ if self.username == "root":
self.root_password = self.password
def _configure(self) -> None:
@@ -66,8 +75,6 @@ def _configure(self) -> None:
self.with_env("MYSQL_PASSWORD", self.password)
def get_connection_url(self) -> str:
- return super()._create_connection_url(dialect="mysql+pymysql",
- username=self.username,
- password=self.password,
- dbname=self.dbname,
- port=self.port)
+ return super()._create_connection_url(
+ dialect="mysql+pymysql", username=self.username, password=self.password, dbname=self.dbname, port=self.port
+ )
diff --git a/modules/mysql/tests/test_mysql.py b/modules/mysql/tests/test_mysql.py
index 274207b92..a84df4d13 100644
--- a/modules/mysql/tests/test_mysql.py
+++ b/modules/mysql/tests/test_mysql.py
@@ -1,31 +1,33 @@
import re
-import sqlalchemy
+from unittest import mock
+
import pytest
+import sqlalchemy
+
from testcontainers.core.utils import is_arm
from testcontainers.mysql import MySqlContainer
-from unittest import mock
-@pytest.mark.skipif(is_arm(), reason='mysql container not available for ARM')
+@pytest.mark.skipif(is_arm(), reason="mysql container not available for ARM")
def test_docker_run_mysql():
- config = MySqlContainer('mysql:5.7.17')
+ config = MySqlContainer("mysql:5.7.17")
with config as mysql:
engine = sqlalchemy.create_engine(mysql.get_connection_url())
with engine.begin() as connection:
result = connection.execute(sqlalchemy.text("select version()"))
for row in result:
- assert row[0].startswith('5.7.17')
+ assert row[0].startswith("5.7.17")
-@pytest.mark.skipif(is_arm(), reason='mysql container not available for ARM')
+@pytest.mark.skipif(is_arm(), reason="mysql container not available for ARM")
def test_docker_run_mysql_8():
- config = MySqlContainer('mysql:8')
+ config = MySqlContainer("mysql:8")
with config as mysql:
engine = sqlalchemy.create_engine(mysql.get_connection_url())
with engine.begin() as connection:
result = connection.execute(sqlalchemy.text("select version()"))
for row in result:
- assert row[0].startswith('8')
+ assert row[0].startswith("8")
def test_docker_run_mariadb():
@@ -34,13 +36,13 @@ def test_docker_run_mariadb():
with engine.begin() as connection:
result = connection.execute(sqlalchemy.text("select version()"))
for row in result:
- assert row[0].startswith('10.6.5')
+ assert row[0].startswith("10.6.5")
def test_docker_env_variables():
- with mock.patch.dict("os.environ", MYSQL_USER="demo", MYSQL_DATABASE="custom_db"), \
- MySqlContainer("mariadb:10.6.5").with_bind_ports(3306, 32785).maybe_emulate_amd64() \
- as container:
+ with mock.patch.dict("os.environ", MYSQL_USER="demo", MYSQL_DATABASE="custom_db"), MySqlContainer(
+ "mariadb:10.6.5"
+ ).with_bind_ports(3306, 32785).maybe_emulate_amd64() as container:
url = container.get_connection_url()
- pattern = r'mysql\+pymysql:\/\/demo:test@[\w,.]+:(3306|32785)\/custom_db'
+ pattern = r"mysql\+pymysql:\/\/demo:test@[\w,.]+:(3306|32785)\/custom_db"
assert re.match(pattern, url)
diff --git a/modules/neo4j/testcontainers/neo4j/__init__.py b/modules/neo4j/testcontainers/neo4j/__init__.py
index cf76df501..26f46dc61 100644
--- a/modules/neo4j/testcontainers/neo4j/__init__.py
+++ b/modules/neo4j/testcontainers/neo4j/__init__.py
@@ -12,14 +12,13 @@
# under the License.
import os
+from typing import Optional
from neo4j import Driver, GraphDatabase
-
from testcontainers.core.config import TIMEOUT
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs
-from typing import Optional
class Neo4jContainer(DbContainer):
@@ -38,10 +37,17 @@ class Neo4jContainer(DbContainer):
... result = session.run("MATCH (n) RETURN n LIMIT 1")
... record = result.single()
"""
- def __init__(self, image: str = "neo4j:latest", port: int = 7687,
- password: Optional[str] = None, username: Optional[str] = None, **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "neo4j:latest",
+ port: int = 7687,
+ password: Optional[str] = None,
+ username: Optional[str] = None,
+ **kwargs,
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "bolt_port", "port")
- super(Neo4jContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.username = username or os.environ.get("NEO4J_USER", "neo4j")
self.password = password or os.environ.get("NEO4J_PASSWORD", "password")
self.port = port
@@ -65,8 +71,4 @@ def _connect(self) -> None:
driver.verify_connectivity()
def get_driver(self, **kwargs) -> Driver:
- return GraphDatabase.driver(
- self.get_connection_url(),
- auth=(self.username, self.password),
- **kwargs
- )
+ return GraphDatabase.driver(self.get_connection_url(), auth=(self.username, self.password), **kwargs)
diff --git a/modules/neo4j/tests/test_neo4j.py b/modules/neo4j/tests/test_neo4j.py
index 8c90ca0e9..6058d34c2 100644
--- a/modules/neo4j/tests/test_neo4j.py
+++ b/modules/neo4j/tests/test_neo4j.py
@@ -2,15 +2,14 @@
def test_docker_run_neo4j_latest():
- with Neo4jContainer() as neo4j:
- with neo4j.get_driver() as driver:
- with driver.session() as session:
- result = session.run(
- """
- CALL dbms.components()
- YIELD name, versions, edition
- UNWIND versions as version
- RETURN name, version, edition
- """)
- record = result.single()
- assert record["name"].startswith("Neo4j")
+ with Neo4jContainer() as neo4j, neo4j.get_driver() as driver, driver.session() as session:
+ result = session.run(
+ """
+ CALL dbms.components()
+ YIELD name, versions, edition
+ UNWIND versions as version
+ RETURN name, version, edition
+ """
+ )
+ record = result.single()
+ assert record["name"].startswith("Neo4j")
diff --git a/modules/nginx/testcontainers/nginx/__init__.py b/modules/nginx/testcontainers/nginx/__init__.py
index d0680f19a..ecf4c072e 100644
--- a/modules/nginx/testcontainers/nginx/__init__.py
+++ b/modules/nginx/testcontainers/nginx/__init__.py
@@ -22,11 +22,11 @@
class NginxContainer(DockerContainer):
def __init__(self, image: str = "nginx:latest", port: int = 80, **kwargs) -> None:
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(NginxContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.with_exposed_ports(self.port)
- def start(self) -> 'NginxContainer':
+ def start(self) -> "NginxContainer":
super().start()
host = self.get_container_host_ip()
@@ -37,5 +37,5 @@ def start(self) -> 'NginxContainer':
@wait_container_is_ready(urllib.error.URLError)
def _connect(self, host: str, port: str) -> None:
- url = urllib.parse.urlunsplit(('http', f'{host}:{port}', '', '', ''))
+ url = urllib.parse.urlunsplit(("http", f"{host}:{port}", "", "", ""))
urllib.request.urlopen(url, timeout=1)
diff --git a/modules/nginx/tests/test_nginx.py b/modules/nginx/tests/test_nginx.py
index 0d369bf71..39fba5e97 100644
--- a/modules/nginx/tests/test_nginx.py
+++ b/modules/nginx/tests/test_nginx.py
@@ -8,5 +8,5 @@ def test_docker_run_nginx():
with nginx_container as nginx:
url = f"http://{nginx.get_container_host_ip()}:{nginx.get_exposed_port(nginx.port)}/"
r = requests.get(url)
- assert (r.status_code == 200)
- assert ('Welcome to nginx!' in r.text)
+ assert r.status_code == 200
+ assert "Welcome to nginx!" in r.text
diff --git a/modules/opensearch/testcontainers/opensearch/__init__.py b/modules/opensearch/testcontainers/opensearch/__init__.py
index 567ba264d..f889c9934 100644
--- a/modules/opensearch/testcontainers/opensearch/__init__.py
+++ b/modules/opensearch/testcontainers/opensearch/__init__.py
@@ -30,8 +30,13 @@ class OpenSearchContainer(DockerContainer):
... search_result = client.search(index="test", body={"query": {"match_all": {}}})
"""
- def __init__(self, image: str = "opensearchproject/opensearch:2.4.0",
- port: int = 9200, security_enabled: bool = False, **kwargs) -> None:
+ def __init__(
+ self,
+ image: str = "opensearchproject/opensearch:2.4.0",
+ port: int = 9200,
+ security_enabled: bool = False,
+ **kwargs
+ ) -> None:
"""
Args:
image: Docker image to use for the container.
@@ -39,7 +44,7 @@ def __init__(self, image: str = "opensearchproject/opensearch:2.4.0",
security_enabled: :code:`False` disables the security plugin in OpenSearch.
"""
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(OpenSearchContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.security_enabled = security_enabled
@@ -85,12 +90,7 @@ def get_client(self, verify_certs: bool = False, **kwargs) -> OpenSearch:
**kwargs,
)
- @wait_container_is_ready(
- ConnectionError,
- TransportError,
- ProtocolError,
- ConnectionResetError
- )
+ @wait_container_is_ready(ConnectionError, TransportError, ProtocolError, ConnectionResetError)
def _healthcheck(self) -> None:
"""This is an internal method used to check if the OpenSearch container
is healthy and ready to receive requests."""
diff --git a/modules/opensearch/tests/test_opensearch.py b/modules/opensearch/tests/test_opensearch.py
index f5fb411e1..a287563ed 100644
--- a/modules/opensearch/tests/test_opensearch.py
+++ b/modules/opensearch/tests/test_opensearch.py
@@ -20,9 +20,7 @@ def test_docker_run_opensearch_v1():
def test_docker_run_opensearch_v1_with_security():
- with OpenSearchContainer(
- image="opensearchproject/opensearch:1.3.6", security_enabled=True
- ) as opensearch:
+ with OpenSearchContainer(image="opensearchproject/opensearch:1.3.6", security_enabled=True) as opensearch:
client = opensearch.get_client()
assert client.cluster.health()["status"] == "green"
diff --git a/modules/oracle/testcontainers/oracle/__init__.py b/modules/oracle/testcontainers/oracle/__init__.py
index 3bd736076..c0a5e657c 100644
--- a/modules/oracle/testcontainers/oracle/__init__.py
+++ b/modules/oracle/testcontainers/oracle/__init__.py
@@ -19,15 +19,14 @@ class OracleDbContainer(DbContainer):
"""
def __init__(self, image: str = "wnameless/oracle-xe-11g-r2:latest", **kwargs) -> None:
- super(OracleDbContainer, self).__init__(image=image, **kwargs)
+ super().__init__(image=image, **kwargs)
self.container_port = 1521
self.with_exposed_ports(self.container_port)
self.with_env("ORACLE_ALLOW_REMOTE", "true")
def get_connection_url(self) -> str:
return super()._create_connection_url(
- dialect="oracle", username="system", password="oracle", port=self.container_port,
- dbname="xe"
+ dialect="oracle", username="system", password="oracle", port=self.container_port, dbname="xe"
)
def _configure(self) -> None:
diff --git a/modules/oracle/tests/test_oracle.py b/modules/oracle/tests/test_oracle.py
index ccbcc4b69..32d58b461 100644
--- a/modules/oracle/tests/test_oracle.py
+++ b/modules/oracle/tests/test_oracle.py
@@ -1,15 +1,18 @@
-import sqlalchemy
import pytest
+import sqlalchemy
+
from testcontainers.oracle import OracleDbContainer
@pytest.mark.skip(reason="needs oracle client libraries unavailable on Travis")
def test_docker_run_oracle():
- versions = {'Oracle Database 11g Express Edition Release 11.2.0.2.0 - 64bit Production',
- 'PL/SQL Release 11.2.0.2.0 - Production',
- 'CORE\t11.2.0.2.0\tProduction',
- 'TNS for Linux: Version 11.2.0.2.0 - Production',
- 'NLSRTL Version 11.2.0.2.0 - Production'}
+ versions = {
+ "Oracle Database 11g Express Edition Release 11.2.0.2.0 - 64bit Production",
+ "PL/SQL Release 11.2.0.2.0 - Production",
+ "CORE\t11.2.0.2.0\tProduction",
+ "TNS for Linux: Version 11.2.0.2.0 - Production",
+ "NLSRTL Version 11.2.0.2.0 - Production",
+ }
with OracleDbContainer() as oracledb:
engine = sqlalchemy.create_engine(oracledb.get_connection_url())
with engine.begin() as connection:
diff --git a/modules/postgres/testcontainers/postgres/__init__.py b/modules/postgres/testcontainers/postgres/__init__.py
index 85e0bac80..a61ad2cf8 100644
--- a/modules/postgres/testcontainers/postgres/__init__.py
+++ b/modules/postgres/testcontainers/postgres/__init__.py
@@ -12,6 +12,7 @@
# under the License.
import os
from typing import Optional
+
from testcontainers.core.generic import DbContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
@@ -39,11 +40,19 @@ class PostgresContainer(DbContainer):
>>> version
'PostgreSQL 9.5...'
"""
- def __init__(self, image: str = "postgres:latest", port: int = 5432,
- username: Optional[str] = None, password: Optional[str] = None,
- dbname: Optional[str] = None, driver: str = "psycopg2", **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "postgres:latest",
+ port: int = 5432,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ dbname: Optional[str] = None,
+ driver: str = "psycopg2",
+ **kwargs,
+ ) -> None:
raise_for_deprecated_parameter(kwargs, "user", "username")
- super(PostgresContainer, self).__init__(image=image, **kwargs)
+ super().__init__(image=image, **kwargs)
self.username = username or os.environ.get("POSTGRES_USER", "test")
self.password = password or os.environ.get("POSTGRES_PASSWORD", "test")
self.dbname = dbname or os.environ.get("POSTGRES_DB", "test")
@@ -59,7 +68,10 @@ def _configure(self) -> None:
def get_connection_url(self, host=None) -> str:
return super()._create_connection_url(
- dialect=f"postgresql+{self.driver}", username=self.username,
- password=self.password, dbname=self.dbname, host=host,
+ dialect=f"postgresql+{self.driver}",
+ username=self.username,
+ password=self.password,
+ dbname=self.dbname,
+ host=host,
port=self.port,
)
diff --git a/modules/postgres/tests/test_postgres.py b/modules/postgres/tests/test_postgres.py
index c00c1b3fe..c1963531c 100644
--- a/modules/postgres/tests/test_postgres.py
+++ b/modules/postgres/tests/test_postgres.py
@@ -1,4 +1,5 @@
import sqlalchemy
+
from testcontainers.postgres import PostgresContainer
diff --git a/modules/rabbitmq/testcontainers/rabbitmq/__init__.py b/modules/rabbitmq/testcontainers/rabbitmq/__init__.py
index ebdb96351..6c26518c8 100644
--- a/modules/rabbitmq/testcontainers/rabbitmq/__init__.py
+++ b/modules/rabbitmq/testcontainers/rabbitmq/__init__.py
@@ -2,6 +2,7 @@
from typing import Optional
import pika
+
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_container_is_ready
@@ -23,8 +24,15 @@ class RabbitMqContainer(DockerContainer):
... connection = pika.BlockingConnection(rabbitmq.get_connection_params())
... channel = connection.channel()
"""
- def __init__(self, image: str = "rabbitmq:latest", port: Optional[int] = None,
- username: Optional[str] = None, password: Optional[str] = None, **kwargs) -> None:
+
+ def __init__(
+ self,
+ image: str = "rabbitmq:latest",
+ port: Optional[int] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ) -> None:
"""Initialize the RabbitMQ test container.
Args:
@@ -33,7 +41,7 @@ def __init__(self, image: str = "rabbitmq:latest", port: Optional[int] = None,
username: RabbitMQ username.
password: RabbitMQ password.
"""
- super(RabbitMqContainer, self).__init__(image=image, **kwargs)
+ super().__init__(image=image, **kwargs)
self.port = port or int(os.environ.get("RABBITMQ_NODE_PORT", 5672))
self.username = username or os.environ.get("RABBITMQ_DEFAULT_USER", "guest")
self.password = password or os.environ.get("RABBITMQ_DEFAULT_PASS", "guest")
diff --git a/modules/rabbitmq/tests/test_rabbitmq.py b/modules/rabbitmq/tests/test_rabbitmq.py
index 08427a861..25c0fbbb9 100644
--- a/modules/rabbitmq/tests/test_rabbitmq.py
+++ b/modules/rabbitmq/tests/test_rabbitmq.py
@@ -1,8 +1,9 @@
-from typing import Optional
import json
+from typing import Optional
import pika
import pytest
+
from testcontainers.rabbitmq import RabbitMqContainer
QUEUE = "test-q"
@@ -12,18 +13,14 @@
@pytest.mark.parametrize(
- "port,username,password",
- [
- (None, None, None), # use the defaults
- (5673, None, None), # test with custom port
- (None, "my_test_user", "my_secret_password"), # test with custom credentials
- ]
+ argnames=["port", "username", "password"],
+ argvalues=[
+ [None, None, None], # use the defaults
+ [5673, None, None], # test with custom port
+ [None, "my_test_user", "my_secret_password"], # test with custom credentials
+ ],
)
-def test_docker_run_rabbitmq(
- port: Optional[int],
- username: Optional[str],
- password: Optional[str]
-):
+def test_docker_run_rabbitmq(port: Optional[int], username: Optional[str], password: Optional[str]):
"""Run rabbitmq test container and use it to deliver a simple message."""
kwargs = {}
if port is not None:
diff --git a/modules/redis/testcontainers/redis/__init__.py b/modules/redis/testcontainers/redis/__init__.py
index 12d473644..fba24be15 100644
--- a/modules/redis/testcontainers/redis/__init__.py
+++ b/modules/redis/testcontainers/redis/__init__.py
@@ -11,11 +11,12 @@
# License for the specific language governing permissions and limitations
# under the License.
+from typing import Optional
+
import redis
from testcontainers.core.container import DockerContainer
from testcontainers.core.utils import raise_for_deprecated_parameter
from testcontainers.core.waiting_utils import wait_container_is_ready
-from typing import Optional
class RedisContainer(DockerContainer):
@@ -31,10 +32,10 @@ class RedisContainer(DockerContainer):
>>> with RedisContainer() as redis_container:
... redis_client = redis_container.get_client()
"""
- def __init__(self, image: str = "redis:latest", port: int = 6379,
- password: Optional[str] = None, **kwargs) -> None:
+
+ def __init__(self, image: str = "redis:latest", port: int = 6379, password: Optional[str] = None, **kwargs) -> None:
raise_for_deprecated_parameter(kwargs, "port_to_expose", "port")
- super(RedisContainer, self).__init__(image, **kwargs)
+ super().__init__(image, **kwargs)
self.port = port
self.password = password
self.with_exposed_ports(self.port)
diff --git a/modules/redis/tests/test_redis.py b/modules/redis/tests/test_redis.py
index 9bf946442..7dc56aa46 100644
--- a/modules/redis/tests/test_redis.py
+++ b/modules/redis/tests/test_redis.py
@@ -8,11 +8,11 @@ def test_docker_run_redis():
with config as redis:
client = redis.get_client()
p = client.pubsub()
- p.subscribe('test')
- client.publish('test', 'new_msg')
+ p.subscribe("test")
+ client.publish("test", "new_msg")
msg = wait_for_message(p)
- assert 'data' in msg
- assert b'new_msg', msg['data']
+ assert "data" in msg
+ assert b"new_msg", msg["data"]
def test_docker_run_redis_with_password():
@@ -27,8 +27,7 @@ def wait_for_message(pubsub, timeout=1, ignore_subscribe_messages=True):
now = time.time()
timeout = now + timeout
while now < timeout:
- message = pubsub.get_message(
- ignore_subscribe_messages=ignore_subscribe_messages)
+ message = pubsub.get_message(ignore_subscribe_messages=ignore_subscribe_messages)
if message is not None:
return message
time.sleep(0.01)
diff --git a/modules/selenium/testcontainers/selenium/__init__.py b/modules/selenium/testcontainers/selenium/__init__.py
index 29caf296b..b46d46155 100644
--- a/modules/selenium/testcontainers/selenium/__init__.py
+++ b/modules/selenium/testcontainers/selenium/__init__.py
@@ -11,22 +11,20 @@
# License for the specific language governing permissions and limitations
# under the License.
+from typing import Optional
+
+import urllib3
+
from selenium import webdriver
from selenium.webdriver.common.options import ArgOptions
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_container_is_ready
-from typing import Optional
-import urllib3
-
-IMAGES = {
- "firefox": "selenium/standalone-firefox-debug:latest",
- "chrome": "selenium/standalone-chrome-debug:latest"
-}
+IMAGES = {"firefox": "selenium/standalone-firefox-debug:latest", "chrome": "selenium/standalone-chrome-debug:latest"}
def get_image_name(capabilities: str) -> str:
- return IMAGES[capabilities['browserName']]
+ return IMAGES[capabilities["browserName"]]
class BrowserWebDriverContainer(DockerContainer):
@@ -46,13 +44,14 @@ class BrowserWebDriverContainer(DockerContainer):
     You can easily change the browser by passing :code:`DesiredCapabilities.FIREFOX` instead.
"""
- def __init__(self, capabilities: str, image: Optional[str] = None, port: int = 4444,
- vnc_port: int = 5900, **kwargs) -> None:
+ def __init__(
+ self, capabilities: str, image: Optional[str] = None, port: int = 4444, vnc_port: int = 5900, **kwargs
+ ) -> None:
self.capabilities = capabilities
self.image = image or get_image_name(capabilities)
self.port = port
self.vnc_port = vnc_port
- super(BrowserWebDriverContainer, self).__init__(image=self.image, **kwargs)
+ super().__init__(image=self.image, **kwargs)
self.with_exposed_ports(self.port, self.vnc_port)
def _configure(self) -> None:
@@ -64,9 +63,7 @@ def _connect(self) -> webdriver.Remote:
options = ArgOptions()
for key, value in self.capabilities.items():
options.set_capability(key, value)
- return webdriver.Remote(
- command_executor=(self.get_connection_url()),
- options=options)
+ return webdriver.Remote(command_executor=(self.get_connection_url()), options=options)
def get_driver(self) -> webdriver.Remote:
return self._connect()
@@ -74,4 +71,4 @@ def get_driver(self) -> webdriver.Remote:
def get_connection_url(self) -> str:
ip = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
- return f'http://{ip}:{port}/wd/hub'
+ return f"http://{ip}:{port}/wd/hub"
diff --git a/modules/selenium/tests/test_selenium.py b/modules/selenium/tests/test_selenium.py
index 94cbaac35..61c1bb326 100644
--- a/modules/selenium/tests/test_selenium.py
+++ b/modules/selenium/tests/test_selenium.py
@@ -1,14 +1,15 @@
import pytest
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.common.by import By
-from testcontainers.selenium import BrowserWebDriverContainer
+
from testcontainers.core.utils import is_arm
+from testcontainers.selenium import BrowserWebDriverContainer
@pytest.mark.parametrize("caps", [DesiredCapabilities.CHROME, DesiredCapabilities.FIREFOX])
def test_webdriver_container_container(caps):
if is_arm():
- pytest.skip('https://github.com/SeleniumHQ/docker-selenium/issues/1076')
+ pytest.skip("https://github.com/SeleniumHQ/docker-selenium/issues/1076")
with BrowserWebDriverContainer(caps).maybe_emulate_amd64() as chrome:
webdriver = chrome.get_driver()
diff --git a/poetry.lock b/poetry.lock
index c69c14480..41eda34bb 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -760,22 +760,6 @@ files = [
[package.extras]
test = ["pytest (>=6)"]
-[[package]]
-name = "flake8"
-version = "6.1.0"
-description = "the modular source code checker: pep8 pyflakes and co"
-optional = false
-python-versions = ">=3.8.1"
-files = [
- {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"},
- {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"},
-]
-
-[package.dependencies]
-mccabe = ">=0.7.0,<0.8.0"
-pycodestyle = ">=2.11.0,<2.12.0"
-pyflakes = ">=3.1.0,<3.2.0"
-
[[package]]
name = "google-api-core"
version = "2.15.0"
@@ -1335,17 +1319,6 @@ files = [
{file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"},
]
-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-optional = false
-python-versions = ">=3.6"
-files = [
- {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
- {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-
[[package]]
name = "mdurl"
version = "0.1.2"
@@ -1386,6 +1359,64 @@ files = [
{file = "more_itertools-10.2.0-py3-none-any.whl", hash = "sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684"},
]
+[[package]]
+name = "mypy"
+version = "1.7.1"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"},
+ {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"},
+ {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"},
+ {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"},
+ {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"},
+ {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"},
+ {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"},
+ {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"},
+ {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"},
+ {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"},
+ {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"},
+ {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"},
+ {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"},
+ {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"},
+ {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"},
+ {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"},
+ {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"},
+ {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"},
+ {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"},
+ {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"},
+ {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"},
+ {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"},
+ {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"},
+ {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"},
+ {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"},
+ {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"},
+ {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=4.1.0"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+mypyc = ["setuptools (>=50)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
[[package]]
name = "neo4j"
version = "5.16.0"
@@ -1694,17 +1725,6 @@ files = [
[package.dependencies]
pyasn1 = ">=0.4.6,<0.6.0"
-[[package]]
-name = "pycodestyle"
-version = "2.11.1"
-description = "Python style guide checker"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
- {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
-]
-
[[package]]
name = "pycparser"
version = "2.21"
@@ -1757,17 +1777,6 @@ files = [
{file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"},
]
-[[package]]
-name = "pyflakes"
-version = "3.1.0"
-description = "passive checker of Python programs"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"},
- {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"},
-]
-
[[package]]
name = "pygments"
version = "2.17.2"
@@ -2771,7 +2780,7 @@ urllib3 = ">=1.26.0"
name = "typing-extensions"
version = "4.9.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
-optional = true
+optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
@@ -2995,4 +3004,4 @@ selenium = ["selenium"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
-content-hash = "9581bf8b84748e77f2c480e320307fe223cedc7eee614512b9ee5de8fd562bd3"
+content-hash = "89891a5aeea49686e42fd95780d6aa703a1a13d8483a1a965aa32603b39f3a3d"
diff --git a/pyproject.toml b/pyproject.toml
index 9139e3b25..b825d8ebd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -111,9 +111,9 @@ selenium = ["selenium"]
pytest = "7.4.3"
pytest-cov = "4.1.0"
sphinx = "^7.2.6"
-flake8 = "^6.1.0"
pg8000 = "*"
twine = "^4.0.2"
+mypy = "1.7.1"
[[tool.poetry.source]]
name = "PyPI"
@@ -139,6 +139,119 @@ exclude_lines = [
"raise NotImplementedError" # TODO: used in core/generic.py, not sure we need DbContainer
]
+[tool.ruff.flake8-type-checking]
+strict = true
+
+[tool.ruff]
+target-version = "py39"
+line-length = 120
+fix = true
+fixable = ["I"]
+src = ["core", "modules/*"]
+exclude = ["**/tests/**/*.py"]
+select = [
+ # flake8-2020
+ "YTT",
+ # flake8-bugbear
+ "B",
+ # flake8-builtins
+ "A",
+ # flake8-comprehensions
+ "C4",
+ # flake8-debugger
+ "T10",
+ # flake8-print
+ "T20",
+ # flake8-pytest-style
+ "PT",
+ # flake8-simplify
+ "SIM",
+ # flake8-tidy-imports
+ "TID",
+ # flake8-type-checking
+ "TCH",
+ # isort
+ "I",
+ # mccabe
+ "C90",
+ # pycodestyle
+ "E", "W",
+ # pyflakes
+ "F",
+ # pygrep-hooks
+ "PGH",
+ # pyupgrade
+ "UP",
+ # ruff
+ "RUF",
+ # TODO: security, enable via line below
+ # "S",
+]
+ignore = [
+ # line too long (already checked by black)
+ "E501",
+    # missing __init__.py is fine (we are using namespace packages)
+ "INP001"
+]
+
+
+[tool.mypy]
+python_version = "3.9"
+namespace_packages = true
+explicit_package_bases = true
+pretty = true
+show_error_codes = true
+strict = true
+fast_module_lookup = true
+modules = ["testcontainers.core"]
+mypy_path = [
+ "core",
+# "modules/arangodb",
+# "modules/azurite",
+# "modules/clickhouse",
+# "modules/elasticsearch",
+# "modules/google",
+# "modules/k3s",
+# "modules/kafka",
+# "modules/keycloak",
+# "modules/localstack",
+# "modules/minio",
+# "modules/mongodb",
+# "modules/mssql",
+# "modules/mysql",
+# "modules/neo4j",
+# "modules/nginx",
+# "modules/opensearch",
+# "modules/oracle",
+# "modules/postgres",
+# "modules/rabbitmq",
+# "modules/redis",
+# "modules/selenium"
+]
+enable_error_code = [
+ "ignore-without-code",
+ "redundant-expr",
+ "truthy-bool",
+]
+
+[[tool.mypy.overrides]]
+module = ['tests.*']
+# in pytest we allow fixtures to be more relaxed, though we check the untyped functions
+check_untyped_defs = true
+disable_error_code = [
+ 'no-untyped-def'
+]
+
+[[tool.mypy.overrides]]
+module = ['docker.*']
+# docker still doesn't have type annotations (not even 7.0)
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = ['wrapt.*']
+# wrapt doesn't have type annotations
+ignore_missing_imports = true
+
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
diff --git a/requirements/macos-latest-3.10.txt b/requirements/macos-latest-3.10.txt
deleted file mode 100644
index d36feeeb6..000000000
--- a/requirements/macos-latest-3.10.txt
+++ /dev/null
@@ -1,442 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.10
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # twine
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.2.6
- # via
- # -r requirements.in
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
-sphinxcontrib-applehelp==1.0.7
- # via sphinx
-sphinxcontrib-devhelp==1.0.5
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.6
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # sqlalchemy
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/ubuntu-latest-3.10.txt b/requirements/ubuntu-latest-3.10.txt
deleted file mode 100644
index bc349e7c9..000000000
--- a/requirements/ubuntu-latest-3.10.txt
+++ /dev/null
@@ -1,449 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.10
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
- # secretstorage
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # twine
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jeepney==0.8.0
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-secretstorage==3.3.3
- # via keyring
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.2.6
- # via
- # -r requirements.in
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
-sphinxcontrib-applehelp==1.0.7
- # via sphinx
-sphinxcontrib-devhelp==1.0.5
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.6
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # sqlalchemy
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/ubuntu-latest-3.11.txt b/requirements/ubuntu-latest-3.11.txt
deleted file mode 100644
index 6dfda74d1..000000000
--- a/requirements/ubuntu-latest-3.11.txt
+++ /dev/null
@@ -1,438 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.11
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
- # secretstorage
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # twine
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jeepney==0.8.0
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-secretstorage==3.3.3
- # via keyring
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.2.6
- # via
- # -r requirements.in
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
-sphinxcontrib-applehelp==1.0.7
- # via sphinx
-sphinxcontrib-devhelp==1.0.5
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.6
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # sqlalchemy
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/ubuntu-latest-3.7.txt b/requirements/ubuntu-latest-3.7.txt
deleted file mode 100644
index c3ebc37bb..000000000
--- a/requirements/ubuntu-latest-3.7.txt
+++ /dev/null
@@ -1,467 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.7
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt --resolver=backtracking
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-backports-zoneinfo==0.2.1
- # via tzlocal
-bleach==6.0.0
- # via readme-renderer
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.15.1
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.2.7
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
- # secretstorage
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.3.0
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.19
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.7.0
- # via
- # attrs
- # keyring
- # pg8000
- # pluggy
- # pytest
- # redis
- # scramp
- # sphinx
- # sqlalchemy
- # twine
-importlib-resources==5.12.0
- # via keyring
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.2.3
- # via keyring
-jeepney==0.8.0
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.1.1
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==2.2.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==9.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # sphinx
-pg8000==1.29.8
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.2.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.24.4
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.5.6
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.6.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # babel
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==37.3
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-secretstorage==3.3.3
- # via keyring
-selenium==4.11.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # bleach
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==5.3.0
- # via -r requirements.in
-sphinxcontrib-applehelp==1.0.2
- # via sphinx
-sphinxcontrib-devhelp==1.0.2
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.0
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.3
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.5
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.22.2
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.7.1
- # via
- # argon2-cffi
- # async-timeout
- # azure-core
- # azure-storage-blob
- # h11
- # importlib-metadata
- # markdown-it-py
- # pyjwt
- # redis
- # rich
- # sqlalchemy
-tzlocal==5.1
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-webencodings==0.5.1
- # via bleach
-websocket-client==1.6.1
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.15.0
- # via
- # importlib-metadata
- # importlib-resources
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/ubuntu-latest-3.8.txt b/requirements/ubuntu-latest-3.8.txt
deleted file mode 100644
index 605b3cf1c..000000000
--- a/requirements/ubuntu-latest-3.8.txt
+++ /dev/null
@@ -1,452 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.8
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-backports-zoneinfo==0.2.1
- # via tzlocal
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
- # secretstorage
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # sphinx
- # twine
-importlib-resources==6.1.1
- # via keyring
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jeepney==0.8.0
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # babel
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-secretstorage==3.3.3
- # via keyring
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.1.2
- # via -r requirements.in
-sphinxcontrib-applehelp==1.0.4
- # via sphinx
-sphinxcontrib-devhelp==1.0.2
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.1
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.3
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.5
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # rich
- # sqlalchemy
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via
- # importlib-metadata
- # importlib-resources
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/ubuntu-latest-3.9.txt b/requirements/ubuntu-latest-3.9.txt
deleted file mode 100644
index 970a09211..000000000
--- a/requirements/ubuntu-latest-3.9.txt
+++ /dev/null
@@ -1,450 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
- # secretstorage
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # sphinx
- # twine
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jeepney==0.8.0
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # clickhouse-driver
- # neo4j
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-secretstorage==3.3.3
- # via keyring
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.2.6
- # via
- # -r requirements.in
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
-sphinxcontrib-applehelp==1.0.7
- # via sphinx
-sphinxcontrib-devhelp==1.0.5
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.6
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # sqlalchemy
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/requirements/windows-latest-3.10.txt b/requirements/windows-latest-3.10.txt
deleted file mode 100644
index b9f41c654..000000000
--- a/requirements/windows-latest-3.10.txt
+++ /dev/null
@@ -1,453 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.10
-# by the following command:
-#
-# pip-compile --output-file=requirements.txt
-#
--e file:meta
- # via -r requirements.in
--e file:arangodb
- # via -r requirements.in
--e file:azurite
- # via -r requirements.in
--e file:clickhouse
- # via -r requirements.in
--e file:core
- # via
- # -r requirements.in
- # testcontainers
- # testcontainers-arangodb
- # testcontainers-azurite
- # testcontainers-clickhouse
- # testcontainers-elasticsearch
- # testcontainers-gcp
- # testcontainers-k3s
- # testcontainers-kafka
- # testcontainers-keycloak
- # testcontainers-localstack
- # testcontainers-minio
- # testcontainers-mongodb
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-neo4j
- # testcontainers-nginx
- # testcontainers-opensearch
- # testcontainers-oracle
- # testcontainers-postgres
- # testcontainers-rabbitmq
- # testcontainers-redis
- # testcontainers-selenium
--e file:elasticsearch
- # via -r requirements.in
--e file:google
- # via -r requirements.in
--e file:k3s
- # via -r requirements.in
--e file:kafka
- # via -r requirements.in
--e file:keycloak
- # via -r requirements.in
--e file:localstack
- # via -r requirements.in
--e file:minio
- # via -r requirements.in
--e file:mongodb
- # via -r requirements.in
--e file:mssql
- # via -r requirements.in
--e file:mysql
- # via -r requirements.in
--e file:neo4j
- # via -r requirements.in
--e file:nginx
- # via -r requirements.in
--e file:opensearch
- # via -r requirements.in
--e file:oracle
- # via -r requirements.in
--e file:postgres
- # via -r requirements.in
--e file:rabbitmq
- # via -r requirements.in
--e file:redis
- # via -r requirements.in
--e file:selenium
- # via -r requirements.in
-alabaster==0.7.13
- # via sphinx
-argon2-cffi==23.1.0
- # via minio
-argon2-cffi-bindings==21.2.0
- # via argon2-cffi
-asn1crypto==1.5.1
- # via scramp
-async-timeout==4.0.3
- # via redis
-attrs==23.1.0
- # via
- # outcome
- # trio
-azure-core==1.29.5
- # via azure-storage-blob
-azure-storage-blob==12.19.0
- # via testcontainers-azurite
-babel==2.13.1
- # via sphinx
-boto3==1.33.1
- # via testcontainers-localstack
-botocore==1.33.1
- # via
- # boto3
- # s3transfer
-cachetools==5.3.2
- # via google-auth
-certifi==2023.11.17
- # via
- # kubernetes
- # minio
- # opensearch-py
- # requests
- # selenium
-cffi==1.16.0
- # via
- # argon2-cffi-bindings
- # cryptography
- # trio
-charset-normalizer==3.3.2
- # via requests
-clickhouse-driver==0.2.6
- # via testcontainers-clickhouse
-colorama==0.4.6
- # via
- # pytest
- # sphinx
-coverage[toml]==7.3.2
- # via
- # coverage
- # pytest-cov
-cryptography==36.0.2
- # via
- # -r requirements.in
- # azure-storage-blob
- # pymysql
-cx-oracle==8.3.0
- # via testcontainers-oracle
-deprecation==2.1.0
- # via python-keycloak
-dnspython==2.4.2
- # via pymongo
-docker==6.1.3
- # via testcontainers-core
-docutils==0.20.1
- # via
- # readme-renderer
- # sphinx
-ecdsa==0.18.0
- # via python-jose
-entrypoints==0.3
- # via flake8
-exceptiongroup==1.2.0
- # via
- # pytest
- # trio
- # trio-websocket
-flake8==3.7.9
- # via -r requirements.in
-google-api-core[grpc]==2.14.0
- # via
- # google-api-core
- # google-cloud-pubsub
-google-auth==2.23.4
- # via
- # google-api-core
- # kubernetes
-google-cloud-pubsub==2.18.4
- # via testcontainers-gcp
-googleapis-common-protos[grpc]==1.61.0
- # via
- # google-api-core
- # grpc-google-iam-v1
- # grpcio-status
-greenlet==3.0.1
- # via sqlalchemy
-grpc-google-iam-v1==0.12.7
- # via google-cloud-pubsub
-grpcio==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
-grpcio-status==1.59.3
- # via
- # google-api-core
- # google-cloud-pubsub
-h11==0.14.0
- # via wsproto
-idna==3.6
- # via
- # requests
- # trio
-imagesize==1.4.1
- # via sphinx
-importlib-metadata==6.8.0
- # via
- # keyring
- # python-arango
- # twine
-iniconfig==2.0.0
- # via pytest
-isodate==0.6.1
- # via azure-storage-blob
-jaraco-classes==3.3.0
- # via keyring
-jinja2==3.1.2
- # via sphinx
-jmespath==1.0.1
- # via
- # boto3
- # botocore
-kafka-python==2.0.2
- # via testcontainers-kafka
-keyring==24.3.0
- # via twine
-kubernetes==28.1.0
- # via testcontainers-k3s
-markdown-it-py==3.0.0
- # via rich
-markupsafe==2.1.3
- # via jinja2
-mccabe==0.6.1
- # via flake8
-mdurl==0.1.2
- # via markdown-it-py
-minio==7.2.0
- # via testcontainers-minio
-more-itertools==10.1.0
- # via jaraco-classes
-neo4j==5.15.0
- # via testcontainers-neo4j
-nh3==0.2.14
- # via readme-renderer
-oauthlib==3.2.2
- # via
- # kubernetes
- # requests-oauthlib
-opensearch-py==2.4.2
- # via testcontainers-opensearch
-outcome==1.3.0.post0
- # via trio
-packaging==23.2
- # via
- # deprecation
- # docker
- # pytest
- # python-arango
- # sphinx
-pg8000==1.30.3
- # via -r requirements.in
-pika==1.3.2
- # via testcontainers-rabbitmq
-pkginfo==1.9.6
- # via twine
-pluggy==1.3.0
- # via pytest
-proto-plus==1.22.3
- # via google-cloud-pubsub
-protobuf==4.25.1
- # via
- # google-api-core
- # google-cloud-pubsub
- # googleapis-common-protos
- # grpc-google-iam-v1
- # grpcio-status
- # proto-plus
-psycopg2-binary==2.9.9
- # via testcontainers-postgres
-pyasn1==0.5.1
- # via
- # pyasn1-modules
- # python-jose
- # rsa
-pyasn1-modules==0.3.0
- # via google-auth
-pycodestyle==2.5.0
- # via flake8
-pycparser==2.21
- # via cffi
-pycryptodome==3.19.0
- # via minio
-pyflakes==2.1.1
- # via flake8
-pygments==2.17.2
- # via
- # readme-renderer
- # rich
- # sphinx
-pyjwt==2.8.0
- # via python-arango
-pymongo==4.6.0
- # via testcontainers-mongodb
-pymssql==2.2.10
- # via testcontainers-mssql
-pymysql[rsa]==1.1.0
- # via testcontainers-mysql
-pysocks==1.7.1
- # via urllib3
-pytest==7.4.3
- # via
- # -r requirements.in
- # pytest-cov
-pytest-cov==4.1.0
- # via -r requirements.in
-python-arango==7.8.1
- # via testcontainers-arangodb
-python-dateutil==2.8.2
- # via
- # botocore
- # kubernetes
- # opensearch-py
- # pg8000
-python-jose==3.3.0
- # via python-keycloak
-python-keycloak==3.7.0
- # via testcontainers-keycloak
-pytz==2023.3.post1
- # via
- # clickhouse-driver
- # neo4j
-pywin32==306
- # via docker
-pywin32-ctypes==0.2.2
- # via keyring
-pyyaml==6.0.1
- # via
- # kubernetes
- # testcontainers-k3s
-readme-renderer==42.0
- # via twine
-redis==5.0.1
- # via testcontainers-redis
-requests==2.31.0
- # via
- # azure-core
- # docker
- # google-api-core
- # kubernetes
- # opensearch-py
- # python-arango
- # python-keycloak
- # requests-oauthlib
- # requests-toolbelt
- # sphinx
- # twine
-requests-oauthlib==1.3.1
- # via kubernetes
-requests-toolbelt==1.0.0
- # via
- # python-arango
- # python-keycloak
- # twine
-rfc3986==2.0.0
- # via twine
-rich==13.7.0
- # via twine
-rsa==4.9
- # via
- # google-auth
- # python-jose
-s3transfer==0.8.0
- # via boto3
-scramp==1.4.4
- # via pg8000
-selenium==4.15.2
- # via testcontainers-selenium
-six==1.16.0
- # via
- # azure-core
- # ecdsa
- # isodate
- # kubernetes
- # opensearch-py
- # python-dateutil
-sniffio==1.3.0
- # via trio
-snowballstemmer==2.2.0
- # via sphinx
-sortedcontainers==2.4.0
- # via trio
-sphinx==7.2.6
- # via
- # -r requirements.in
- # sphinxcontrib-applehelp
- # sphinxcontrib-devhelp
- # sphinxcontrib-htmlhelp
- # sphinxcontrib-qthelp
- # sphinxcontrib-serializinghtml
-sphinxcontrib-applehelp==1.0.7
- # via sphinx
-sphinxcontrib-devhelp==1.0.5
- # via sphinx
-sphinxcontrib-htmlhelp==2.0.4
- # via sphinx
-sphinxcontrib-jsmath==1.0.1
- # via sphinx
-sphinxcontrib-qthelp==1.0.6
- # via sphinx
-sphinxcontrib-serializinghtml==1.1.9
- # via sphinx
-sqlalchemy==2.0.23
- # via
- # testcontainers-mssql
- # testcontainers-mysql
- # testcontainers-oracle
- # testcontainers-postgres
-tomli==2.0.1
- # via
- # coverage
- # pytest
-trio==0.23.1
- # via
- # selenium
- # trio-websocket
-trio-websocket==0.11.1
- # via selenium
-twine==4.0.2
- # via -r requirements.in
-typing-extensions==4.8.0
- # via
- # azure-core
- # azure-storage-blob
- # sqlalchemy
-tzdata==2023.3
- # via tzlocal
-tzlocal==5.2
- # via clickhouse-driver
-urllib3[socks]==1.26.18
- # via
- # botocore
- # docker
- # kubernetes
- # minio
- # opensearch-py
- # python-arango
- # requests
- # selenium
- # testcontainers-core
- # twine
-websocket-client==1.6.4
- # via
- # docker
- # kubernetes
-wheel==0.42.0
- # via -r requirements.in
-wrapt==1.16.0
- # via testcontainers-core
-wsproto==1.2.0
- # via trio-websocket
-zipp==3.17.0
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
diff --git a/scripts/diagnostics.py b/scripts/diagnostics.py
new file mode 100644
index 000000000..ef8bb6507
--- /dev/null
+++ b/scripts/diagnostics.py
@@ -0,0 +1,28 @@
+import json
+
+from testcontainers.core import utils
+from testcontainers.core.container import DockerContainer
+
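+# Gather basic facts about the host environment using the core utils helpers.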
+result = {
+ "is_linux": utils.is_linux(),
+ "is_mac": utils.is_mac(),
+ "is_windows": utils.is_windows(),
+ "inside_container": utils.inside_container(),
+ "default_gateway_ip": utils.default_gateway_ip(),
+}
+
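+# Start a short-lived Alpine container to exercise the Docker client networking helpers.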
+with DockerContainer("alpine:latest") as container:
+ client = container.get_docker_client()
+ result.update(
+ {
+ "container_host_ip": container.get_container_host_ip(),
+ "docker_client_gateway_ip": client.gateway_ip(container._container.id),
+ "docker_client_bridge_ip": client.bridge_ip(container._container.id),
+ "docker_client_host": client.host(),
+ }
+ )
+
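+# Emit the collected diagnostics as JSON so the output can be attached to issue reports.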
+print(json.dumps(result, indent=2)) # noqa: T201
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index d673938d4..000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,3 +0,0 @@
-[flake8]
-max-line-length = 100
-exclude = .git,__pycache__,build,dist,venv,.venv