diff --git a/actions.md b/actions.md
index 4268c4e..736c861 100644
--- a/actions.md
+++ b/actions.md
@@ -12,6 +12,13 @@ These are the core operations that users can request the system to perform.
   * **Required Parameters**: The AWS account name and the ElastiCache cluster identifier.
   * **Usage Example (CLI)**: `automated-actions external-resource-flush-elasticache --account aws-account-name --identifier my-elasticache-cluster`
 
+* **`external-resource-rds-logs`**:
+  * **Description**: Retrieves logs from an Amazon RDS instance and stores them in an S3 bucket.
+  * **Use Case**: Typically used for troubleshooting database issues, analyzing performance problems, or collecting logs for audit purposes.
+  * **Required Parameters**: The AWS account name and the RDS instance identifier.
+  * **Optional Parameters**: Expiration time in days (1-7, default: 7), S3 target file name (defaults to '{account}-{identifier}.zip').
+  * **Usage Example (CLI)**: `automated-actions external-resource-rds-logs --account aws-account-name --identifier my-rds-instance --expiration-days 5 --s3-file-name my-custom-logs.zip`
+
 * **`external-resource-rds-reboot`**:
   * **Description**: Reboots an Amazon RDS instance.
   * **Use Case**: Typically used for maintenance, applying updates, or resolving performance issues.
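
A rough sketch of driving the same action through the generated Python client instead of the CLI. The `AuthenticatedClient` constructor arguments are assumptions here (the real constructor lives in `automated_actions_client.client`); the `sync` signature matches the generated module added later in this diff:

```python
# Hedged sketch: trigger the new action via the generated client.
# AuthenticatedClient's base_url/token arguments are assumptions.
from automated_actions_client import AuthenticatedClient  # assumed import path
from automated_actions_client.api.actions.external_resource_rds_logs import sync

client = AuthenticatedClient(
    base_url="https://automated-actions.example.com", token="..."
)
action = sync(
    account="aws-account-name",
    identifier="my-rds-instance",
    expiration_days=5,
    s3_file_name="my-custom-logs.zip",
    client=client,
)
# The request is accepted with 202; the work itself runs asynchronously.
print(action.action_id if action else "request failed")
```
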
diff --git a/docker-compose.yml b/docker-compose.yml
index 03b7e69..9db194b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,7 +8,7 @@ services:
     environment:
       - DEBUG=${DEBUG-}
       - DOCKER_HOST=unix:///var/run/docker.sock
-      - SERVICES=${SERVICES:-sqs,dynamodb}
+      - SERVICES=${SERVICES:-sqs,dynamodb,s3}
       - PERSISTENCE=${PERSISTENCE:-0}
     volumes:
       - "${LOCALSTACK_VOLUME_DIR:-./.localstack_volume}:/var/lib/localstack"
@@ -83,6 +83,7 @@ services:
       - AA_BROKER_URL=sqs://localstack:4566
       - AA_SQS_URL=http://localstack:4566/000000000000/automated-actions
       - AA_DYNAMODB_URL=http://localstack:4566
+      - AA_EXTERNAL_RESOURCE_RDS_LOGS__S3_URL=http://localstack:4566
     build:
       context: .
       dockerfile: Dockerfile
diff --git a/localstack/init-s3.sh b/localstack/init-s3.sh
new file mode 100755
index 0000000..e1fcff4
--- /dev/null
+++ b/localstack/init-s3.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# create bucket
+awslocal s3api create-bucket --bucket automated-actions
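
A quick way to confirm the LocalStack setup works end to end is to list buckets against the local endpoint. This is a hedged sketch using plain boto3 with the dummy "localstack" credentials that the new `ExternalResourceRdsLogsConfig` (further down in this diff) defaults to:

```python
# Hedged sketch: verify the bucket created by init-s3.sh exists in LocalStack.
import boto3

s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:4566",  # LocalStack's default edge port
    aws_access_key_id="localstack",
    aws_secret_access_key="localstack",
    region_name="us-east-1",
)
assert "automated-actions" in [b["Name"] for b in s3.list_buckets()["Buckets"]]
```
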
diff --git a/packages/automated_actions/automated_actions/api/v1/views/external_resource.py b/packages/automated_actions/automated_actions/api/v1/views/external_resource.py
index f12fa4a..16d5711 100644
--- a/packages/automated_actions/automated_actions/api/v1/views/external_resource.py
+++ b/packages/automated_actions/automated_actions/api/v1/views/external_resource.py
@@ -7,26 +7,84 @@ from automated_actions.celery.external_resource.tasks import (
     external_resource_flush_elasticache as external_resource_flush_elasticache_task,
 )
+from automated_actions.celery.external_resource.tasks import (
+    external_resource_rds_logs as external_resource_rds_logs_task,
+)
 from automated_actions.celery.external_resource.tasks import (
     external_resource_rds_reboot as external_resource_rds_reboot_task,
 )
 from automated_actions.celery.external_resource.tasks import (
     external_resource_rds_snapshot as external_resource_rds_snapshot_task,
 )
-from automated_actions.db.models import (
-    Action,
-    ActionSchemaOut,
-)
+from automated_actions.db.models import Action, ActionSchemaOut
 from automated_actions.db.models._action import ActionManager, get_action_manager
 
 router = APIRouter()
 log = logging.getLogger(__name__)
 
-EXTERNAL_RESOURCE_RDS_REBOOT_ACTION_ID = "external-resource-rds-reboot"
 EXTERNAL_RESOURCE_FLUSH_ELASTICACHE_ACTION_ID = "external-resource-flush-elasticache"
+EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID = "external-resource-rds-logs"
+EXTERNAL_RESOURCE_RDS_REBOOT_ACTION_ID = "external-resource-rds-reboot"
 EXTERNAL_RESOURCE_RDS_SNAPSHOT_ACTION_ID = "external-resource-rds-snapshot"
 
 
+def get_action_external_resource_rds_logs(
+    action_mgr: Annotated[ActionManager, Depends(get_action_manager)], user: UserDep
+) -> Action:
+    """Get a new action object for the user.
+
+    Args:
+        action_mgr: The action manager dependency.
+        user: The user dependency.
+
+    Returns:
+        A new Action object.
+    """
+    return action_mgr.create_action(
+        name=EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID, owner=user
+    )
+
+
+@router.post(
+    "/external-resource/rds-logs/{account}/{identifier}",
+    operation_id=EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID,
+    status_code=202,
+    tags=["Actions"],
+)
+def external_resource_rds_logs(
+    account: Annotated[str, Path(description="AWS account name")],
+    identifier: Annotated[str, Path(description="RDS instance identifier")],
+    action: Annotated[Action, Depends(get_action_external_resource_rds_logs)],
+    expiration_days: Annotated[
+        int, Query(description="Expiration time in days", ge=1, le=7)
+    ] = 7,
+    s3_file_name: Annotated[
+        str | None,
+        Query(
+            description="The S3 target file name. Defaults to '{account}-{identifier}.zip' if not provided."
+        ),
+    ] = None,
+) -> ActionSchemaOut:
+    """Get RDS logs for an instance.
+
+    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in an S3 bucket.
+    """
+    log.info(
+        f"Getting logs for RDS {identifier} in AWS account {account}. action_id={action.action_id}"
+    )
+    external_resource_rds_logs_task.apply_async(
+        kwargs={
+            "account": account,
+            "identifier": identifier,
+            "expiration_days": expiration_days,
+            "s3_file_name": s3_file_name,
+            "action": action,
+        },
+        task_id=action.action_id,
+    )
+    return action.dump()
+
+
 def get_action_external_resource_rds_reboot(
     action_mgr: Annotated[ActionManager, Depends(get_action_manager)], user: UserDep
 ) -> Action:
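
The endpoint answers 202 immediately and hands the work to Celery; the returned `action_id` is what callers poll for the eventual result. A hedged sketch of calling it directly (the base URL and authentication are deployment-specific and omitted here; the `/api/v1` prefix matches the generated client below):

```python
# Hedged sketch of a direct HTTP call to the new route; auth is omitted.
import httpx

resp = httpx.post(
    "http://localhost:8080/api/v1/external-resource/rds-logs"
    "/aws-account-name/my-rds-instance",
    params={"expiration_days": 5, "s3_file_name": "my-custom-logs.zip"},
)
assert resp.status_code == 202  # accepted; the Celery task runs asynchronously
action_id = resp.json()["action_id"]  # poll the action for the download URL
```
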
diff --git a/packages/automated_actions/automated_actions/celery/automated_action_task.py b/packages/automated_actions/automated_actions/celery/automated_action_task.py
index 921d1d5..9905532 100644
--- a/packages/automated_actions/automated_actions/celery/automated_action_task.py
+++ b/packages/automated_actions/automated_actions/celery/automated_action_task.py
@@ -29,12 +29,12 @@ def before_start(  # noqa: PLR6301
 
     def on_success(  # noqa: PLR6301
         self,
-        retval: Any,  # noqa: ARG002
+        retval: Any,
        task_id: str,  # noqa: ARG002
         args: tuple,  # noqa: ARG002
         kwargs: dict,
     ) -> None:
-        result = "ok"
+        result = "ok" if retval is None else str(retval)
         kwargs["action"].set_final_state(
             status=ActionStatus.SUCCESS,
             result=result,
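
This change lets a task surface a human-readable result: whatever the task returns is persisted on the action, while tasks that return nothing keep the old "ok". A minimal restatement of the new behavior:

```python
# Minimal restatement of the new on_success logic: str(retval) is stored as
# the action result unless the task returned None.
def derive_result(retval: object) -> str:
    return "ok" if retval is None else str(retval)

assert derive_result(None) == "ok"
assert derive_result("Download the RDS logs from ...").startswith("Download")
```
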
diff --git a/packages/automated_actions/automated_actions/celery/external_resource/_elasticache_flush.py b/packages/automated_actions/automated_actions/celery/external_resource/_elasticache_flush.py
new file mode 100644
index 0000000..fa5cd9c
--- /dev/null
+++ b/packages/automated_actions/automated_actions/celery/external_resource/_elasticache_flush.py
@@ -0,0 +1,85 @@
+from automated_actions_utils.cluster_connection import get_cluster_connection_data
+from automated_actions_utils.external_resource import (
+    ExternalResource,
+    ExternalResourceProvider,
+    get_external_resource,
+    settings,
+)
+from automated_actions_utils.openshift_client import (
+    OpenshiftClient,
+    SecretKeyRef,
+    job_builder,
+)
+
+from automated_actions.celery.app import app
+from automated_actions.celery.automated_action_task import AutomatedActionTask
+from automated_actions.db.models import Action
+
+
+class ExternalResourceFlushElastiCache:
+    def __init__(
+        self, action: Action, oc: OpenshiftClient, elasticache: ExternalResource
+    ) -> None:
+        self.action = action
+        self.oc = oc
+        self.elasticache = elasticache
+
+    def run(
+        self,
+        image: str,
+        command: list[str],
+        args: list[str],
+        secret_name: str,
+        env_secret_mappings: dict[str, str],
+    ) -> None:
+        job = job_builder(
+            image=image,
+            command=command,
+            args=args,
+            job_name_prefix="flush-elasticache-",
+            annotations={
+                "automated-actions.action_id": str(self.action.action_id),
+            },
+            env_secrets={
+                key: SecretKeyRef(
+                    secret=secret_name,
+                    key=value,
+                )
+                for key, value in env_secret_mappings.items()
+            },
+        )
+        return self.oc.run_job(namespace=self.elasticache.namespace, job=job)
+
+
+@app.task(base=AutomatedActionTask)
+def external_resource_flush_elasticache(
+    account: str,
+    identifier: str,
+    *,
+    action: Action,
+) -> None:
+    elasticache = get_external_resource(
+        account=account,
+        identifier=identifier,
+        provider=ExternalResourceProvider.ELASTICACHE,
+    )
+
+    cluster_connection = get_cluster_connection_data(elasticache.cluster, settings)
+    oc = OpenshiftClient(
+        server_url=cluster_connection.url, token=cluster_connection.token
+    )
+    if not elasticache.output_resource_name:
+        raise ValueError(
+            f"Output resource name not defined for {elasticache.identifier} in {elasticache.namespace} namespace.",
+        )
+    ExternalResourceFlushElastiCache(
+        action=action,
+        oc=oc,
+        elasticache=elasticache,
+    ).run(
+        image=settings.external_resource_elasticache.image,
+        command=settings.external_resource_elasticache.flush_command,
+        args=settings.external_resource_elasticache.flush_command_args,
+        secret_name=elasticache.output_resource_name,
+        env_secret_mappings=settings.external_resource_elasticache.env_secret_mappings,
+    )
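
For clarity on the `env_secrets` translation in `run()`: each env-var-to-secret-key mapping becomes a `SecretKeyRef` against the resource's output secret, so the flush Job gets its credentials injected from OpenShift. A hedged illustration with hypothetical mapping values:

```python
# Hedged illustration; the mapping and secret name below are hypothetical.
from automated_actions_utils.openshift_client import SecretKeyRef

env_secret_mappings = {"REDIS_PASSWORD": "auth_token"}  # hypothetical mapping
secret_name = "my-elasticache-secret"  # hypothetical output_resource_name
env_secrets = {
    key: SecretKeyRef(secret=secret_name, key=value)
    for key, value in env_secret_mappings.items()
}
```
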
diff --git a/packages/automated_actions/automated_actions/celery/external_resource/_rds_logs.py b/packages/automated_actions/automated_actions/celery/external_resource/_rds_logs.py
new file mode 100644
index 0000000..c3aa98a
--- /dev/null
+++ b/packages/automated_actions/automated_actions/celery/external_resource/_rds_logs.py
@@ -0,0 +1,121 @@
+import logging
+
+from automated_actions_utils.aws_api import (
+    AWSApi,
+    AWSStaticCredentials,
+    LogStream,
+    get_aws_credentials,
+)
+from automated_actions_utils.external_resource import (
+    ExternalResource,
+    ExternalResourceProvider,
+    get_external_resource,
+)
+
+from automated_actions.celery.app import app
+from automated_actions.celery.automated_action_task import AutomatedActionTask
+from automated_actions.config import settings
+from automated_actions.db.models import Action
+
+log = logging.getLogger(__name__)
+
+
+class ExternalResourceRDSLogs:
+    """Class to handle RDS logs retrieval."""
+
+    def __init__(
+        self, aws_api: AWSApi, rds: ExternalResource, s3_bucket: str, s3_prefix: str
+    ) -> None:
+        self.aws_api = aws_api
+        self.rds = rds
+        self.s3_bucket = s3_bucket
+        self.s3_prefix = s3_prefix
+
+    def run(
+        self,
+        target_aws_api: AWSApi,
+        expiration_days: int,
+        s3_file_name: str | None = None,
+    ) -> str | None:
+        """Retrieve RDS logs and upload them to S3 as a zip file."""
+        s3_key = (
+            s3_file_name
+            or f"{self.s3_prefix}/{self.rds.account.name}-{self.rds.identifier}.zip"
+        )
+        # append .zip to the filename if not present
+        if not s3_key.endswith(".zip"):
+            s3_key += ".zip"
+
+        log.info(
+            f"Saving RDS logs for {self.rds.account.name}/{self.rds.identifier} to S3 {self.s3_bucket}/{s3_key}"
+        )
+        log_streams = [
+            LogStream(
+                name=log_file,
+                content=self.aws_api.stream_rds_log(
+                    identifier=self.rds.identifier, log_file=log_file
+                ),
+            )
+            for log_file in self.aws_api.list_rds_logs(self.rds.identifier)
+        ]
+        if not log_streams:
+            log.warning(
+                f"No logs found for RDS {self.rds.identifier} in account {self.rds.account.name}"
+            )
+            return None
+        self.aws_api.stream_rds_logs_to_s3_zip(
+            log_streams=log_streams,
+            bucket=self.s3_bucket,
+            s3_key=s3_key,
+            target_aws_api=target_aws_api,
+        )
+        return self.aws_api.generate_s3_download_url(
+            bucket=self.s3_bucket,
+            s3_key=s3_key,
+            expiration_secs=expiration_days * 24 * 3600,
+        )
+
+
+@app.task(base=AutomatedActionTask)
+def external_resource_rds_logs(
+    account: str,
+    identifier: str,
+    expiration_days: int,
+    action: Action,  # noqa: ARG001
+    s3_file_name: str | None = None,
+) -> str:
+    rds = get_external_resource(
+        account=account, identifier=identifier, provider=ExternalResourceProvider.RDS
+    )
+    rds_account_credentials = get_aws_credentials(
+        vault_secret=rds.account.automation_token, region=rds.account.region
+    )
+
+    log_account_credentials = AWSStaticCredentials(
+        access_key_id=settings.external_resource_rds_logs.access_key_id,
+        secret_access_key=settings.external_resource_rds_logs.secret_access_key,
+        region=settings.external_resource_rds_logs.region,
+    )
+
+    with (
+        AWSApi(credentials=rds_account_credentials, region=rds.region) as aws_api,
+        AWSApi(
+            credentials=log_account_credentials,
+            s3_endpoint_url=settings.external_resource_rds_logs.s3_url,
+        ) as log_aws_api,
+    ):
+        url = ExternalResourceRDSLogs(
+            aws_api=aws_api,
+            rds=rds,
+            s3_bucket=settings.external_resource_rds_logs.bucket,
+            s3_prefix=settings.external_resource_rds_logs.prefix,
+        ).run(
+            target_aws_api=log_aws_api,
+            expiration_days=expiration_days,
+            s3_file_name=s3_file_name,
+        )
+
+    if not url:
+        return "No logs found or no logs available for download."
+
+    return f"Download the RDS logs from the following URL: {url}. This link will expire in {expiration_days} days."
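
The S3 key derivation in `run()` is worth spelling out: an explicit `s3_file_name` is used as the full key (the prefix is not applied), otherwise the key defaults to `<prefix>/<account>-<identifier>.zip`, and ".zip" is appended when missing. A restatement for clarity:

```python
# Restatement of the key derivation logic in ExternalResourceRDSLogs.run().
def build_s3_key(
    prefix: str, account: str, identifier: str, s3_file_name: str | None = None
) -> str:
    key = s3_file_name or f"{prefix}/{account}-{identifier}.zip"
    return key if key.endswith(".zip") else f"{key}.zip"

assert build_s3_key("rds-logs", "prod", "db1") == "rds-logs/prod-db1.zip"
assert build_s3_key("rds-logs", "prod", "db1", "custom") == "custom.zip"
```
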
diff --git a/packages/automated_actions/automated_actions/celery/external_resource/_rds_reboot.py b/packages/automated_actions/automated_actions/celery/external_resource/_rds_reboot.py
new file mode 100644
index 0000000..18841ad
--- /dev/null
+++ b/packages/automated_actions/automated_actions/celery/external_resource/_rds_reboot.py
@@ -0,0 +1,42 @@
+from automated_actions_utils.aws_api import AWSApi, get_aws_credentials
+from automated_actions_utils.external_resource import (
+    ExternalResource,
+    ExternalResourceProvider,
+    get_external_resource,
+)
+
+from automated_actions.celery.app import app
+from automated_actions.celery.automated_action_task import AutomatedActionTask
+from automated_actions.db.models import Action
+
+
+class ExternalResourceRDSReboot:
+    def __init__(self, aws_api: AWSApi, rds: ExternalResource) -> None:
+        self.aws_api = aws_api
+        self.rds = rds
+
+    def run(self, *, force_failover: bool) -> None:
+        self.aws_api.reboot_rds_instance(
+            identifier=self.rds.identifier, force_failover=force_failover
+        )
+
+
+@app.task(base=AutomatedActionTask)
+def external_resource_rds_reboot(
+    account: str,
+    identifier: str,
+    *,
+    force_failover: bool,
+    action: Action,  # noqa: ARG001
+) -> None:
+    rds = get_external_resource(
+        account=account,
+        identifier=identifier,
+        provider=ExternalResourceProvider.RDS,
+    )
+
+    credentials = get_aws_credentials(
+        vault_secret=rds.account.automation_token, region=rds.account.region
+    )
+    with AWSApi(credentials=credentials, region=rds.region) as aws_api:
+        ExternalResourceRDSReboot(aws_api, rds).run(force_failover=force_failover)
diff --git a/packages/automated_actions/automated_actions/celery/external_resource/_rds_snapshot.py b/packages/automated_actions/automated_actions/celery/external_resource/_rds_snapshot.py
new file mode 100644
index 0000000..90462cd
--- /dev/null
+++ b/packages/automated_actions/automated_actions/celery/external_resource/_rds_snapshot.py
@@ -0,0 +1,47 @@
+from automated_actions_utils.aws_api import AWSApi, get_aws_credentials
+from automated_actions_utils.external_resource import (
+    ExternalResource,
+    ExternalResourceProvider,
+    get_external_resource,
+)
+
+from automated_actions.celery.app import app
+from automated_actions.celery.automated_action_task import AutomatedActionTask
+from automated_actions.db.models import Action
+
+
+class ExternalResourceRDSSnapshot:
+    """Create a snapshot of an RDS instance."""
+
+    def __init__(self, aws_api: AWSApi, rds: ExternalResource) -> None:
+        self.aws_api = aws_api
+        self.rds = rds
+
+    def run(self, snapshot_identifier: str) -> None:
+        self.aws_api.create_rds_snapshot(
+            identifier=self.rds.identifier,
+            snapshot_identifier=snapshot_identifier,
+        )
+
+
+@app.task(base=AutomatedActionTask)
+def external_resource_rds_snapshot(
+    account: str,
+    identifier: str,
+    snapshot_identifier: str,
+    *,
+    action: Action,  # noqa: ARG001
+) -> None:
+    rds = get_external_resource(
+        account=account,
+        identifier=identifier,
+        provider=ExternalResourceProvider.RDS,
+    )
+
+    credentials = get_aws_credentials(
+        vault_secret=rds.account.automation_token, region=rds.account.region
+    )
+    with AWSApi(credentials=credentials, region=rds.region) as aws_api:
+        ExternalResourceRDSSnapshot(aws_api, rds).run(
+            snapshot_identifier=snapshot_identifier
+        )
url="https://test-cluster-url", token="test-cluster-token", # noqa: S106 ), ) mocker.patch( - "automated_actions.celery.external_resource.tasks.OpenshiftClient", + "automated_actions.celery.external_resource._elasticache_flush.OpenshiftClient", ) mock_flush_elasticache_run = mocker.patch.object( ExternalResourceFlushElastiCache, @@ -231,3 +233,266 @@ def test_external_resource_flush_elasticache_task_non_retryable_failure( result="what a failure!", task_args=task_args, ) + + +@pytest.fixture +def mock_target_aws(mocker: MockerFixture) -> Mock: + return mocker.Mock(spec=AWSApi) + + +def test_external_resource_rds_logs_run_with_default_filename( + mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource +) -> None: + s3_bucket = "test-bucket" + s3_prefix = "logs" + rds_logs = ExternalResourceRDSLogs( + aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix + ) + + mock_aws.list_rds_logs.return_value = ["error.log", "slow.log"] + mock_aws.stream_rds_log.return_value = iter([b"log content"]) + mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download" + + result = rds_logs.run( + target_aws_api=mock_target_aws, + expiration_days=3, + ) + + expected_s3_key = f"{s3_prefix}/{er.account.name}-{er.identifier}.zip" + + mock_aws.list_rds_logs.assert_called_once_with(er.identifier) + assert mock_aws.stream_rds_log.call_count == 2 # noqa: PLR2004 + mock_aws.stream_rds_logs_to_s3_zip.assert_called_once() + mock_aws.generate_s3_download_url.assert_called_once_with( + bucket=s3_bucket, + s3_key=expected_s3_key, + expiration_secs=3 * 24 * 3600, + ) + + assert result == "https://s3.example.com/download" + + +def test_external_resource_rds_logs_run_with_custom_filename( + mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource +) -> None: + s3_bucket = "test-bucket" + s3_prefix = "logs" + custom_filename = "custom-logs.zip" + rds_logs = ExternalResourceRDSLogs( + aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix + ) + + mock_aws.list_rds_logs.return_value = ["error.log"] + mock_aws.stream_rds_log.return_value = iter([b"log content"]) + mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download" + + result = rds_logs.run( + target_aws_api=mock_target_aws, + expiration_days=7, + s3_file_name=custom_filename, + ) + + mock_aws.stream_rds_logs_to_s3_zip.assert_called_once() + mock_aws.generate_s3_download_url.assert_called_once_with( + bucket=s3_bucket, + s3_key=custom_filename, + expiration_secs=7 * 24 * 3600, + ) + + assert result == "https://s3.example.com/download" + + +def test_external_resource_rds_logs_run_appends_zip_extension( + mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource +) -> None: + s3_bucket = "test-bucket" + s3_prefix = "logs" + filename_without_zip = "custom-logs" + rds_logs = ExternalResourceRDSLogs( + aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix + ) + + mock_aws.list_rds_logs.return_value = ["error.log"] + mock_aws.stream_rds_log.return_value = iter([b"log content"]) + mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download" + + rds_logs.run( + target_aws_api=mock_target_aws, + expiration_days=1, + s3_file_name=filename_without_zip, + ) + + call_args = mock_aws.stream_rds_logs_to_s3_zip.call_args + assert call_args.kwargs["s3_key"] == f"{filename_without_zip}.zip" + + +def test_external_resource_rds_logs_task( + mocker: MockerFixture, mock_action: Mock, er: ExternalResource +) -> None: + mocker.patch( + 
"automated_actions.celery.external_resource._rds_logs.get_external_resource", + return_value=er, + ) + mocker.patch( + "automated_actions.celery.external_resource._rds_logs.get_aws_credentials", + return_value=AWSStaticCredentials( + access_key_id="test-access-key", + secret_access_key="test-secret-key", # noqa: S106 + region="us-west-2", + ), + ) + mock_settings = mocker.patch( + "automated_actions.celery.external_resource._rds_logs.settings" + ) + mock_settings.external_resource_rds_logs.access_key_id = "log-access-key" + mock_settings.external_resource_rds_logs.secret_access_key = "log-secret-key" # noqa: S105 + mock_settings.external_resource_rds_logs.region = "us-east-1" + mock_settings.external_resource_rds_logs.s3_url = "https://s3.amazonaws.com" + mock_settings.external_resource_rds_logs.bucket = "log-bucket" + mock_settings.external_resource_rds_logs.prefix = "rds-logs" + + mock_rds_logs_run = mocker.patch.object( + ExternalResourceRDSLogs, "run", return_value="https://download.url" + ) + + action_id = str(uuid.uuid4()) + task_args = { + "account": "test-account", + "identifier": "test-identifier", + "expiration_days": 5, + "s3_file_name": "custom.zip", + } + + result = ( + external_resource_rds_logs.signature( + kwargs={**task_args, "action": mock_action}, + task_id=action_id, + ) + .apply() + .result + ) + + mock_rds_logs_run.assert_called_once() + call_args = mock_rds_logs_run.call_args + assert call_args.kwargs["expiration_days"] == 5 # noqa: PLR2004 + assert call_args.kwargs["s3_file_name"] == "custom.zip" + + mock_action.set_status.assert_called_once_with(ActionStatus.RUNNING) + mock_action.set_final_state.assert_called_once_with( + status=ActionStatus.SUCCESS, + result="Download the RDS logs from the following URL: https://download.url. This link will expire in 5 days.", + task_args=task_args, + ) + + assert ( + result + == "Download the RDS logs from the following URL: https://download.url. This link will expire in 5 days." 
diff --git a/packages/automated_actions/tests/api/v1/views/test_external_resource.py b/packages/automated_actions/tests/api/v1/views/test_external_resource.py
index c34094c..35df268 100644
--- a/packages/automated_actions/tests/api/v1/views/test_external_resource.py
+++ b/packages/automated_actions/tests/api/v1/views/test_external_resource.py
@@ -8,6 +8,7 @@ from automated_actions.api.v1.views.external_resource import (
     get_action_external_resource_flush_elasticache,
+    get_action_external_resource_rds_logs,
     get_action_external_resource_rds_reboot,
     get_action_external_resource_rds_snapshot,
 )
@@ -30,6 +31,14 @@ def mock_external_resource_rds_snapshot_task(mocker: MockerFixture) -> MagicMock:
     )
 
 
+@pytest.fixture
+def mock_external_resource_rds_logs_task(mocker: MockerFixture) -> MagicMock:
+    """Mock the external_resource_rds_logs_task function."""
+    return mocker.patch(
+        "automated_actions.api.v1.views.external_resource.external_resource_rds_logs_task"
+    )
+
+
 @pytest.fixture
 def mock_external_resource_flush_elasticache_task(mocker: MockerFixture) -> MagicMock:
     """Mock the external_resource_flush_elasticache_task function."""
@@ -46,6 +55,9 @@ def test_app(app: FastAPI, mocker: MockerFixture, running_action: dict) -> FastAPI:
     app.dependency_overrides[get_action_external_resource_rds_reboot] = (
         lambda: action_mock
     )
+    app.dependency_overrides[get_action_external_resource_rds_logs] = (
+        lambda: action_mock
+    )
     app.dependency_overrides[get_action_external_resource_rds_snapshot] = (
         lambda: action_mock
     )
@@ -86,6 +98,93 @@ def test_external_resource_rds_reboot(
     )
 
 
+def test_external_resource_rds_logs(
+    test_app: FastAPI,
+    client: Callable[[FastAPI], TestClient],
+    mock_external_resource_rds_logs_task: MagicMock,
+    running_action: dict,
+) -> None:
+    response = client(test_app).post(
+        test_app.url_path_for(
+            "external_resource_rds_logs",
+            account="test-account",
+            identifier="test-identifier",
+        ),
+        params={
+            "expiration_days": 5,
+            "s3_file_name": "custom-logs.zip",
+        },
+    )
+    assert response.status_code == status.HTTP_202_ACCEPTED
+    assert response.json()["action_id"] == running_action["action_id"]
+    mock_external_resource_rds_logs_task.apply_async.assert_called_once_with(
+        kwargs={
+            "account": "test-account",
+            "identifier": "test-identifier",
+            "expiration_days": 5,
+            "s3_file_name": "custom-logs.zip",
+            "action": test_app.dependency_overrides[
+                get_action_external_resource_rds_logs
+            ](),
+        },
+        task_id=running_action["action_id"],
+    )
+
+
+def test_external_resource_rds_logs_default_params(
+    test_app: FastAPI,
+    client: Callable[[FastAPI], TestClient],
+    mock_external_resource_rds_logs_task: MagicMock,
+    running_action: dict,
+) -> None:
+    response = client(test_app).post(
+        test_app.url_path_for(
+            "external_resource_rds_logs",
+            account="test-account",
+            identifier="test-identifier",
+        ),
+    )
+    assert response.status_code == status.HTTP_202_ACCEPTED
+    assert response.json()["action_id"] == running_action["action_id"]
+    mock_external_resource_rds_logs_task.apply_async.assert_called_once_with(
+        kwargs={
+            "account": "test-account",
+            "identifier": "test-identifier",
+            "expiration_days": 7,
+            "s3_file_name": None,
+            "action": test_app.dependency_overrides[
+                get_action_external_resource_rds_logs
+            ](),
+        },
+        task_id=running_action["action_id"],
+    )
+
+
+def test_external_resource_rds_logs_expiration_validation(
+    test_app: FastAPI,
+    client: Callable[[FastAPI], TestClient],
+) -> None:
+    response = client(test_app).post(
+        test_app.url_path_for(
+            "external_resource_rds_logs",
+            account="test-account",
+            identifier="test-identifier",
+        ),
+        params={"expiration_days": 0},
+    )
+    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
+
+    response = client(test_app).post(
+        test_app.url_path_for(
+            "external_resource_rds_logs",
+            account="test-account",
+            identifier="test-identifier",
+        ),
+        params={"expiration_days": 8},
+    )
+    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
+
+
 def test_external_resource_rds_snapshot(
     test_app: FastAPI,
     client: Callable[[FastAPI], TestClient],
diff --git a/packages/automated_actions/tests/celery/test_external_resource.py b/packages/automated_actions/tests/celery/test_external_resource.py
index ee7dbf2..cca6b54 100644
--- a/packages/automated_actions/tests/celery/test_external_resource.py
+++ b/packages/automated_actions/tests/celery/test_external_resource.py
@@ -13,8 +13,10 @@ from automated_actions.celery.external_resource.tasks import (
     ExternalResourceFlushElastiCache,
+    ExternalResourceRDSLogs,
     ExternalResourceRDSReboot,
     external_resource_flush_elasticache,
+    external_resource_rds_logs,
     external_resource_rds_reboot,
 )
 from automated_actions.db.models import ActionStatus
@@ -61,11 +63,11 @@ def test_external_resource_rds_reboot_task(
     mocker: MockerFixture, mock_action: Mock, er: ExternalResource
 ) -> None:
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_external_resource",
+        "automated_actions.celery.external_resource._rds_reboot.get_external_resource",
         return_value=er,
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_aws_credentials",
+        "automated_actions.celery.external_resource._rds_reboot.get_aws_credentials",
         return_value=AWSStaticCredentials(
             access_key_id="test-access-key",
             secret_access_key="test-secret-key",  # noqa: S106
@@ -96,11 +98,11 @@ def test_external_resource_rds_reboot_task_non_retryable_failure(
     mocker: MockerFixture, mock_action: Mock, er: ExternalResource
 ) -> None:
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_external_resource",
+        "automated_actions.celery.external_resource._rds_reboot.get_external_resource",
         return_value=er,
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_aws_credentials",
+        "automated_actions.celery.external_resource._rds_reboot.get_aws_credentials",
        return_value=AWSStaticCredentials(
             access_key_id="test-access-key",
             secret_access_key="test-secret-key",  # noqa: S106
@@ -157,18 +159,18 @@ def test_external_resource_flush_elasticache_task(
     mocker: MockerFixture, mock_action: Mock, er: ExternalResource
 ) -> None:
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_external_resource",
+        "automated_actions.celery.external_resource._elasticache_flush.get_external_resource",
         return_value=er,
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_cluster_connection_data",
+        "automated_actions.celery.external_resource._elasticache_flush.get_cluster_connection_data",
         return_value=ClusterConnectionData(
             url="https://test-cluster-url",
             token="test-cluster-token",  # noqa: S106
         ),
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.OpenshiftClient",
+        "automated_actions.celery.external_resource._elasticache_flush.OpenshiftClient",
     )
     mock_flush_elasticache_run = mocker.patch.object(
         ExternalResourceFlushElastiCache, "run"
     )
@@ -195,18 +197,18 @@ def test_external_resource_flush_elasticache_task_non_retryable_failure(
     mocker: MockerFixture, mock_action: Mock, er: ExternalResource
 ) -> None:
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_external_resource",
+        "automated_actions.celery.external_resource._elasticache_flush.get_external_resource",
         return_value=er,
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.get_cluster_connection_data",
+        "automated_actions.celery.external_resource._elasticache_flush.get_cluster_connection_data",
         return_value=ClusterConnectionData(
             url="https://test-cluster-url",
             token="test-cluster-token",  # noqa: S106
         ),
     )
     mocker.patch(
-        "automated_actions.celery.external_resource.tasks.OpenshiftClient",
+        "automated_actions.celery.external_resource._elasticache_flush.OpenshiftClient",
     )
     mock_flush_elasticache_run = mocker.patch.object(
         ExternalResourceFlushElastiCache,
@@ -231,3 +233,266 @@ def test_external_resource_flush_elasticache_task_non_retryable_failure(
         result="what a failure!",
         task_args=task_args,
     )
+
+
+@pytest.fixture
+def mock_target_aws(mocker: MockerFixture) -> Mock:
+    return mocker.Mock(spec=AWSApi)
+
+
+def test_external_resource_rds_logs_run_with_default_filename(
+    mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource
+) -> None:
+    s3_bucket = "test-bucket"
+    s3_prefix = "logs"
+    rds_logs = ExternalResourceRDSLogs(
+        aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix
+    )
+
+    mock_aws.list_rds_logs.return_value = ["error.log", "slow.log"]
+    mock_aws.stream_rds_log.return_value = iter([b"log content"])
+    mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download"
+
+    result = rds_logs.run(
+        target_aws_api=mock_target_aws,
+        expiration_days=3,
+    )
+
+    expected_s3_key = f"{s3_prefix}/{er.account.name}-{er.identifier}.zip"
+
+    mock_aws.list_rds_logs.assert_called_once_with(er.identifier)
+    assert mock_aws.stream_rds_log.call_count == 2  # noqa: PLR2004
+    mock_aws.stream_rds_logs_to_s3_zip.assert_called_once()
+    mock_aws.generate_s3_download_url.assert_called_once_with(
+        bucket=s3_bucket,
+        s3_key=expected_s3_key,
+        expiration_secs=3 * 24 * 3600,
+    )
+
+    assert result == "https://s3.example.com/download"
+
+
+def test_external_resource_rds_logs_run_with_custom_filename(
+    mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource
+) -> None:
+    s3_bucket = "test-bucket"
+    s3_prefix = "logs"
+    custom_filename = "custom-logs.zip"
+    rds_logs = ExternalResourceRDSLogs(
+        aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix
+    )
+
+    mock_aws.list_rds_logs.return_value = ["error.log"]
+    mock_aws.stream_rds_log.return_value = iter([b"log content"])
+    mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download"
+
+    result = rds_logs.run(
+        target_aws_api=mock_target_aws,
+        expiration_days=7,
+        s3_file_name=custom_filename,
+    )
+
+    mock_aws.stream_rds_logs_to_s3_zip.assert_called_once()
+    mock_aws.generate_s3_download_url.assert_called_once_with(
+        bucket=s3_bucket,
+        s3_key=custom_filename,
+        expiration_secs=7 * 24 * 3600,
+    )
+
+    assert result == "https://s3.example.com/download"
+
+
+def test_external_resource_rds_logs_run_appends_zip_extension(
+    mock_aws: Mock, mock_target_aws: Mock, er: ExternalResource
+) -> None:
+    s3_bucket = "test-bucket"
+    s3_prefix = "logs"
+    filename_without_zip = "custom-logs"
+    rds_logs = ExternalResourceRDSLogs(
+        aws_api=mock_aws, rds=er, s3_bucket=s3_bucket, s3_prefix=s3_prefix
+    )
+
+    mock_aws.list_rds_logs.return_value = ["error.log"]
+    mock_aws.stream_rds_log.return_value = iter([b"log content"])
+    mock_aws.generate_s3_download_url.return_value = "https://s3.example.com/download"
+
+    rds_logs.run(
+        target_aws_api=mock_target_aws,
+        expiration_days=1,
+        s3_file_name=filename_without_zip,
+    )
+
+    call_args = mock_aws.stream_rds_logs_to_s3_zip.call_args
+    assert call_args.kwargs["s3_key"] == f"{filename_without_zip}.zip"
+
+
+def test_external_resource_rds_logs_task(
+    mocker: MockerFixture, mock_action: Mock, er: ExternalResource
+) -> None:
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_external_resource",
+        return_value=er,
+    )
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_aws_credentials",
+        return_value=AWSStaticCredentials(
+            access_key_id="test-access-key",
+            secret_access_key="test-secret-key",  # noqa: S106
+            region="us-west-2",
+        ),
+    )
+    mock_settings = mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.settings"
+    )
+    mock_settings.external_resource_rds_logs.access_key_id = "log-access-key"
+    mock_settings.external_resource_rds_logs.secret_access_key = "log-secret-key"  # noqa: S105
+    mock_settings.external_resource_rds_logs.region = "us-east-1"
+    mock_settings.external_resource_rds_logs.s3_url = "https://s3.amazonaws.com"
+    mock_settings.external_resource_rds_logs.bucket = "log-bucket"
+    mock_settings.external_resource_rds_logs.prefix = "rds-logs"
+
+    mock_rds_logs_run = mocker.patch.object(
+        ExternalResourceRDSLogs, "run", return_value="https://download.url"
+    )
+
+    action_id = str(uuid.uuid4())
+    task_args = {
+        "account": "test-account",
+        "identifier": "test-identifier",
+        "expiration_days": 5,
+        "s3_file_name": "custom.zip",
+    }
+
+    result = (
+        external_resource_rds_logs.signature(
+            kwargs={**task_args, "action": mock_action},
+            task_id=action_id,
+        )
+        .apply()
+        .result
+    )
+
+    mock_rds_logs_run.assert_called_once()
+    call_args = mock_rds_logs_run.call_args
+    assert call_args.kwargs["expiration_days"] == 5  # noqa: PLR2004
+    assert call_args.kwargs["s3_file_name"] == "custom.zip"
+
+    mock_action.set_status.assert_called_once_with(ActionStatus.RUNNING)
+    mock_action.set_final_state.assert_called_once_with(
+        status=ActionStatus.SUCCESS,
+        result="Download the RDS logs from the following URL: https://download.url. This link will expire in 5 days.",
+        task_args=task_args,
+    )
+
+    assert (
+        result
+        == "Download the RDS logs from the following URL: https://download.url. This link will expire in 5 days."
+    )
+
+
+def test_external_resource_rds_logs_task_no_url_returned(
+    mocker: MockerFixture, mock_action: Mock, er: ExternalResource
+) -> None:
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_external_resource",
+        return_value=er,
+    )
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_aws_credentials",
+        return_value=AWSStaticCredentials(
+            access_key_id="test-access-key",
+            secret_access_key="test-secret-key",  # noqa: S106
+            region="us-west-2",
+        ),
+    )
+    mock_settings = mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.settings"
+    )
+    mock_settings.external_resource_rds_logs.access_key_id = "log-access-key"
+    mock_settings.external_resource_rds_logs.secret_access_key = "log-secret-key"  # noqa: S105
+    mock_settings.external_resource_rds_logs.region = "us-east-1"
+    mock_settings.external_resource_rds_logs.s3_url = "https://s3.amazonaws.com"
+    mock_settings.external_resource_rds_logs.bucket = "log-bucket"
+    mock_settings.external_resource_rds_logs.prefix = "rds-logs"
+
+    mock_rds_logs_run = mocker.patch.object(
+        ExternalResourceRDSLogs, "run", return_value=""
+    )
+
+    action_id = str(uuid.uuid4())
+    task_args = {
+        "account": "test-account",
+        "identifier": "test-identifier",
+        "expiration_days": 7,
+    }
+
+    result = (
+        external_resource_rds_logs.signature(
+            kwargs={**task_args, "action": mock_action},
+            task_id=action_id,
+        )
+        .apply()
+        .result
+    )
+
+    mock_rds_logs_run.assert_called_once()
+    mock_action.set_status.assert_called_once_with(ActionStatus.RUNNING)
+    mock_action.set_final_state.assert_called_once_with(
+        status=ActionStatus.SUCCESS,
+        result="No logs found or no logs available for download.",
+        task_args=task_args,
+    )
+
+    assert result == "No logs found or no logs available for download."
+
+
+def test_external_resource_rds_logs_task_failure(
+    mocker: MockerFixture, mock_action: Mock, er: ExternalResource
+) -> None:
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_external_resource",
+        return_value=er,
+    )
+    mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.get_aws_credentials",
+        return_value=AWSStaticCredentials(
+            access_key_id="test-access-key",
+            secret_access_key="test-secret-key",  # noqa: S106
+            region="us-west-2",
+        ),
+    )
+    mock_settings = mocker.patch(
+        "automated_actions.celery.external_resource._rds_logs.settings"
+    )
+    mock_settings.external_resource_rds_logs.access_key_id = "log-access-key"
+    mock_settings.external_resource_rds_logs.secret_access_key = "log-secret-key"  # noqa: S105
+    mock_settings.external_resource_rds_logs.region = "us-east-1"
+    mock_settings.external_resource_rds_logs.s3_url = "https://s3.amazonaws.com"
+    mock_settings.external_resource_rds_logs.bucket = "log-bucket"
+    mock_settings.external_resource_rds_logs.prefix = "rds-logs"
+
+    mock_rds_logs_run = mocker.patch.object(
+        ExternalResourceRDSLogs,
+        "run",
+        side_effect=Exception("RDS logs retrieval failed!"),
+    )
+
+    action_id = str(uuid.uuid4())
+    task_args = {
+        "account": "test-account",
+        "identifier": "test-identifier",
+        "expiration_days": 3,
+    }
+
+    external_resource_rds_logs.signature(
+        kwargs={**task_args, "action": mock_action},
+        task_id=action_id,
+    ).apply()
+
+    mock_rds_logs_run.assert_called_once()
+    mock_action.set_status.assert_called_once_with(ActionStatus.RUNNING)
+    mock_action.set_final_state.assert_called_once_with(
+        status=ActionStatus.FAILURE,
+        result="RDS logs retrieval failed!",
+        task_args=task_args,
+    )
diff --git a/packages/automated_actions_cli/pyproject.toml b/packages/automated_actions_cli/pyproject.toml
index 930402c..0fd4815 100644
--- a/packages/automated_actions_cli/pyproject.toml
+++ b/packages/automated_actions_cli/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "automated-actions-cli"
-version = "0.1.6"
+version = "0.1.7"
 description = "Automated Actions Client"
 authors = [
     # Feel free to add or change authors
diff --git a/packages/automated_actions_client/automated_actions_client/api/actions/__init__.py b/packages/automated_actions_client/automated_actions_client/api/actions/__init__.py
index 8c10af9..c2bbd8a 100644
--- a/packages/automated_actions_client/automated_actions_client/api/actions/__init__.py
+++ b/packages/automated_actions_client/automated_actions_client/api/actions/__init__.py
@@ -3,6 +3,7 @@
 from . import (
     external_resource_flush_elasticache,
+    external_resource_rds_logs,
     external_resource_rds_reboot,
     external_resource_rds_snapshot,
     no_op,
@@ -12,6 +13,7 @@
 __all__ = [
     "external_resource_flush_elasticache",
+    "external_resource_rds_logs",
     "external_resource_rds_reboot",
     "external_resource_rds_snapshot",
     "no_op",
diff --git a/packages/automated_actions_client/automated_actions_client/api/actions/external_resource_rds_logs.py b/packages/automated_actions_client/automated_actions_client/api/actions/external_resource_rds_logs.py
new file mode 100644
index 0000000..2cce390
--- /dev/null
+++ b/packages/automated_actions_client/automated_actions_client/api/actions/external_resource_rds_logs.py
@@ -0,0 +1,285 @@
+# This file is auto-generated by OpenAPI Python Client.
+# Do not edit manually.
+
+from http import HTTPStatus
+from typing import Any
+
+import httpx
+
+from ... import errors
+from ...client import AuthenticatedClient, Client
+from ...models.action_schema_out import ActionSchemaOut
+from ...models.http_validation_error import HTTPValidationError
+from ...types import UNSET, Response, Unset
+
+
+def _get_kwargs(
+    account: str,
+    identifier: str,
+    *,
+    expiration_days: Unset | int = 7,
+    s3_file_name: None | Unset | str = UNSET,
+) -> dict[str, Any]:
+    params: dict[str, Any] = {}
+
+    params["expiration_days"] = expiration_days
+
+    json_s3_file_name: None | Unset | str
+    if isinstance(s3_file_name, Unset):
+        json_s3_file_name = UNSET
+    else:
+        json_s3_file_name = s3_file_name
+    params["s3_file_name"] = json_s3_file_name
+
+    params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
+
+    _kwargs: dict[str, Any] = {
+        "method": "post",
+        "url": f"/api/v1/external-resource/rds-logs/{account}/{identifier}",
+        "params": params,
+    }
+
+    return _kwargs
+
+
+def _parse_response(
+    *, client: AuthenticatedClient | Client, response: httpx.Response
+) -> ActionSchemaOut | HTTPValidationError | None:
+    if response.status_code == 202:
+        response_202 = ActionSchemaOut.from_dict(response.json())
+
+        return response_202
+    if response.status_code == 422:
+        response_422 = HTTPValidationError.from_dict(response.json())
+
+        return response_422
+    if client.raise_on_unexpected_status:
+        raise errors.UnexpectedStatus(response.status_code, response.content)
+    else:
+        return None
+
+
+def _build_response(
+    *, client: AuthenticatedClient | Client, response: httpx.Response
+) -> Response[ActionSchemaOut | HTTPValidationError]:
+    return Response(
+        status_code=HTTPStatus(response.status_code),
+        content=response.content,
+        headers=response.headers,
+        parsed=_parse_response(client=client, response=response),
+    )
+
+
+def sync_detailed(
+    account: str,
+    identifier: str,
+    *,
+    client: AuthenticatedClient | Client,
+    expiration_days: Unset | int = 7,
+    s3_file_name: None | Unset | str = UNSET,
+) -> Response[ActionSchemaOut | HTTPValidationError]:
+    """External Resource Rds Logs
+
+    Get RDS logs for an instance.
+
+    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in
+    an S3 bucket.
+
+    Args:
+        account (str): AWS account name
+        identifier (str): RDS instance identifier
+        expiration_days (Union[Unset, int]): Expiration time in days Default: 7.
+        s3_file_name (Union[None, Unset, str]): The S3 target file name. Defaults to
+            '{account}-{identifier}.zip' if not provided.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[Union[ActionSchemaOut, HTTPValidationError]]
+    """
+
+    kwargs = _get_kwargs(
+        account=account,
+        identifier=identifier,
+        expiration_days=expiration_days,
+        s3_file_name=s3_file_name,
+    )
+
+    with client as _client:
+        response = _client.request(
+            **kwargs,
+        )
+
+    return _build_response(client=client, response=response)
+
+
+def sync(
+    account: str,
+    identifier: str,
+    *,
+    client: AuthenticatedClient | Client,
+    expiration_days: Unset | int = 7,
+    s3_file_name: None | Unset | str = UNSET,
+) -> ActionSchemaOut | HTTPValidationError | None:
+    """External Resource Rds Logs
+
+    Get RDS logs for an instance.
+
+    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in
+    an S3 bucket.
+
+    Args:
+        account (str): AWS account name
+        identifier (str): RDS instance identifier
+        expiration_days (Union[Unset, int]): Expiration time in days Default: 7.
+        s3_file_name (Union[None, Unset, str]): The S3 target file name. Defaults to
+            '{account}-{identifier}.zip' if not provided.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Union[ActionSchemaOut, HTTPValidationError]
+    """
+
+    return sync_detailed(
+        account=account,
+        identifier=identifier,
+        client=client,
+        expiration_days=expiration_days,
+        s3_file_name=s3_file_name,
+    ).parsed
+
+
+async def asyncio_detailed(
+    account: str,
+    identifier: str,
+    *,
+    client: AuthenticatedClient | Client,
+    expiration_days: Unset | int = 7,
+    s3_file_name: None | Unset | str = UNSET,
+) -> Response[ActionSchemaOut | HTTPValidationError]:
+    """External Resource Rds Logs
+
+    Get RDS logs for an instance.
+
+    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in
+    an S3 bucket.
+
+    Args:
+        account (str): AWS account name
+        identifier (str): RDS instance identifier
+        expiration_days (Union[Unset, int]): Expiration time in days Default: 7.
+        s3_file_name (Union[None, Unset, str]): The S3 target file name. Defaults to
+            '{account}-{identifier}.zip' if not provided.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Response[Union[ActionSchemaOut, HTTPValidationError]]
+    """
+
+    kwargs = _get_kwargs(
+        account=account,
+        identifier=identifier,
+        expiration_days=expiration_days,
+        s3_file_name=s3_file_name,
+    )
+
+    async with client as _client:
+        response = await _client.request(
+            **kwargs,
+        )
+
+    return _build_response(client=client, response=response)
+
+
+async def asyncio(
+    account: str,
+    identifier: str,
+    *,
+    client: AuthenticatedClient | Client,
+    expiration_days: Unset | int = 7,
+    s3_file_name: None | Unset | str = UNSET,
+) -> ActionSchemaOut | HTTPValidationError | None:
+    """External Resource Rds Logs
+
+    Get RDS logs for an instance.
+
+    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in
+    an S3 bucket.
+
+    Args:
+        account (str): AWS account name
+        identifier (str): RDS instance identifier
+        expiration_days (Union[Unset, int]): Expiration time in days Default: 7.
+        s3_file_name (Union[None, Unset, str]): The S3 target file name. Defaults to
+            '{account}-{identifier}.zip' if not provided.
+
+    Raises:
+        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
+        httpx.TimeoutException: If the request takes longer than Client.timeout.
+
+    Returns:
+        Union[ActionSchemaOut, HTTPValidationError]
+    """
+
+    return (
+        await asyncio_detailed(
+            account=account,
+            identifier=identifier,
+            client=client,
+            expiration_days=expiration_days,
+            s3_file_name=s3_file_name,
+        )
+    ).parsed
+
+
+from typing import Annotated
+
+import typer
+
+app = typer.Typer()
+
+
+@app.command(
+    help="""Get RDS logs for an instance.
+
+This action retrieves logs from a specified RDS instance in a given AWS account and stores them in an S3 bucket.""",
+    rich_help_panel="Actions",
+)
+def external_resource_rds_logs(
+    ctx: typer.Context,
+    account: Annotated[str, typer.Option(help="AWS account name", show_default=False)],
+    identifier: Annotated[
+        str, typer.Option(help="RDS instance identifier", show_default=False)
+    ],
+    expiration_days: Annotated[int, typer.Option(help="Expiration time in days")] = 7,
+    s3_file_name: Annotated[
+        None | str,
+        typer.Option(
+            help="The S3 target file name. Defaults to '{account}-{identifier}.zip' if not provided."
+        ),
+    ] = None,
+) -> None:
+    result = sync(
+        account=account,
+        identifier=identifier,
+        expiration_days=expiration_days,
+        s3_file_name=s3_file_name,
+        client=ctx.obj["client"],
+    )
+    if "formatter" in ctx.obj and result is not None:
+        output: Any = result
+        if isinstance(result, list):
+            output = [
+                item.to_dict() if hasattr(item, "to_dict") else item for item in result
+            ]
+        elif hasattr(result, "to_dict"):
+            output = result.to_dict()
+        ctx.obj["formatter"](output)
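
Beyond the embedded typer command, the module also exposes an async variant. A hedged sketch of using it (the `AuthenticatedClient` constructor arguments remain assumptions, as above; the alias avoids shadowing the stdlib `asyncio`):

```python
# Hedged sketch of the generated async variant.
import asyncio

from automated_actions_client import AuthenticatedClient  # assumed import path
from automated_actions_client.api.actions.external_resource_rds_logs import (
    asyncio as rds_logs_asyncio,
)


async def trigger() -> None:
    client = AuthenticatedClient(
        base_url="https://automated-actions.example.com", token="..."
    )
    action = await rds_logs_asyncio(
        account="aws-account-name", identifier="my-rds-instance", client=client
    )
    print(action)


asyncio.run(trigger())
```
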
diff --git a/packages/automated_actions_client/pyproject.toml b/packages/automated_actions_client/pyproject.toml
index 4158d54..78adecf 100644
--- a/packages/automated_actions_client/pyproject.toml
+++ b/packages/automated_actions_client/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "automated-actions-client"
-version = "0.1.5"
+version = "0.1.6"
 description = "Automated Actions Client"
 authors = [{ name = "AppSRE", email = "sd-app-sre@redhat.com" }]
 license = { text = "Apache 2.0" }
diff --git a/packages/automated_actions_utils/automated_actions_utils/aws_api.py b/packages/automated_actions_utils/automated_actions_utils/aws_api.py
index 4627bbe..7a4e4b3 100644
--- a/packages/automated_actions_utils/automated_actions_utils/aws_api.py
+++ b/packages/automated_actions_utils/automated_actions_utils/aws_api.py
@@ -1,16 +1,25 @@
+from __future__ import annotations
+
 import logging
 from abc import ABC, abstractmethod
-from typing import Any, Protocol, Self
+from collections.abc import Generator, Iterable
+from typing import TYPE_CHECKING, Any, Protocol, Self
 
 from automated_actions.config import settings
 from boto3 import Session
 from botocore.config import Config
 from pydantic import BaseModel
-from types_boto3_rds.client import RDSClient
-from types_boto3_rds.type_defs import EventTypeDef
+from zipstream import ZIP_DEFLATED, ZipStream
 
 from automated_actions_utils.vault_client import SecretFieldNotFoundError, VaultClient
 
+if TYPE_CHECKING:
+    from types_boto3_rds.client import RDSClient
+    from types_boto3_rds.type_defs import EventTypeDef
+    from types_boto3_s3.client import S3Client
+    from types_boto3_s3.type_defs import CompletedPartTypeDef
+
+
 log = logging.getLogger(__name__)
 
 
@@ -41,6 +50,13 @@ def build_session(self) -> Session:
         )
 
 
+class LogStream(BaseModel):
+    """Represents a log stream for RDS logs."""
+
+    name: str
+    content: Generator[bytes, None, None]
+
+
 def get_aws_credentials(vault_secret: VaultSecret, region: str) -> AWSCredentials:
     """Retrieves AWS credentials from Vault and returns them as an AWSCredentials object.
 
@@ -83,7 +99,12 @@ def get_aws_credentials(vault_secret: VaultSecret, region: str) -> AWSCredentials:
 class AWSApi:
     """A client for interacting with AWS services."""
 
-    def __init__(self, credentials: AWSCredentials, region: str | None) -> None:
+    def __init__(
+        self,
+        credentials: AWSCredentials,
+        region: str | None = None,
+        s3_endpoint_url: str | None = None,
+    ) -> None:
         self.session = credentials.build_session()
         self.config = Config(
             region_name=region,
@@ -93,6 +114,9 @@ def __init__(
             },
         )
         self.rds_client: RDSClient = self.session.client("rds", config=self.config)
+        self.s3_client: S3Client = self.session.client(
+            "s3", config=self.config, endpoint_url=s3_endpoint_url
+        )
 
     def __enter__(self) -> Self:
         """Enables the use of the AWSApi instance in a context manager."""
@@ -101,6 +125,30 @@ def __enter__(self) -> Self:
     def __exit__(self, *args: object, **kwargs: Any) -> None:
         """Handles cleanup when exiting the context manager."""
         self.rds_client.close()
+        self.s3_client.close()
+
+    @staticmethod
+    def _upload_multipart_chunk(
+        target_aws_api: AWSApi,
+        bucket: str,
+        s3_key: str,
+        upload_id: str,
+        part_number: int,
+        data: bytes,
+    ) -> CompletedPartTypeDef:
+        """Uploads a single part for multipart upload and returns part info."""
+        log.debug(f"Uploading part {part_number} ({len(data)} bytes)")
+        part_response = target_aws_api.s3_client.upload_part(
+            Bucket=bucket,
+            Key=s3_key,
+            PartNumber=part_number,
+            UploadId=upload_id,
+            Body=data,
+        )
+        return {
+            "ETag": part_response["ETag"],
+            "PartNumber": part_number,
+        }
 
     def reboot_rds_instance(self, identifier: str, *, force_failover: bool) -> None:
         """Reboots a specified RDS database instance.
@@ -152,3 +200,117 @@ def create_rds_snapshot(self, identifier: str, snapshot_identifier: str) -> None:
             DBInstanceIdentifier=identifier,
             DBSnapshotIdentifier=snapshot_identifier,
         )
+
+    def list_rds_logs(self, identifier: str) -> list[str]:
+        """Lists the log files for a specified RDS instance."""
+        logs: list[str] = []
+        log.info(f"Listing RDS logs for instance {identifier}")
+        paginator = self.rds_client.get_paginator("describe_db_log_files")
+        for page in paginator.paginate(DBInstanceIdentifier=identifier):
+            logs.extend(
+                log_file["LogFileName"]
+                for log_file in page.get("DescribeDBLogFiles", [])
+                if log_file["LogFileName"]
+            )
+        return logs
+
+    def stream_rds_log(
+        self, identifier: str, log_file: str
+    ) -> Generator[bytes, None, None]:
+        """Streams a specific RDS log file."""
+        marker = "0"
+        while True:
+            response = self.rds_client.download_db_log_file_portion(
+                DBInstanceIdentifier=identifier,
+                LogFileName=log_file,
+                Marker=marker,
+            )
+            if log_data_chunk := response.get("LogFileData"):
+                yield log_data_chunk.encode("utf-8")
+
+            if response["AdditionalDataPending"]:
+                marker = response["Marker"]
+            else:
+                break
+
+    def stream_rds_logs_to_s3_zip(
+        self,
+        log_streams: Iterable[LogStream],
+        bucket: str,
+        s3_key: str,
+        target_aws_api: AWSApi | None = None,
+    ) -> None:
+        """Streams all RDS log files to a single zip file in an S3 bucket using multipart upload."""
+        target_aws_api = target_aws_api or self
+
+        # Create a zip stream for large files without loading everything into memory
+        zip_stream = ZipStream(compress_type=ZIP_DEFLATED)
+        for log_stream in log_streams:
+            zip_stream.add(log_stream.content, arcname=log_stream.name)
+
+        # Start multipart upload
+        log.info(f"Starting multipart upload for {s3_key} to bucket {bucket}")
+        create_response = target_aws_api.s3_client.create_multipart_upload(
+            Bucket=bucket, Key=s3_key, ContentType="application/zip"
+        )
+        upload_id = create_response["UploadId"]
+
+        try:
+            parts = []
+            part_number = 1
+            chunk_size = 5 * 1024 * 1024  # 5MB minimum part size for S3
+            buffer = b""
+
+            # Stream zip data in chunks and upload as parts
+            for chunk in zip_stream:
+                buffer += chunk
+
+                # Upload when buffer reaches chunk size
+                while len(buffer) >= chunk_size:
+                    part_data = buffer[:chunk_size]
+                    buffer = buffer[chunk_size:]
+
+                    part_info = self._upload_multipart_chunk(
+                        target_aws_api,
+                        bucket,
+                        s3_key,
+                        upload_id,
+                        part_number,
+                        part_data,
+                    )
+                    parts.append(part_info)
+                    part_number += 1
+
+            # Upload remaining buffer as final part (if any)
+            if buffer:
+                part_info = self._upload_multipart_chunk(
+                    target_aws_api, bucket, s3_key, upload_id, part_number, buffer
+                )
+                parts.append(part_info)
+
+            # Complete multipart upload
+            target_aws_api.s3_client.complete_multipart_upload(
+                Bucket=bucket,
+                Key=s3_key,
+                UploadId=upload_id,
+                MultipartUpload={"Parts": parts},
+            )
+            log.info(f"Successfully completed multipart upload for {s3_key}")
+
+        except:
+            # Abort multipart upload on error
+            log.exception("Error during multipart upload")
+            target_aws_api.s3_client.abort_multipart_upload(
+                Bucket=bucket, Key=s3_key, UploadId=upload_id
+            )
+            raise
+
+    def generate_s3_download_url(
+        self, bucket: str, s3_key: str, expiration_secs: int = 3600
+    ) -> str:
+        """Generate a pre-signed URL for downloading an object from S3."""
+        return self.s3_client.generate_presigned_url(
+            "get_object",
+            Params={"Bucket": bucket, "Key": s3_key},
+            ExpiresIn=expiration_secs,
+        )
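
The buffering in `stream_rds_logs_to_s3_zip` exists because S3 multipart uploads require every part except the last to be at least 5 MiB. A restatement of just that logic, separated from the S3 calls:

```python
# Restatement of the part-buffering logic: accumulate zip output into
# >=5 MiB parts and flush whatever remains as the (smaller) final part.
from collections.abc import Iterable, Iterator


def chunk_parts(
    stream: Iterable[bytes], chunk_size: int = 5 * 1024 * 1024
) -> Iterator[bytes]:
    buffer = b""
    for piece in stream:
        buffer += piece
        while len(buffer) >= chunk_size:
            yield buffer[:chunk_size]
            buffer = buffer[chunk_size:]
    if buffer:
        yield buffer  # final part may be smaller than chunk_size
```
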
diff --git a/packages/automated_actions_utils/pyproject.toml b/packages/automated_actions_utils/pyproject.toml
index b93bcee..4a47333 100644
--- a/packages/automated_actions_utils/pyproject.toml
+++ b/packages/automated_actions_utils/pyproject.toml
@@ -15,6 +15,7 @@ dependencies = [
     "pydantic==2.11.7",
     "types-boto3-lite[rds]==1.39.16",
     "types-hvac==2.3.0.20250516",
+    "zipstream-ng==1.8.0",
 ]
 
 [project.urls]
@@ -31,6 +32,8 @@ dev = [
     "pytest==8.4.1",
     "qenerate==0.8.0",
     "ruff==0.12.7",
+    "types-boto3-lite[rds,s3]==1.39.16",
+    "types-hvac==2.3.0.20250516",
 ]
 
 [build-system]
@@ -124,7 +127,7 @@ disallow_incomplete_defs = true
 [[tool.mypy.overrides]]
 # Below are all of the packages that don't implement stub packages. Mypy will throw an error if we don't ignore the
 # missing imports. See: https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
-module = ["kubernetes.*", "openshift.dynamic.*"]
+module = ["kubernetes.*", "openshift.dynamic.*", "zipstream.*"]
 ignore_missing_imports = true
 
 # Coverage configuration
-    mock_session.client.assert_called_once_with("rds", config=aws_api.config)
-    # And that aws_api.rds_client is the instance returned by that call
+    # Verify both RDS and S3 clients were created
+    mock_session.client.assert_any_call("rds", config=aws_api.config)
+    mock_session.client.assert_any_call("s3", config=aws_api.config, endpoint_url=None)
+
+    # And that both clients are properly assigned
     assert aws_api.rds_client == mock_rds_client_returned_by_session
+    assert aws_api.s3_client == mock_s3_client_returned_by_session
+
+
+def test_aws_api_init_with_s3_endpoint_url(mock_aws_credentials: MagicMock) -> None:
+    """Tests AWSApi initialization with a custom S3 endpoint URL."""
+    mock_session = mock_aws_credentials.build_session.return_value
+    mock_rds_client = MagicMock()
+    mock_s3_client = MagicMock()
+
+    def client_side_effect(service_name: str, **_: Any) -> MagicMock:
+        if service_name == "rds":
+            return mock_rds_client
+        if service_name == "s3":
+            return mock_s3_client
+        raise ValueError(f"Unexpected service: {service_name}")
+
+    mock_session.client.side_effect = client_side_effect
+
+    s3_endpoint = "https://localstack:4566"
+    aws_api = AWSApi(
+        credentials=mock_aws_credentials,
+        region="us-west-2",
+        s3_endpoint_url=s3_endpoint,
+    )
+
+    # Verify S3 client was called with the custom endpoint URL
+    mock_session.client.assert_any_call(
+        "s3", config=aws_api.config, endpoint_url=s3_endpoint
+    )
+    assert aws_api.s3_client == mock_s3_client


 @pytest.mark.parametrize(
@@ -362,3 +406,329 @@
     mock_rds_client_on_instance.create_db_snapshot.assert_called_once_with(
         DBInstanceIdentifier=identifier, DBSnapshotIdentifier=snapshot_identifier
     )
+
+
+@pytest.mark.parametrize(
+    ("region", "identifier", "paginator_return_value", "expected_log_files"),
+    [
+        (
+            "us-west-2",
+            "test-db-instance",
+            [
+                {
+                    "DescribeDBLogFiles": [
+                        {"LogFileName": "error/mysql-error.log"},
+                        {"LogFileName": "slowquery/mysql-slowquery.log"},
+                    ]
+                }
+            ],
+            ["error/mysql-error.log", "slowquery/mysql-slowquery.log"],
+        ),
+        (
+            "us-east-1",
+            "postgres-instance",
+            [{"DescribeDBLogFiles": [{"LogFileName": "postgresql.log"}]}],
+            ["postgresql.log"],
+        ),
+        (
+            "eu-west-1",
+            "empty-instance",
+            [{"DescribeDBLogFiles": []}],
+            [],
+        ),
+        (
+            "ap-south-1",
+            "multi-page-db",
+            [
+                {"DescribeDBLogFiles": [{"LogFileName": "error.log"}]},
+                {
+                    "DescribeDBLogFiles": [
+                        {"LogFileName": "slow.log"},
+                        {"LogFileName": "general.log"},
+                    ]
+                },
+            ],
+            ["error.log", "slow.log", "general.log"],
+        ),
+        (
+            "eu-north-1",
+            "missing-key-db",
+            [
+                {"DescribeDBLogFiles": [{"LogFileName": "valid.log"}]},
+                {"NotDescribeDBLogFiles": []},  # Page missing 'DescribeDBLogFiles' key
+            ],
+            ["valid.log"],
+        ),
+        (
+            "ca-central-1",
+            "empty-filename-db",
+            [
+                {
+                    "DescribeDBLogFiles": [
+                        {"LogFileName": "valid.log"},
+                        {"LogFileName": ""},  # Empty filename should be filtered out
+                        {"LogFileName": "another.log"},
+                    ]
+                }
+            ],
+            ["valid.log", "another.log"],
+        ),
+    ],
+    ids=[
+        "success_single_page",
+        "postgres_instance",
+        "empty_instance",
+        "multiple_pages",
+        "missing_key_in_page",
+        "empty_filename_filtered",
+    ],
+)
+def test_aws_api_list_rds_logs(
+    mock_aws_credentials: MagicMock,
+    mocker: MockerFixture,
+    region: str,
+    identifier: str,
+    paginator_return_value: list[dict[str, list[dict[str, str]]]],
+    expected_log_files: list[str],
+) -> None:
+    """Tests the list_rds_logs method of AWSApi with paginated responses."""
+    aws_api = AWSApi(credentials=mock_aws_credentials, region=region)
+
+    mock_rds_client_on_instance = mocker.MagicMock()
+    aws_api.rds_client = mock_rds_client_on_instance
+
+    mock_paginator = mocker.MagicMock()
+    mock_rds_client_on_instance.get_paginator.return_value = mock_paginator
+    mock_paginator.paginate.return_value = paginator_return_value
+
+    result = aws_api.list_rds_logs(identifier=identifier)
+
+    mock_rds_client_on_instance.get_paginator.assert_called_once_with(
+        "describe_db_log_files"
+    )
+    mock_paginator.paginate.assert_called_once_with(DBInstanceIdentifier=identifier)
+    assert result == expected_log_files
+
+
+@pytest.mark.parametrize(
+    ("region", "identifier", "log_file", "log_data_chunks"),
+    [
+        ("us-west-2", "test-db", "error.log", ["log line 1\n", "log line 2\n"]),
+        ("us-east-1", "test-db", "slow.log", ["slow query data\n"]),
+        ("eu-west-1", "test-db", "empty.log", []),
+    ],
+)
+def test_aws_api_stream_rds_log(
+    mock_aws_credentials: MagicMock,
+    mocker: MockerFixture,
+    region: str,
+    identifier: str,
+    log_file: str,
+    log_data_chunks: list[str],
+) -> None:
+    """Tests the stream_rds_log method of AWSApi."""
+    aws_api = AWSApi(credentials=mock_aws_credentials, region=region)
+
+    mock_rds_client_on_instance = mocker.MagicMock()
+    aws_api.rds_client = mock_rds_client_on_instance
+
+    # Mock sequential responses for download_db_log_file_portion
+    mock_responses = []
+    for i, chunk in enumerate(log_data_chunks):
+        is_last = i == len(log_data_chunks) - 1
+        mock_responses.append({
+            "LogFileData": chunk,
+            "Marker": str(i + 1),
+            "AdditionalDataPending": not is_last,
+        })
+
+    # If no chunks, return a single response with empty data
+    if not log_data_chunks:
+        mock_responses = [
+            {
+                "LogFileData": "",
+                "Marker": "0",
+                "AdditionalDataPending": False,
+            }
+        ]
+
+    mock_rds_client_on_instance.download_db_log_file_portion.side_effect = (
+        mock_responses
+    )
+
+    # Collect all streamed data
+    result_data = b"".join(
+        aws_api.stream_rds_log(identifier=identifier, log_file=log_file)
+    )
+
+    # Verify the expected data was returned
+    expected_data = "".join(log_data_chunks).encode("utf-8")
+    assert result_data == expected_data
+
+    # Verify the RDS client was called correctly
+    expected_calls = len(mock_responses)
+    assert (
+        mock_rds_client_on_instance.download_db_log_file_portion.call_count
+        == expected_calls
+    )
+
+
+@pytest.mark.parametrize(
+    ("region", "bucket", "s3_key", "log_stream_count"),
+    [
+        ("us-west-2", "test-bucket", "logs/test.zip", 2),
+        ("us-east-1", "my-bucket", "rds-logs/instance-logs.zip", 1),
+        ("eu-west-1", "backup-bucket", "logs/backup.zip", 0),
+    ],
+)
+def test_aws_api_stream_rds_logs_to_s3_zip(
+    mock_aws_credentials: MagicMock,
+    mocker: MockerFixture,
+    region: str,
+    bucket: str,
+    s3_key: str,
+    log_stream_count: int,
+) -> None:
+    """Tests the stream_rds_logs_to_s3_zip method of AWSApi."""
+    aws_api = AWSApi(credentials=mock_aws_credentials, region=region)
+
+    mock_s3_client_on_instance = mocker.MagicMock()
+    aws_api.s3_client = mock_s3_client_on_instance
+
+    # Mock S3 multipart upload responses
+    upload_id = "test-upload-id"
+    mock_s3_client_on_instance.create_multipart_upload.return_value = {
+        "UploadId": upload_id
+    }
+    mock_s3_client_on_instance.upload_part.return_value = {"ETag": "test-etag"}
+
+    # Create test log streams
+    log_streams = []
+    for i in range(log_stream_count):
+
+        def generate_log_content() -> Generator[bytes, Any, None]:
+            yield b"log data chunk 1"
+            yield b"log data chunk 2"
+
+        log_streams.append(
+            LogStream(name=f"test-log-{i}.log", content=generate_log_content())
+        )
+
+    # Mock ZipStream to avoid actual zip creation
+    mock_zip_stream = mocker.patch("automated_actions_utils.aws_api.ZipStream")
+    mock_zip_instance = mocker.MagicMock()
+    mock_zip_stream.return_value = mock_zip_instance
+    mock_zip_instance.__iter__.return_value = iter([b"zip content chunk"])
+
+    # Mock _upload_multipart_chunk
+    mocker.patch.object(
+        aws_api,
+        "_upload_multipart_chunk",
+        return_value={"PartNumber": 1, "ETag": "test-etag"},
+    )
+
+    aws_api.stream_rds_logs_to_s3_zip(
+        log_streams=log_streams,
+        bucket=bucket,
+        s3_key=s3_key,
+    )
+
+    # Verify S3 operations were called
+    mock_s3_client_on_instance.create_multipart_upload.assert_called_once_with(
+        Bucket=bucket, Key=s3_key, ContentType="application/zip"
+    )
+    mock_s3_client_on_instance.complete_multipart_upload.assert_called_once()
+
+    # Verify log streams were added to zip
+    assert mock_zip_instance.add.call_count == log_stream_count
+
+
+@pytest.mark.parametrize("region", ["us-west-2"])
+def test_aws_api_stream_rds_logs_to_s3_zip_with_target_api(
+    mock_aws_credentials: MagicMock,
+    mocker: MockerFixture,
+    region: str,
+) -> None:
+    """Tests stream_rds_logs_to_s3_zip with a different target AWS API."""
+    source_aws_api = AWSApi(credentials=mock_aws_credentials, region=region)
+    target_aws_api = AWSApi(credentials=mock_aws_credentials, region="us-east-1")
+
+    mock_target_s3_client = mocker.MagicMock()
+    target_aws_api.s3_client = mock_target_s3_client
+
+    upload_id = "test-upload-id"
+    mock_target_s3_client.create_multipart_upload.return_value = {"UploadId": upload_id}
+
+    # Mock ZipStream
+    mock_zip_stream = mocker.patch("automated_actions_utils.aws_api.ZipStream")
+    mock_zip_instance = mocker.MagicMock()
+    mock_zip_stream.return_value = mock_zip_instance
+    mock_zip_instance.__iter__.return_value = iter([b"zip content"])
+
+    # Mock _upload_multipart_chunk
+    mocker.patch.object(
+        source_aws_api,
+        "_upload_multipart_chunk",
+        return_value={"PartNumber": 1, "ETag": "test-etag"},
+    )
+
+    log_streams = [LogStream(name="test.log", content=iter([b"test data"]))]
+
+    source_aws_api.stream_rds_logs_to_s3_zip(
+        log_streams=log_streams,
+        bucket="test-bucket",
+        s3_key="test.zip",
+        target_aws_api=target_aws_api,
+    )
+
+    # Verify the target API was used for S3 operations
+    mock_target_s3_client.create_multipart_upload.assert_called_once()
+    mock_target_s3_client.complete_multipart_upload.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    ("region", "bucket", "s3_key", "expiration_secs", "expected_url"),
+    [
+        (
+            "us-west-2",
+            "test-bucket",
+            "logs/test.zip",
+            3600,
+            "https://s3.amazonaws.com/test-bucket/logs/test.zip",
+        ),
+        (
+            "us-east-1",
+            "my-logs",
+            "rds.zip",
+            7200,
+            "https://s3.amazonaws.com/my-logs/rds.zip",
+        ),
+    ],
+)
+def test_aws_api_generate_s3_download_url(
+    mock_aws_credentials: MagicMock,
+    mocker: MockerFixture,
+    region: str,
+    bucket: str,
+    s3_key: str,
+    expiration_secs: int,
+    expected_url: str,
+) -> None:
+    """Tests the generate_s3_download_url method of AWSApi."""
+    aws_api = AWSApi(credentials=mock_aws_credentials, region=region)
+
+    mock_s3_client_on_instance = mocker.MagicMock()
+    aws_api.s3_client = mock_s3_client_on_instance
+
+    mock_s3_client_on_instance.generate_presigned_url.return_value = expected_url
+
+    result = aws_api.generate_s3_download_url(
+        bucket=bucket, s3_key=s3_key, expiration_secs=expiration_secs
+    )
+
+    mock_s3_client_on_instance.generate_presigned_url.assert_called_once_with(
+        "get_object",
+        Params={"Bucket": bucket, "Key": s3_key},
+        ExpiresIn=expiration_secs,
+    )
+    assert result == expected_url
diff --git a/packages/integration_tests/tests/actions/test_external_resource_rds_logs.py b/packages/integration_tests/tests/actions/test_external_resource_rds_logs.py
new file mode 100644
index 0000000..f99072b
--- /dev/null
+++ b/packages/integration_tests/tests/actions/test_external_resource_rds_logs.py
@@ -0,0 +1,85 @@
+import re
+from collections.abc import Callable
+
+import pytest
+import requests
+from automated_actions_client import AuthenticatedClient
+from automated_actions_client.api.actions import external_resource_rds_logs
+from automated_actions_client.models.action_schema_out import ActionSchemaOut
+from automated_actions_client.models.action_status import ActionStatus
+from automated_actions_utils.aws_api import AWSApi, get_aws_credentials
+from automated_actions_utils.external_resource import (
+    ExternalResourceProvider,
+    get_external_resource,
+)
+
+from tests.conftest import Config
+
+
+@pytest.fixture
+def aws_api(config: Config) -> AWSApi:
+    rds = get_external_resource(
+        account=config.external_resource_rds_logs.account,
+        identifier=config.external_resource_rds_logs.identifier,
+        provider=ExternalResourceProvider.RDS,
+    )
+    credentials = get_aws_credentials(
+        vault_secret=rds.account.automation_token, region=rds.account.region
+    )
+    return AWSApi(credentials=credentials, region=rds.region)
+
+
+@pytest.fixture(scope="session")
+def action_id(aa_client: AuthenticatedClient, config: Config) -> str:
+    """Trigger an RDS logs action and return the action id.
+
+    We use a session-scoped pytest fixture so that a retry via the flaky mark
+    does not trigger multiple actions.
+    """
+    action = external_resource_rds_logs.sync(
+        account=config.external_resource_rds_logs.account,
+        identifier=config.external_resource_rds_logs.identifier,
+        client=aa_client,
+        expiration_days=3,
+        s3_file_name=config.external_resource_rds_logs.s3_file_name,
+    )
+    assert isinstance(action, ActionSchemaOut)
+    assert action.status == ActionStatus.PENDING
+    assert not action.result
+    return action.action_id
+
+
+def test_external_resource_rds_logs(
+    action_id: str,
+    wait_for_action_success: Callable,
+    aws_api: AWSApi,
+    config: Config,
+) -> None:
+    """Test the RDS logs action retrieves logs and uploads them to S3."""
+    # verify that RDS logs exist for the instance before running the action
+    log_files = aws_api.list_rds_logs(config.external_resource_rds_logs.identifier)
+    assert log_files, (
+        f"No log files found for RDS instance {config.external_resource_rds_logs.identifier}"
+    )
+
+    # wait for the action to complete and assert it was successful
+    action_result = wait_for_action_success(
+        action_id=action_id,
+        retries=config.external_resource_rds_logs.retries,
+        sleep_time=config.external_resource_rds_logs.sleep_time,
+    )
+
+    # verify the action result contains a download URL
+    assert "Download the RDS logs from the following URL:" in action_result.result
+    assert "This link will expire in" in action_result.result
+
+    # extract the download URL and verify it's accessible via HTTP HEAD
+    url_match = re.search(r"URL: (https?://[^\s]+)", action_result.result)
+    assert url_match, "No download URL found in action result"
+    download_url = url_match.group(1).rstrip(".")
+
+    # verify the zip file is accessible without downloading it
+    response = requests.head(download_url, timeout=30)
+    assert response.status_code == requests.codes.OK, (
+        f"Download URL is not accessible: {response.status_code}"
+    )
diff --git a/packages/integration_tests/tests/conftest.py b/packages/integration_tests/tests/conftest.py
index 780c887..b871641 100644
--- a/packages/integration_tests/tests/conftest.py
+++ b/packages/integration_tests/tests/conftest.py
@@ -20,6 +20,12 @@ class ExternalResourceFlushElasticache(BaseParameters):
     identifier: str


+class ExternalResourceRDSLogs(BaseParameters):
+    account: str
+    identifier: str
+    s3_file_name: str = "automated-actions-integration-tests.zip"
+
+
 class ExternalResourceRDSReboot(BaseParameters):
     account: str
     identifier: str
@@ -66,6 +72,7 @@ class Config(BaseSettings):
     token: str

     external_resource_flush_elasticache: ExternalResourceFlushElasticache
+    external_resource_rds_logs: ExternalResourceRDSLogs
     external_resource_rds_reboot: ExternalResourceRDSReboot
     external_resource_rds_snapshot: ExternalResourceRDSSnapshot
     no_op: NoOpParameters = NoOpParameters(sleep_time=2)
diff --git a/uv.lock b/uv.lock
index 8277a3a..2ea012e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -129,12 +129,12 @@ dev = [
     { name = "pytest-httpx", specifier = "==0.35.0" },
     { name = "pytest-mock", specifier = "==3.14.1" },
     { name = "requests-mock", specifier = "==1.12.1" },
-    { name = "ruff", specifier = "==0.12.5" },
+    { name = "ruff", specifier = "==0.12.7" },
 ]

 [[package]]
 name = "automated-actions-cli"
-version = "0.1.5"
+version = "0.1.7"
 source = { editable = "packages/automated_actions_cli" }
 dependencies = [
     { name = "appdirs" },
@@ -175,13 +175,13 @@ dev = [
     { name = "mypy", specifier = "==1.17.0" },
     { name = "pytest", specifier = "==8.4.1" },
     { name = "pytest-cov", specifier = "==6.2.1" },
-    { name = "ruff", specifier = "==0.12.5" },
+    { name = "ruff", specifier = "==0.12.7" },
     { name = "types-pyyaml", specifier = "==6.0.12.20250516" },
 ]

 [[package]]
 name = "automated-actions-client"
-version = "0.1.4"
+version = "0.1.6"
 source = { editable = "packages/automated_actions_client" }
 dependencies = [
     { name = "attrs" },
@@ -211,10 +211,10 @@ requires-dist = [

 [package.metadata.requires-dev]
 dev = [
     { name = "mypy", specifier = "==1.17.0" },
-    { name = "openapi-python-client", specifier = "==0.25.2" },
+    { name = "openapi-python-client", specifier = "==0.25.3" },
     { name = "pytest", specifier = "==8.4.1" },
     { name = "pytest-cov", specifier = "==6.2.1" },
-    { name = "ruff", specifier = "==0.12.5" },
+    { name = "ruff", specifier = "==0.12.7" },
     { name = "types-python-dateutil", specifier = "==2.9.0.20250708" },
 ]

@@ -236,6 +236,7 @@ dependencies = [
     { name = "pydantic" },
     { name = "types-boto3-lite", extra = ["rds"] },
     { name = "types-hvac" },
+    { name = "zipstream-ng" },
 ]

 [package.dev-dependencies]
@@ -246,18 +247,21 @@ dev = [
     { name = "mypy" },
     { name = "pytest" },
     { name = "pytest-cov" },
     { name = "pytest-mock" },
     { name = "qenerate" },
     { name = "ruff" },
+    { name = "types-boto3-lite", extra = ["rds", "s3"] },
+    { name = "types-hvac" },
 ]

 [package.metadata]
 requires-dist = [
-    { name = "boto3", specifier = "==1.39.13" },
+    { name = "boto3", specifier = "==1.39.16" },
     { name = "gql", specifier = "==3.5.3" },
     { name = "hvac", specifier = "==2.3.0" },
     { name = "kubernetes", specifier = "==33.1.0" },
     { name = "openshift", specifier = "==0.13.2" },
     { name = "pydantic", specifier = "==2.11.7" },
-    { name = "types-boto3-lite", extras = ["rds"], specifier = "==1.39.13" },
+    { name = "types-boto3-lite", extras = ["rds"], specifier = "==1.39.16" },
     { name = "types-hvac", specifier = "==2.3.0.20250516" },
+    { name = "zipstream-ng", specifier = "==1.8.0" },
 ]

 [package.metadata.requires-dev]
@@ -267,7 +271,9 @@ dev = [
     { name = "pytest-cov", specifier = "==6.2.1" },
     { name = "pytest-mock", specifier = "==3.14.1" },
     { name = "qenerate", specifier = "==0.8.0" },
"qenerate", specifier = "==0.8.0" }, - { name = "ruff", specifier = "==0.12.5" }, + { name = "ruff", specifier = "==0.12.7" }, + { name = "types-boto3-lite", extras = ["rds", "s3"], specifier = "==1.39.16" }, + { name = "types-hvac", specifier = "==2.3.0.20250516" }, ] [[package]] @@ -290,30 +296,30 @@ wheels = [ [[package]] name = "boto3" -version = "1.39.13" +version = "1.39.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/cc/5ebce7eeba468c0bf5092c94684039e8484ca00329e62f0627662c930959/boto3-1.39.13.tar.gz", hash = "sha256:ace50ccfc4caba235b020e7d36f0191aa399771cb6fe6e34b4359b671aab1a4b", size = 111862, upload-time = "2025-07-24T19:18:27.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/17/941d8af6b46dac8add650d4e2a77bccc61fd5f433b318edea6a67e7db548/boto3-1.39.16.tar.gz", hash = "sha256:d4ce6ba5c030d7ff2033b35e5574d2414e42b80b937bf40d080e11c4d9d0acc1", size = 111818, upload-time = "2025-07-29T19:21:32.389Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/bf/a6a73de65e3347305fc0aed129c63897e047042f55152414295a930700d6/boto3-1.39.13-py3-none-any.whl", hash = "sha256:8e62c5724dc06a1934fde155a2eb48cb851cc17ad0b5142da9eb9e46fe0355d3", size = 139899, upload-time = "2025-07-24T19:18:25.388Z" }, + { url = "https://files.pythonhosted.org/packages/83/61/60d6058f478c563b8cb3bdd3c912ca7f91c11bc2498533eb33f1e0e60c84/boto3-1.39.16-py3-none-any.whl", hash = "sha256:cf843228928fd1caebb46c21fbd757a390ce22672b937a354ae89a1d16cb99f8", size = 139901, upload-time = "2025-07-29T19:21:29.788Z" }, ] [[package]] name = "botocore" -version = "1.39.13" +version = "1.39.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/39/eb875fff1c1d3299da660ed6cb00dd9b313e831ef7d18cb3c0f346a39578/botocore-1.39.13.tar.gz", hash = "sha256:ee8053f34e425a40843daccfa78820d6891f0d4f85fc647ab98f9ba28c36f9e7", size = 14222597, upload-time = "2025-07-24T19:18:16.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/b8/f5bde4a029e05683300a57c0c13d4534a0410e1543601b0def6e1f6b205a/botocore-1.39.16.tar.gz", hash = "sha256:b5a1416849637aa8e72292ee3e7b11cd0c22f9b96f6043d2ac6ba0092a193188", size = 14241828, upload-time = "2025-07-29T19:21:20.849Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/ce/8bb5626b0313c3f203215124f432d04ef46de611a9377b0791386e7355b2/botocore-1.39.13-py3-none-any.whl", hash = "sha256:6318ae28984d05aaabe92160446d37a2c498951b34a4d5431bc1ec7eb0376417", size = 13882999, upload-time = "2025-07-24T19:18:11.103Z" }, + { url = "https://files.pythonhosted.org/packages/d4/69/273f907a4296e74740e12d1e4e777b4977df8d7722d14a94be2b7c95575d/botocore-1.39.16-py3-none-any.whl", hash = "sha256:1f1c3b614ac88fd68f824c481cfd7686460c38fe13c01e2963556e7186be3248", size = 13901879, upload-time = "2025-07-29T19:21:14.381Z" }, ] [[package]] @@ -454,22 +460,22 @@ wheels = [ [[package]] name = "coverage" -version = "7.10.0" +version = "7.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/8f/6ac7fbb29e35645065f7be835bfe3e0cce567f80390de2f3db65d83cb5e3/coverage-7.10.0.tar.gz", hash = "sha256:2768885aef484b5dcde56262cbdfba559b770bfc46994fe9485dc3614c7a5867", size = 819816, upload-time = 
"2025-07-24T16:53:00.896Z" } +sdist = { url = "https://files.pythonhosted.org/packages/87/0e/66dbd4c6a7f0758a8d18044c048779ba21fb94856e1edcf764bd5403e710/coverage-7.10.1.tar.gz", hash = "sha256:ae2b4856f29ddfe827106794f3589949a57da6f0d38ab01e24ec35107979ba57", size = 819938, upload-time = "2025-07-27T14:13:39.045Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/b4/7b419bb368c9f0b88889cb24805164f6e5550d7183fb59524f6173e0cf0b/coverage-7.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a2adcfdaf3b4d69b0c64ad024fe9dd6996782b52790fb6033d90f36f39e287df", size = 215124, upload-time = "2025-07-24T16:50:55.46Z" }, - { url = "https://files.pythonhosted.org/packages/f4/15/d862a806734c7e50fd5350cef18e22832ba3cdad282ca5660d6fd49def92/coverage-7.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d7b27c2c0840e8eeff3f1963782bd9d3bc767488d2e67a31de18d724327f9f6", size = 215364, upload-time = "2025-07-24T16:50:57.849Z" }, - { url = "https://files.pythonhosted.org/packages/a6/93/4671ca5b2f3650c961a01252cbad96cb41f7c0c2b85c6062f27740a66b06/coverage-7.10.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0ed50429786e935517570b08576a661fd79032e6060985ab492b9d39ba8e66ee", size = 246369, upload-time = "2025-07-24T16:50:59.505Z" }, - { url = "https://files.pythonhosted.org/packages/64/79/2ca676c712d0540df0d7957a4266232980b60858a7a654846af1878cfde0/coverage-7.10.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7171c139ab6571d70460ecf788b1dcaf376bfc75a42e1946b8c031d062bbbad4", size = 248798, upload-time = "2025-07-24T16:51:01.105Z" }, - { url = "https://files.pythonhosted.org/packages/82/c5/67e000b03ba5291f915ddd6ba7c3333e4fdee9ba003b914c8f8f2d966dfe/coverage-7.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a726aac7e6e406e403cdee4c443a13aed3ea3d67d856414c5beacac2e70c04e", size = 250260, upload-time = "2025-07-24T16:51:02.761Z" }, - { url = "https://files.pythonhosted.org/packages/9d/76/196783c425b5633db5c789b02a023858377bd73e4db4c805c2503cc42bbf/coverage-7.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2886257481a14e953e96861a00c0fe7151117a523f0470a51e392f00640bba03", size = 248171, upload-time = "2025-07-24T16:51:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/83/1f/bf86c75f42de3641b4bbeab9712ec2815a3a8f5939768077245a492fad9f/coverage-7.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:536578b79521e59c385a2e0a14a5dc2a8edd58761a966d79368413e339fc9535", size = 246368, upload-time = "2025-07-24T16:51:06.16Z" }, - { url = "https://files.pythonhosted.org/packages/2d/95/bfc9a3abef0b160404438e82ec778a0f38660c66a4b0ed94d0417d4d2290/coverage-7.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77fae95558f7804a9ceefabf3c38ad41af1da92b39781b87197c6440dcaaa967", size = 247578, upload-time = "2025-07-24T16:51:07.632Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/4fb2a284d56fe2a3ba0c76806923014854a64e503dc8ce21e5a2e6497eea/coverage-7.10.0-cp312-cp312-win32.whl", hash = "sha256:97803e14736493eb029558e1502fe507bd6a08af277a5c8eeccf05c3e970cb84", size = 217521, upload-time = "2025-07-24T16:51:09.56Z" }, - { url = "https://files.pythonhosted.org/packages/f7/30/3ab51058b75e9931fc48594d79888396cf009910fabebe12a6a636ab7f9e/coverage-7.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c73ab554e54ffd38d114d6bc4a7115fb0c840cf6d8622211bee3da26e4bd25d", size = 218308, upload-time = "2025-07-24T16:51:11.115Z" 
-    { url = "https://files.pythonhosted.org/packages/b0/34/2adc74fd132eaa1873b1688acb906b477216074ed8a37e90426eca6d2900/coverage-7.10.0-cp312-cp312-win_arm64.whl", hash = "sha256:3ae95d5a9aedab853641026b71b2ddd01983a0a7e9bf870a20ef3c8f5d904699", size = 216706, upload-time = "2025-07-24T16:51:12.632Z" },
-    { url = "https://files.pythonhosted.org/packages/09/df/7c34bada8ace39f688b3bd5bc411459a20a3204ccb0984c90169a80a9366/coverage-7.10.0-py3-none-any.whl", hash = "sha256:310a786330bb0463775c21d68e26e79973839b66d29e065c5787122b8dd4489f", size = 206777, upload-time = "2025-07-24T16:52:59.009Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/3f/b051feeb292400bd22d071fdf933b3ad389a8cef5c80c7866ed0c7414b9e/coverage-7.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6b7dc7f0a75a7eaa4584e5843c873c561b12602439d2351ee28c7478186c4da4", size = 214934, upload-time = "2025-07-27T14:11:36.096Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/e4/a61b27d5c4c2d185bdfb0bfe9d15ab4ac4f0073032665544507429ae60eb/coverage-7.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:607f82389f0ecafc565813aa201a5cade04f897603750028dd660fb01797265e", size = 215173, upload-time = "2025-07-27T14:11:38.005Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/01/40a6ee05b60d02d0bc53742ad4966e39dccd450aafb48c535a64390a3552/coverage-7.10.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f7da31a1ba31f1c1d4d5044b7c5813878adae1f3af8f4052d679cc493c7328f4", size = 246190, upload-time = "2025-07-27T14:11:39.887Z" },
+    { url = "https://files.pythonhosted.org/packages/11/ef/a28d64d702eb583c377255047281305dc5a5cfbfb0ee36e721f78255adb6/coverage-7.10.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51fe93f3fe4f5d8483d51072fddc65e717a175490804e1942c975a68e04bf97a", size = 248618, upload-time = "2025-07-27T14:11:41.841Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/ad/73d018bb0c8317725370c79d69b5c6e0257df84a3b9b781bda27a438a3be/coverage-7.10.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3e59d00830da411a1feef6ac828b90bbf74c9b6a8e87b8ca37964925bba76dbe", size = 250081, upload-time = "2025-07-27T14:11:43.705Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/dd/496adfbbb4503ebca5d5b2de8bed5ec00c0a76558ffc5b834fd404166bc9/coverage-7.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:924563481c27941229cb4e16eefacc35da28563e80791b3ddc5597b062a5c386", size = 247990, upload-time = "2025-07-27T14:11:45.244Z" },
+    { url = "https://files.pythonhosted.org/packages/18/3c/a9331a7982facfac0d98a4a87b36ae666fe4257d0f00961a3a9ef73e015d/coverage-7.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ca79146ee421b259f8131f153102220b84d1a5e6fb9c8aed13b3badfd1796de6", size = 246191, upload-time = "2025-07-27T14:11:47.093Z" },
+    { url = "https://files.pythonhosted.org/packages/62/0c/75345895013b83f7afe92ec595e15a9a525ede17491677ceebb2ba5c3d85/coverage-7.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2b225a06d227f23f386fdc0eab471506d9e644be699424814acc7d114595495f", size = 247400, upload-time = "2025-07-27T14:11:48.643Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/a9/98b268cfc5619ef9df1d5d34fee408ecb1542d9fd43d467e5c2f28668cd4/coverage-7.10.1-cp312-cp312-win32.whl", hash = "sha256:5ba9a8770effec5baaaab1567be916c87d8eea0c9ad11253722d86874d885eca", size = 217338, upload-time = "2025-07-27T14:11:50.258Z" },
+    { url = "https://files.pythonhosted.org/packages/fe/31/22a5440e4d1451f253c5cd69fdcead65e92ef08cd4ec237b8756dc0b20a7/coverage-7.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:9eb245a8d8dd0ad73b4062135a251ec55086fbc2c42e0eb9725a9b553fba18a3", size = 218125, upload-time = "2025-07-27T14:11:52.034Z" },
"https://files.pythonhosted.org/packages/fe/31/22a5440e4d1451f253c5cd69fdcead65e92ef08cd4ec237b8756dc0b20a7/coverage-7.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:9eb245a8d8dd0ad73b4062135a251ec55086fbc2c42e0eb9725a9b553fba18a3", size = 218125, upload-time = "2025-07-27T14:11:52.034Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2b/40d9f0ce7ee839f08a43c5bfc9d05cec28aaa7c9785837247f96cbe490b9/coverage-7.10.1-cp312-cp312-win_arm64.whl", hash = "sha256:7718060dd4434cc719803a5e526838a5d66e4efa5dc46d2b25c21965a9c6fcc4", size = 216523, upload-time = "2025-07-27T14:11:53.965Z" }, + { url = "https://files.pythonhosted.org/packages/0f/64/922899cff2c0fd3496be83fa8b81230f5a8d82a2ad30f98370b133c2c83b/coverage-7.10.1-py3-none-any.whl", hash = "sha256:fa2a258aa6bf188eb9a8948f7102a83da7c430a0dce918dbd8b60ef8fcb772d7", size = 206597, upload-time = "2025-07-27T14:13:37.221Z" }, ] [[package]] @@ -685,7 +691,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "mypy", specifier = "==1.17.0" }, - { name = "ruff", specifier = "==0.12.5" }, + { name = "ruff", specifier = "==0.12.7" }, { name = "types-pyyaml", specifier = "==6.0.12.20250516" }, ] @@ -868,7 +874,7 @@ wheels = [ [[package]] name = "openapi-python-client" -version = "0.25.2" +version = "0.25.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -883,9 +889,9 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/b0/8cda0476b5148016ee661a8079cf703d5be06127f1f2dba4e5bf26cb5de3/openapi_python_client-0.25.2.tar.gz", hash = "sha256:8a0a03c3391ff77793047b14f70bb8f6705b9150153d1bd5baf35fea6b1669c2", size = 124422, upload-time = "2025-07-03T02:42:18.065Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/af/df5579c9a3cf515501518634c9c2004fa0999a810a0ef1145fa3bc82beac/openapi_python_client-0.25.3.tar.gz", hash = "sha256:cafc6b5aebd0c55fe7be4d400b24d3b107f7ec64923b8117ef8aa7ceb2a918a4", size = 124703, upload-time = "2025-07-28T01:25:53.66Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/03/f9c3680dc8a04d0d7dfa79e2f83b29e56c8ac18d8824066f08320f2a242e/openapi_python_client-0.25.2-py3-none-any.whl", hash = "sha256:b2a6898804805cd6669d74aa0c93b4dc8da8575070885128ab1b8eae729b3bc8", size = 180849, upload-time = "2025-07-03T02:42:16.408Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d2/539333a5cf2f14f5dd9aa8a3a73a4029464328e3f6ac4350577a28ea08ce/openapi_python_client-0.25.3-py3-none-any.whl", hash = "sha256:008e4c5f3079f312c135b55b142eb9616eafbd739ad6d5f8e95add726d1a02f2", size = 182159, upload-time = "2025-07-28T01:25:51.941Z" }, ] [[package]] @@ -1353,27 +1359,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/cd/01015eb5034605fd98d829c5839ec2c6b4582b479707f7c1c2af861e8258/ruff-0.12.5.tar.gz", hash = "sha256:b209db6102b66f13625940b7f8c7d0f18e20039bb7f6101fbdac935c9612057e", size = 5170722, upload-time = "2025-07-24T13:26:37.456Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/de/ad2f68f0798ff15dd8c0bcc2889558970d9a685b3249565a937cd820ad34/ruff-0.12.5-py3-none-linux_armv6l.whl", hash = "sha256:1de2c887e9dec6cb31fcb9948299de5b2db38144e66403b9660c9548a67abd92", size = 11819133, upload-time = "2025-07-24T13:25:56.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/fc/c6b65cd0e7fbe60f17e7ad619dca796aa49fbca34bb9bea5f8faf1ec2643/ruff-0.12.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d1ab65e7d8152f519e7dea4de892317c9da7a108da1c56b6a3c1d5e7cf4c5e9a", size = 12501114, upload-time = "2025-07-24T13:25:59.471Z" }, - { url = "https://files.pythonhosted.org/packages/c5/de/c6bec1dce5ead9f9e6a946ea15e8d698c35f19edc508289d70a577921b30/ruff-0.12.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:962775ed5b27c7aa3fdc0d8f4d4433deae7659ef99ea20f783d666e77338b8cf", size = 11716873, upload-time = "2025-07-24T13:26:01.496Z" }, - { url = "https://files.pythonhosted.org/packages/a1/16/cf372d2ebe91e4eb5b82a2275c3acfa879e0566a7ac94d331ea37b765ac8/ruff-0.12.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b4cae449597e7195a49eb1cdca89fd9fbb16140c7579899e87f4c85bf82f73", size = 11958829, upload-time = "2025-07-24T13:26:03.721Z" }, - { url = "https://files.pythonhosted.org/packages/25/bf/cd07e8f6a3a6ec746c62556b4c4b79eeb9b0328b362bb8431b7b8afd3856/ruff-0.12.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b13489c3dc50de5e2d40110c0cce371e00186b880842e245186ca862bf9a1ac", size = 11626619, upload-time = "2025-07-24T13:26:06.118Z" }, - { url = "https://files.pythonhosted.org/packages/d8/c9/c2ccb3b8cbb5661ffda6925f81a13edbb786e623876141b04919d1128370/ruff-0.12.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1504fea81461cf4841778b3ef0a078757602a3b3ea4b008feb1308cb3f23e08", size = 13221894, upload-time = "2025-07-24T13:26:08.292Z" }, - { url = "https://files.pythonhosted.org/packages/6b/58/68a5be2c8e5590ecdad922b2bcd5583af19ba648f7648f95c51c3c1eca81/ruff-0.12.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c7da4129016ae26c32dfcbd5b671fe652b5ab7fc40095d80dcff78175e7eddd4", size = 14163909, upload-time = "2025-07-24T13:26:10.474Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d1/ef6b19622009ba8386fdb792c0743f709cf917b0b2f1400589cbe4739a33/ruff-0.12.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca972c80f7ebcfd8af75a0f18b17c42d9f1ef203d163669150453f50ca98ab7b", size = 13583652, upload-time = "2025-07-24T13:26:13.381Z" }, - { url = "https://files.pythonhosted.org/packages/62/e3/1c98c566fe6809a0c83751d825a03727f242cdbe0d142c9e292725585521/ruff-0.12.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dbbf9f25dfb501f4237ae7501d6364b76a01341c6f1b2cd6764fe449124bb2a", size = 12700451, upload-time = "2025-07-24T13:26:15.488Z" }, - { url = "https://files.pythonhosted.org/packages/24/ff/96058f6506aac0fbc0d0fc0d60b0d0bd746240a0594657a2d94ad28033ba/ruff-0.12.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c47dea6ae39421851685141ba9734767f960113d51e83fd7bb9958d5be8763a", size = 12937465, upload-time = "2025-07-24T13:26:17.808Z" }, - { url = "https://files.pythonhosted.org/packages/eb/d3/68bc5e7ab96c94b3589d1789f2dd6dd4b27b263310019529ac9be1e8f31b/ruff-0.12.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5076aa0e61e30f848846f0265c873c249d4b558105b221be1828f9f79903dc5", size = 11771136, upload-time = "2025-07-24T13:26:20.422Z" }, - { url = "https://files.pythonhosted.org/packages/52/75/7356af30a14584981cabfefcf6106dea98cec9a7af4acb5daaf4b114845f/ruff-0.12.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a5a4c7830dadd3d8c39b1cc85386e2c1e62344f20766be6f173c22fb5f72f293", size = 11601644, upload-time = "2025-07-24T13:26:22.928Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/67/91c71d27205871737cae11025ee2b098f512104e26ffd8656fd93d0ada0a/ruff-0.12.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:46699f73c2b5b137b9dc0fc1a190b43e35b008b398c6066ea1350cce6326adcb", size = 12478068, upload-time = "2025-07-24T13:26:26.134Z" }, - { url = "https://files.pythonhosted.org/packages/34/04/b6b00383cf2f48e8e78e14eb258942fdf2a9bf0287fbf5cdd398b749193a/ruff-0.12.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a655a0a0d396f0f072faafc18ebd59adde8ca85fb848dc1b0d9f024b9c4d3bb", size = 12991537, upload-time = "2025-07-24T13:26:28.533Z" }, - { url = "https://files.pythonhosted.org/packages/3e/b9/053d6445dc7544fb6594785056d8ece61daae7214859ada4a152ad56b6e0/ruff-0.12.5-py3-none-win32.whl", hash = "sha256:dfeb2627c459b0b78ca2bbdc38dd11cc9a0a88bf91db982058b26ce41714ffa9", size = 11751575, upload-time = "2025-07-24T13:26:30.835Z" }, - { url = "https://files.pythonhosted.org/packages/bc/0f/ab16e8259493137598b9149734fec2e06fdeda9837e6f634f5c4e35916da/ruff-0.12.5-py3-none-win_amd64.whl", hash = "sha256:ae0d90cf5f49466c954991b9d8b953bd093c32c27608e409ae3564c63c5306a5", size = 12882273, upload-time = "2025-07-24T13:26:32.929Z" }, - { url = "https://files.pythonhosted.org/packages/00/db/c376b0661c24cf770cb8815268190668ec1330eba8374a126ceef8c72d55/ruff-0.12.5-py3-none-win_arm64.whl", hash = "sha256:48cdbfc633de2c5c37d9f090ba3b352d1576b0015bfc3bc98eaf230275b7e805", size = 11951564, upload-time = "2025-07-24T13:26:34.994Z" }, +version = "0.12.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/81/0bd3594fa0f690466e41bd033bdcdf86cba8288345ac77ad4afbe5ec743a/ruff-0.12.7.tar.gz", hash = "sha256:1fc3193f238bc2d7968772c82831a4ff69252f673be371fb49663f0068b7ec71", size = 5197814, upload-time = "2025-07-29T22:32:35.877Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/d2/6cb35e9c85e7a91e8d22ab32ae07ac39cc34a71f1009a6f9e4a2a019e602/ruff-0.12.7-py3-none-linux_armv6l.whl", hash = "sha256:76e4f31529899b8c434c3c1dede98c4483b89590e15fb49f2d46183801565303", size = 11852189, upload-time = "2025-07-29T22:31:41.281Z" }, + { url = "https://files.pythonhosted.org/packages/63/5b/a4136b9921aa84638f1a6be7fb086f8cad0fde538ba76bda3682f2599a2f/ruff-0.12.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:789b7a03e72507c54fb3ba6209e4bb36517b90f1a3569ea17084e3fd295500fb", size = 12519389, upload-time = "2025-07-29T22:31:54.265Z" }, + { url = "https://files.pythonhosted.org/packages/a8/c9/3e24a8472484269b6b1821794141f879c54645a111ded4b6f58f9ab0705f/ruff-0.12.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e1c2a3b8626339bb6369116e7030a4cf194ea48f49b64bb505732a7fce4f4e3", size = 11743384, upload-time = "2025-07-29T22:31:59.575Z" }, + { url = "https://files.pythonhosted.org/packages/26/7c/458dd25deeb3452c43eaee853c0b17a1e84169f8021a26d500ead77964fd/ruff-0.12.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32dec41817623d388e645612ec70d5757a6d9c035f3744a52c7b195a57e03860", size = 11943759, upload-time = "2025-07-29T22:32:01.95Z" }, + { url = "https://files.pythonhosted.org/packages/7f/8b/658798472ef260ca050e400ab96ef7e85c366c39cf3dfbef4d0a46a528b6/ruff-0.12.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47ef751f722053a5df5fa48d412dbb54d41ab9b17875c6840a58ec63ff0c247c", size = 11654028, upload-time = "2025-07-29T22:32:04.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/86/9c2336f13b2a3326d06d39178fd3448dcc7025f82514d1b15816fe42bfe8/ruff-0.12.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a828a5fc25a3efd3e1ff7b241fd392686c9386f20e5ac90aa9234a5faa12c423", size = 13225209, upload-time = "2025-07-29T22:32:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/76/69/df73f65f53d6c463b19b6b312fd2391dc36425d926ec237a7ed028a90fc1/ruff-0.12.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5726f59b171111fa6a69d82aef48f00b56598b03a22f0f4170664ff4d8298efb", size = 14182353, upload-time = "2025-07-29T22:32:10.053Z" }, + { url = "https://files.pythonhosted.org/packages/58/1e/de6cda406d99fea84b66811c189b5ea139814b98125b052424b55d28a41c/ruff-0.12.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74e6f5c04c4dd4aba223f4fe6e7104f79e0eebf7d307e4f9b18c18362124bccd", size = 13631555, upload-time = "2025-07-29T22:32:12.644Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ae/625d46d5164a6cc9261945a5e89df24457dc8262539ace3ac36c40f0b51e/ruff-0.12.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0bfe4e77fba61bf2ccadf8cf005d6133e3ce08793bbe870dd1c734f2699a3e", size = 12667556, upload-time = "2025-07-29T22:32:15.312Z" }, + { url = "https://files.pythonhosted.org/packages/55/bf/9cb1ea5e3066779e42ade8d0cd3d3b0582a5720a814ae1586f85014656b6/ruff-0.12.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06bfb01e1623bf7f59ea749a841da56f8f653d641bfd046edee32ede7ff6c606", size = 12939784, upload-time = "2025-07-29T22:32:17.69Z" }, + { url = "https://files.pythonhosted.org/packages/55/7f/7ead2663be5627c04be83754c4f3096603bf5e99ed856c7cd29618c691bd/ruff-0.12.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e41df94a957d50083fd09b916d6e89e497246698c3f3d5c681c8b3e7b9bb4ac8", size = 11771356, upload-time = "2025-07-29T22:32:20.134Z" }, + { url = "https://files.pythonhosted.org/packages/17/40/a95352ea16edf78cd3a938085dccc55df692a4d8ba1b3af7accbe2c806b0/ruff-0.12.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4000623300563c709458d0ce170c3d0d788c23a058912f28bbadc6f905d67afa", size = 11612124, upload-time = "2025-07-29T22:32:22.645Z" }, + { url = "https://files.pythonhosted.org/packages/4d/74/633b04871c669e23b8917877e812376827c06df866e1677f15abfadc95cb/ruff-0.12.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:69ffe0e5f9b2cf2b8e289a3f8945b402a1b19eff24ec389f45f23c42a3dd6fb5", size = 12479945, upload-time = "2025-07-29T22:32:24.765Z" }, + { url = "https://files.pythonhosted.org/packages/be/34/c3ef2d7799c9778b835a76189c6f53c179d3bdebc8c65288c29032e03613/ruff-0.12.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a07a5c8ffa2611a52732bdc67bf88e243abd84fe2d7f6daef3826b59abbfeda4", size = 12998677, upload-time = "2025-07-29T22:32:27.022Z" }, + { url = "https://files.pythonhosted.org/packages/77/ab/aca2e756ad7b09b3d662a41773f3edcbd262872a4fc81f920dc1ffa44541/ruff-0.12.7-py3-none-win32.whl", hash = "sha256:c928f1b2ec59fb77dfdf70e0419408898b63998789cc98197e15f560b9e77f77", size = 11756687, upload-time = "2025-07-29T22:32:29.381Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/26d45a5042bc71db22ddd8252ca9d01e9ca454f230e2996bb04f16d72799/ruff-0.12.7-py3-none-win_amd64.whl", hash = "sha256:9c18f3d707ee9edf89da76131956aba1270c6348bfee8f6c647de841eac7194f", size = 12912365, upload-time = "2025-07-29T22:32:31.517Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/9b/0b8aa09817b63e78d94b4977f18b1fcaead3165a5ee49251c5d5c245bb2d/ruff-0.12.7-py3-none-win_arm64.whl", hash = "sha256:dfce05101dbd11833a0776716d5d1578641b7fddb537fe7fa956ab85d1769b69", size = 11982083, upload-time = "2025-07-29T22:32:33.881Z" }, ] [[package]] @@ -1454,21 +1460,24 @@ wheels = [ [[package]] name = "types-boto3-lite" -version = "1.39.13" +version = "1.39.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/42/e53e91ccb7b97663eb0b137c577830cf1265338f1c2b4e3efe32f4c96c26/types_boto3_lite-1.39.13.tar.gz", hash = "sha256:f61c544ed91d152ea251dfa81483b70beb627c2f22cf2fba42b4bb62f495d18b", size = 72973, upload-time = "2025-07-24T19:29:56.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/81/80e59a34580917c628c82aef9fa7577e9761d4486680371726a084b19d95/types_boto3_lite-1.39.16.tar.gz", hash = "sha256:148d18e47fefbc4d200fe5a42de4f0cae0827353a79ef5197bf4a644d21ed41b", size = 72964, upload-time = "2025-07-29T19:32:46.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/24/cb426d46f10d0418760d19e6ee2161e7c78886db09cf9d39e1db18c4763c/types_boto3_lite-1.39.13-py3-none-any.whl", hash = "sha256:427abe5ac1d93709285c35752183cae74f238e564ffb4a27b10f4fccb33ea934", size = 42588, upload-time = "2025-07-24T19:29:52.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/38/facd99eebb7bf141aadc14dc23a40f5b9a2c5d3a58f34a2969f1d386d64c/types_boto3_lite-1.39.16-py3-none-any.whl", hash = "sha256:020d8a1fc2f04cb2a8a58412a61d7f2b6acb2eb1e5a25c85656efc0dc390d87f", size = 42590, upload-time = "2025-07-29T19:32:39.887Z" }, ] [package.optional-dependencies] rds = [ { name = "types-boto3-rds" }, ] +s3 = [ + { name = "types-boto3-s3" }, +] [[package]] name = "types-boto3-rds" @@ -1479,6 +1488,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/8e/cdfe5c742e117900d6d138f6c429caff7e34827849184afb94c710e9523d/types_boto3_rds-1.39.1-py3-none-any.whl", hash = "sha256:36bf2d0aaa322306916fdda17c9d4546a4437208b39898293e946b2da120642f", size = 91214, upload-time = "2025-07-01T19:28:07.718Z" }, ] +[[package]] +name = "types-boto3-s3" +version = "1.39.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/68/184e6cf84d6e2f7bb54a8bdc01b3a23ab4b99db0b8c1cc02fdb529d10420/types_boto3_s3-1.39.5.tar.gz", hash = "sha256:34150a4ee656b74e939962869e8b73f29e9dcc7511b0102215f27e004bcbdc72", size = 75545, upload-time = "2025-07-15T22:39:12.201Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/10/3c8867dcc9f7dc70f0664389aca96ea149af89dd87acd62b330fb05fd8d4/types_boto3_s3-1.39.5-py3-none-any.whl", hash = "sha256:c49e103249f03948589387537a7964a30e50c3e971d0f2a9600fa3766406b196", size = 82504, upload-time = "2025-07-15T22:39:10.207Z" }, +] + [[package]] name = "types-hvac" version = "2.3.0.20250516" @@ -1660,3 +1678,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = 
"sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, ] + +[[package]] +name = "zipstream-ng" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/16/5d9224baf640214255c34a0a0e9528c8403d2b89e2ba7df9d7cada58beb1/zipstream_ng-1.8.0.tar.gz", hash = "sha256:b7129d2c15d26934b3e1cb22256593b6bdbd03c553c26f4199a5bf05110642bc", size = 35887, upload-time = "2024-10-10T05:22:33.213Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/81/11ecdfd5370d6c383f0188a6f2fa2842499e1be617e678d1845f972c6821/zipstream_ng-1.8.0-py3-none-any.whl", hash = "sha256:e7196cb845cf924ed12e7a3b38404ef9e82a5a699801295f5f4cf601449e2bf6", size = 23082, upload-time = "2024-10-10T05:22:31.655Z" }, +]