7 changes: 7 additions & 0 deletions actions.md
@@ -12,6 +12,13 @@ These are the core operations that users can request the system to perform.
  * **Required Parameters**: The AWS account name and the ElastiCache cluster identifier.
  * **Usage Example (CLI)**: `automated-actions external-resource-flush-elasticache --account aws-account-name --identifier my-elasticache-cluster`

* **`external-resource-rds-logs`**:
  * **Description**: Retrieves logs from an Amazon RDS instance and stores them in an S3 bucket.
  * **Use Case**: Typically used for troubleshooting database issues, analyzing performance problems, or collecting logs for audit purposes.
  * **Required Parameters**: The AWS account name and the RDS instance identifier.
  * **Optional Parameters**: Expiration time in days for the generated download URL (1-7, default: 7), and the S3 target file name (defaults to '{account}-{identifier}.zip').
**Review comment (Contributor):** I would clarify that this is the expiration date of the URL you are going to get as a result.

  * **Usage Example (CLI)**: `automated-actions external-resource-rds-logs --account aws-account-name --identifier my-rds-instance --expiration-days 5 --s3-file-name my-custom-logs.zip`

* **`external-resource-rds-reboot`**:
  * **Description**: Reboots an Amazon RDS instance.
  * **Use Case**: Typically used for maintenance, applying updates, or resolving performance issues.
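Note on the review comment above: in the `external_resource_rds_logs` task later in this diff, `--expiration-days` bounds the lifetime of the presigned S3 download URL returned as the action result, not how long the logs themselves are kept. A minimal sketch of the conversion the task performs; the 1-7 day bound also matches the 7-day maximum for SigV4 presigned URLs:

```python
# Mirrors the expiration computation in the external_resource_rds_logs task below.
expiration_days = 5  # API bound: 1 <= expiration_days <= 7
expiration_secs = expiration_days * 24 * 3600  # passed to the S3 URL helper
```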
3 changes: 2 additions & 1 deletion docker-compose.yml
@@ -8,7 +8,7 @@ services:
     environment:
       - DEBUG=${DEBUG-}
       - DOCKER_HOST=unix:///var/run/docker.sock
-      - SERVICES=${SERVICES:-sqs,dynamodb}
+      - SERVICES=${SERVICES:-sqs,dynamodb,s3}
       - PERSISTENCE=${PERSISTENCE:-0}
     volumes:
       - "${LOCALSTACK_VOLUME_DIR:-./.localstack_volume}:/var/lib/localstack"
@@ -83,6 +83,7 @@ services:
       - AA_BROKER_URL=sqs://localstack:4566
       - AA_SQS_URL=http://localstack:4566/000000000000/automated-actions
       - AA_DYNAMODB_URL=http://localstack:4566
+      - AA_EXTERNAL_RESOURCE_RDS_LOGS__S3_URL=http://localstack:4566
     build:
       context: .
       dockerfile: Dockerfile
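The new `AA_EXTERNAL_RESOURCE_RDS_LOGS__S3_URL` variable follows the same `AA_` prefix and double-underscore nesting as the other `AA_*` settings, which suggests a pydantic-settings model underneath. A minimal sketch of that mapping, assuming pydantic-settings; the model and field names here are illustrative, not taken from this PR:

```python
from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class RDSLogsSettings(BaseModel):  # hypothetical model name
    s3_url: str | None = None  # http://localstack:4566 in local development
    bucket: str = "automated-actions"  # assumed default, see init-s3.sh below


class Settings(BaseSettings):  # hypothetical; mirrors the AA_* convention
    model_config = SettingsConfigDict(env_prefix="AA_", env_nested_delimiter="__")

    external_resource_rds_logs: RDSLogsSettings = RDSLogsSettings()


# AA_EXTERNAL_RESOURCE_RDS_LOGS__S3_URL=http://localstack:4566 would populate
# Settings().external_resource_rds_logs.s3_url
```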
4 changes: 4 additions & 0 deletions localstack/init-s3.sh
@@ -0,0 +1,4 @@
#!/bin/bash

# create bucket
awslocal s3api create-bucket --bucket automated-actions
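Assuming this script is mounted into LocalStack's `/etc/localstack/init/ready.d` hook directory (the usual pattern; the compose file is truncated here), the bucket can be smoke-tested from the host with boto3; the endpoint and dummy credentials are LocalStack defaults:

```python
import boto3

# LocalStack accepts any static credentials; the edge port defaults to 4566.
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:4566",
    aws_access_key_id="test",
    aws_secret_access_key="test",
    region_name="us-east-1",
)
print([b["Name"] for b in s3.list_buckets()["Buckets"]])  # expect ['automated-actions']
```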
@@ -7,26 +7,84 @@
from automated_actions.celery.external_resource.tasks import (
    external_resource_flush_elasticache as external_resource_flush_elasticache_task,
)
+from automated_actions.celery.external_resource.tasks import (
+    external_resource_rds_logs as external_resource_rds_logs_task,
+)
from automated_actions.celery.external_resource.tasks import (
    external_resource_rds_reboot as external_resource_rds_reboot_task,
)
from automated_actions.celery.external_resource.tasks import (
    external_resource_rds_snapshot as external_resource_rds_snapshot_task,
)
-from automated_actions.db.models import (
-    Action,
-    ActionSchemaOut,
-)
+from automated_actions.db.models import Action, ActionSchemaOut
from automated_actions.db.models._action import ActionManager, get_action_manager

router = APIRouter()
log = logging.getLogger(__name__)

-EXTERNAL_RESOURCE_RDS_REBOOT_ACTION_ID = "external-resource-rds-reboot"
EXTERNAL_RESOURCE_FLUSH_ELASTICACHE_ACTION_ID = "external-resource-flush-elasticache"
+EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID = "external-resource-rds-logs"
+EXTERNAL_RESOURCE_RDS_REBOOT_ACTION_ID = "external-resource-rds-reboot"
EXTERNAL_RESOURCE_RDS_SNAPSHOT_ACTION_ID = "external-resource-rds-snapshot"


def get_action_external_resource_rds_logs(
    action_mgr: Annotated[ActionManager, Depends(get_action_manager)], user: UserDep
) -> Action:
    """Get a new action object for the user.

    Args:
        action_mgr: The action manager dependency.
        user: The user dependency.

    Returns:
        A new Action object.
    """
    return action_mgr.create_action(
        name=EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID, owner=user
    )


@router.post(
    "/external-resource/rds-logs/{account}/{identifier}",
    operation_id=EXTERNAL_RESOURCE_RDS_LOGS_ACTION_ID,
    status_code=202,
    tags=["Actions"],
)
def external_resource_rds_logs(
    account: Annotated[str, Path(description="AWS account name")],
    identifier: Annotated[str, Path(description="RDS instance identifier")],
    action: Annotated[Action, Depends(get_action_external_resource_rds_logs)],
    expiration_days: Annotated[
        int,
        Query(
            description="Expiration time in days for the generated download URL",
            ge=1,
            le=7,
        ),
    ] = 7,
    s3_file_name: Annotated[
        str | None,
        Query(
            description="The S3 target file name. Defaults to '{account}-{identifier}.zip' if not provided."
        ),
    ] = None,
) -> ActionSchemaOut:
    """Get RDS logs for an instance.

    This action retrieves logs from a specified RDS instance in a given AWS account and stores them in an S3 bucket.
    """
    log.info(
        f"Getting logs for RDS {identifier} in AWS account {account}. action_id={action.action_id}"
    )
    external_resource_rds_logs_task.apply_async(
        kwargs={
            "account": account,
            "identifier": identifier,
            "expiration_days": expiration_days,
            "s3_file_name": s3_file_name,
            "action": action,
        },
        task_id=action.action_id,
    )
    return action.dump()


def get_action_external_resource_rds_reboot(
    action_mgr: Annotated[ActionManager, Depends(get_action_manager)], user: UserDep
) -> Action:
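For reference, a hedged sketch of calling the new endpoint over HTTP; the host, port, `/api/v1` mount prefix, and auth header are assumptions, not taken from this diff:

```python
import requests

resp = requests.post(
    "http://localhost:8080/api/v1/external-resource/rds-logs"
    "/aws-account-name/my-rds-instance",
    params={"expiration_days": 5, "s3_file_name": "my-custom-logs.zip"},
    headers={"Authorization": "Bearer <token>"},  # placeholder credential
    timeout=30,
)
assert resp.status_code == 202  # accepted; the work runs in the Celery task
print(resp.json())  # the serialized Action, including action_id for polling
```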
@@ -29,12 +29,12 @@ def before_start(  # noqa: PLR6301

    def on_success(  # noqa: PLR6301
        self,
-        retval: Any,  # noqa: ARG002
+        retval: Any,
        task_id: str,  # noqa: ARG002
        args: tuple,  # noqa: ARG002
        kwargs: dict,
    ) -> None:
-        result = "ok"
+        result = "ok" if retval is None else str(retval)
        kwargs["action"].set_final_state(
            status=ActionStatus.SUCCESS,
            result=result,
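The effect of this change in isolation: a task that returns a string (such as `external_resource_rds_logs` below, which returns the download-URL message) now has that string recorded as the action result, while tasks returning `None` keep the previous `"ok"`:

```python
def final_result(retval: object) -> str:
    # Mirrors the updated on_success logic above.
    return "ok" if retval is None else str(retval)


assert final_result(None) == "ok"
assert final_result("Download the RDS logs from ...").startswith("Download")
```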
@@ -0,0 +1,85 @@
from automated_actions_utils.cluster_connection import get_cluster_connection_data
from automated_actions_utils.external_resource import (
    ExternalResource,
    ExternalResourceProvider,
    get_external_resource,
    settings,
)
from automated_actions_utils.openshift_client import (
    OpenshiftClient,
    SecretKeyRef,
    job_builder,
)

from automated_actions.celery.app import app
from automated_actions.celery.automated_action_task import AutomatedActionTask
from automated_actions.db.models import Action


class ExternalResourceFlushElastiCache:
    def __init__(
        self, action: Action, oc: OpenshiftClient, elasticache: ExternalResource
    ) -> None:
        self.action = action
        self.oc = oc
        self.elasticache = elasticache

    def run(
        self,
        image: str,
        command: list[str],
        args: list[str],
        secret_name: str,
        env_secret_mappings: dict[str, str],
    ) -> None:
        job = job_builder(
            image=image,
            command=command,
            args=args,
            job_name_prefix="flush-elasticache-",
            annotations={
                "automated-actions.action_id": str(self.action.action_id),
            },
            env_secrets={
                key: SecretKeyRef(
                    secret=secret_name,
                    key=value,
                )
                for key, value in env_secret_mappings.items()
            },
        )
        # no return: the method is annotated -> None
        self.oc.run_job(namespace=self.elasticache.namespace, job=job)


@app.task(base=AutomatedActionTask)
def external_resource_flush_elasticache(
    account: str,
    identifier: str,
    *,
    action: Action,
) -> None:
    elasticache = get_external_resource(
        account=account,
        identifier=identifier,
        provider=ExternalResourceProvider.ELASTICACHE,
    )

    cluster_connection = get_cluster_connection_data(elasticache.cluster, settings)
    oc = OpenshiftClient(
        server_url=cluster_connection.url, token=cluster_connection.token
    )
    if not elasticache.output_resource_name:
        raise ValueError(
            f"Output resource name not defined for {elasticache.identifier} in {elasticache.namespace} namespace."
        )
    ExternalResourceFlushElastiCache(
        action=action,
        oc=oc,
        elasticache=elasticache,
    ).run(
        image=settings.external_resource_elasticache.image,
        command=settings.external_resource_elasticache.flush_command,
        args=settings.external_resource_elasticache.flush_command_args,
        secret_name=elasticache.output_resource_name,
        env_secret_mappings=settings.external_resource_elasticache.env_secret_mappings,
    )
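For reference, this task is enqueued the same way the router dispatches `external_resource_rds_logs` earlier in this diff; a sketch, where the `action` object would come from `ActionManager.create_action` in practice:

```python
# Hypothetical dispatch, mirroring the apply_async pattern used by the router.
external_resource_flush_elasticache.apply_async(
    kwargs={
        "account": "aws-account-name",
        "identifier": "my-elasticache-cluster",
        "action": action,  # an Action created by the ActionManager
    },
    task_id=action.action_id,
)
```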
@@ -0,0 +1,121 @@
import logging

from automated_actions_utils.aws_api import (
    AWSApi,
    AWSStaticCredentials,
    LogStream,
    get_aws_credentials,
)
from automated_actions_utils.external_resource import (
    ExternalResource,
    ExternalResourceProvider,
    get_external_resource,
)

from automated_actions.celery.app import app
from automated_actions.celery.automated_action_task import AutomatedActionTask
from automated_actions.config import settings
from automated_actions.db.models import Action

log = logging.getLogger(__name__)


class ExternalResourceRDSLogs:
    """Class to handle RDS logs retrieval."""

    def __init__(
        self, aws_api: AWSApi, rds: ExternalResource, s3_bucket: str, s3_prefix: str
    ) -> None:
        self.aws_api = aws_api
        self.rds = rds
        self.s3_bucket = s3_bucket
        self.s3_prefix = s3_prefix

    def run(
        self,
        target_aws_api: AWSApi,
        expiration_days: int,
        s3_file_name: str | None = None,
    ) -> str | None:
        """Retrieve RDS logs and upload them to S3 as a zip file."""
        s3_key = (
            s3_file_name
            or f"{self.s3_prefix}/{self.rds.account.name}-{self.rds.identifier}.zip"
        )
        # append .zip to the file name if not already present
        if not s3_key.endswith(".zip"):
            s3_key += ".zip"

        log.info(
            f"Saving RDS logs for {self.rds.account.name}/{self.rds.identifier} to S3 {self.s3_bucket}/{s3_key}"
        )
        log_streams = [
            LogStream(
                name=log_file,
                content=self.aws_api.stream_rds_log(
                    identifier=self.rds.identifier, log_file=log_file
                ),
            )
            for log_file in self.aws_api.list_rds_logs(self.rds.identifier)
        ]
        if not log_streams:
            log.warning(
                f"No logs found for RDS {self.rds.identifier} in account {self.rds.account.name}"
            )
            return None
        self.aws_api.stream_rds_logs_to_s3_zip(
            log_streams=log_streams,
            bucket=self.s3_bucket,
            s3_key=s3_key,
            target_aws_api=target_aws_api,
        )
        return self.aws_api.generate_s3_download_url(
            bucket=self.s3_bucket,
            s3_key=s3_key,
            expiration_secs=expiration_days * 24 * 3600,
        )


@app.task(base=AutomatedActionTask)
def external_resource_rds_logs(
    account: str,
    identifier: str,
    expiration_days: int,
    action: Action,  # noqa: ARG001
    s3_file_name: str | None = None,
) -> str:
    rds = get_external_resource(
        account=account, identifier=identifier, provider=ExternalResourceProvider.RDS
    )
    rds_account_credentials = get_aws_credentials(
        vault_secret=rds.account.automation_token, region=rds.account.region
    )

    log_account_credentials = AWSStaticCredentials(
        access_key_id=settings.external_resource_rds_logs.access_key_id,
        secret_access_key=settings.external_resource_rds_logs.secret_access_key,
        region=settings.external_resource_rds_logs.region,
    )

    with (
        AWSApi(credentials=rds_account_credentials, region=rds.region) as aws_api,
        AWSApi(
            credentials=log_account_credentials,
            s3_endpoint_url=settings.external_resource_rds_logs.s3_url,
        ) as log_aws_api,
    ):
        url = ExternalResourceRDSLogs(
            aws_api=aws_api,
            rds=rds,
            s3_bucket=settings.external_resource_rds_logs.bucket,
            s3_prefix=settings.external_resource_rds_logs.prefix,
        ).run(
            target_aws_api=log_aws_api,
            expiration_days=expiration_days,
            s3_file_name=s3_file_name,
        )

    if not url:
        return "No logs found or no logs available for download."

    return f"Download the RDS logs from the following URL: {url}. This link will expire in {expiration_days} days."
@@ -0,0 +1,42 @@
from automated_actions_utils.aws_api import AWSApi, get_aws_credentials
from automated_actions_utils.external_resource import (
    ExternalResource,
    ExternalResourceProvider,
    get_external_resource,
)

from automated_actions.celery.app import app
from automated_actions.celery.automated_action_task import AutomatedActionTask
from automated_actions.db.models import Action


class ExternalResourceRDSReboot:
    def __init__(self, aws_api: AWSApi, rds: ExternalResource) -> None:
        self.aws_api = aws_api
        self.rds = rds

    def run(self, *, force_failover: bool) -> None:
        self.aws_api.reboot_rds_instance(
            identifier=self.rds.identifier, force_failover=force_failover
        )


@app.task(base=AutomatedActionTask)
def external_resource_rds_reboot(
    account: str,
    identifier: str,
    *,
    force_failover: bool,
    action: Action,  # noqa: ARG001
) -> None:
    rds = get_external_resource(
        account=account,
        identifier=identifier,
        provider=ExternalResourceProvider.RDS,
    )

    credentials = get_aws_credentials(
        vault_secret=rds.account.automation_token, region=rds.account.region
    )
    with AWSApi(credentials=credentials, region=rds.region) as aws_api:
        ExternalResourceRDSReboot(aws_api, rds).run(force_failover=force_failover)
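`reboot_rds_instance` is likewise defined outside this diff; a plausible boto3 equivalent (an assumption, not the project's actual wrapper):

```python
import boto3


def reboot_rds_instance(identifier: str, *, force_failover: bool) -> None:
    # ForceFailover requests a failover reboot; it is only valid for
    # Multi-AZ instances.
    rds = boto3.client("rds")
    rds.reboot_db_instance(
        DBInstanceIdentifier=identifier,
        ForceFailover=force_failover,
    )
```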