feat(logs): AWSX-1592 Move dms, docdb, fsx and opensearch to logs-bac… #939

Open · wants to merge 2 commits into master

17 changes: 0 additions & 17 deletions aws/logs_monitoring/steps/enums.py
@@ -7,13 +7,9 @@ class AwsEventSource(Enum):
CLOUDFRONT = "cloudfront"
CLOUDTRAIL = "cloudtrail"
CLOUDWATCH = "cloudwatch"
DMS = "dms"
DOCDB = "docdb"
EKS = "eks"
ELASTICSEARCH = "elasticsearch"
ELB = "elb"
FARGATE = "fargate"
FSX = "aws.fsx"
GUARDDUTY = "guardduty"
IAMAUTHENTICATOR = "aws-iam-authenticator"
KINESIS = "kinesis"
@@ -26,7 +22,6 @@ class AwsEventSource(Enum):
MSK = "msk"
MYSQL = "mysql"
NETWORKFIREWALL = "network-firewall"
OPENSEARCH = "opensearch"
POSTGRESQL = "postgresql"
ROUTE53 = "route53"
S3 = "s3"
@@ -68,8 +63,6 @@ def __init__(self, string, event_source):

# e.g. carbon-black-cloud-forwarder/alerts/org_key=*****/year=2021/month=7/day=19/hour=18/minute=15/second=41/8436e850-7e78-40e4-b3cd-6ebbc854d0a2.jsonl.gz
CARBONBLACK = ("carbon-black", AwsEventSource.CARBONBLACK)
DMS = ("amazon_dms", AwsEventSource.DMS)
DOCDB = ("amazon_documentdb", AwsEventSource.DOCDB)
# e.g. AWSLogs/123456779121/elasticloadbalancing/us-east-1/2020/10/02/123456779121_elasticloadbalancing_us-east-1_app.alb.xxxxx.xx.xxx.xxx_x.log.gz
ELB = ("elasticloadbalancing", AwsEventSource.ELB)
GUARDDUTY = ("guardduty", AwsEventSource.GUARDDUTY)
@@ -94,20 +87,10 @@ def __init__(self, string, event_source):

# e.g. /aws/codebuild/my-project
CLOUDTRAIL = ("_CloudTrail_", AwsEventSource.CLOUDTRAIL)
# e.g. dms-tasks-test-instance
DMS = ("dms-tasks", AwsEventSource.DMS)
# e.g. /aws/docdb/yourClusterName/profile
DOCDB = ("/aws/docdb", AwsEventSource.DOCDB)
# e.g. /aws/eks/yourClusterName/profile
EKS = ("/aws/eks", AwsEventSource.EKS)
# e.g. /aws/fsx/windows/xxx
FSX = ("/aws/fsx/windows", AwsEventSource.FSX)
# e.g. /aws/kinesisfirehose/dev
KINESIS = ("/aws/kinesis", AwsEventSource.KINESIS)
# e.g. /aws/lambda/helloDatadog
LAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)
# e.g. /aws/opensearchservice/domains/my-cluster
OPENSEARCH = ("/aws/opensearchservice/domains/", AwsEventSource.OPENSEARCH)
# e.g. sns/us-east-1/123456779121/SnsTopicX
SNS = ("sns/", AwsEventSource.SNS)
SSM = ("/aws/ssm/", AwsEventSource.SSM)
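For readers skimming the diff: the members removed above come from lookup enums that pair a matching string (an S3 key substring or a CloudWatch log-group prefix) with the `AwsEventSource` it resolves to, via the `__init__(self, string, event_source)` visible in the hunk headers. The sketch below is a minimal, self-contained illustration of that pattern; the class names, member lists, and helper function are illustrative stand-ins, not the project's actual code.

```python
from enum import Enum


class AwsEventSource(Enum):
    CLOUDWATCH = "cloudwatch"
    EKS = "eks"
    LAMBDA = "lambda"

    def __str__(self):
        return self.value


class LogGroupKeyword(Enum):
    # Each member pairs a log-group prefix with the source it resolves to,
    # mirroring the (string, event_source) tuples in enums.py.
    EKS = ("/aws/eks", AwsEventSource.EKS)
    LAMBDA = ("/aws/lambda", AwsEventSource.LAMBDA)

    def __init__(self, string, event_source):
        self.string = string
        self.event_source = event_source


def source_for_log_group(log_group):
    # First matching prefix wins; anything unmatched keeps the generic
    # "cloudwatch" source, which is presumably what dms/docdb/fsx/opensearch
    # log groups get once their entries are removed.
    for keyword in LogGroupKeyword:
        if log_group.startswith(keyword.string):
            return str(keyword.event_source)
    return str(AwsEventSource.CLOUDWATCH)


print(source_for_log_group("/aws/eks/my-cluster/cluster"))  # -> eks
print(source_for_log_group("dms-tasks-test-instance"))      # -> cloudwatch
```
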
19 changes: 0 additions & 19 deletions aws/logs_monitoring/steps/handlers/awslogs_handler.py
@@ -60,11 +60,6 @@ def handle(self, event):
# then rebuild the arn of the monitored lambda using that name.
if metadata[DD_SOURCE] == str(AwsEventSource.LAMBDA):
self.process_lambda_logs(metadata, aws_attributes)
# The EKS log group contains various sources from the K8S control plane.
# In order to have these automatically trigger the correct pipelines they
# need to send their events with the correct log source.
if metadata[DD_SOURCE] == str(AwsEventSource.EKS):
self.process_eks_logs(metadata, aws_attributes)
# Create and send structured logs to Datadog
for log in logs["logEvents"]:
merged = merge_dicts(log, aws_attributes.to_dict())
@@ -183,20 +178,6 @@ def get_state_machine_arn(self, aws_attributes):
logger.debug("Unable to get state_machine_arn: %s" % e)
return ""

def process_eks_logs(self, metadata, aws_attributes):
log_stream = aws_attributes.get_log_stream()
if log_stream.startswith("kube-apiserver-audit-"):
metadata[DD_SOURCE] = "kubernetes.audit"
elif log_stream.startswith("kube-scheduler-"):
metadata[DD_SOURCE] = "kube_scheduler"
elif log_stream.startswith("kube-apiserver-"):
metadata[DD_SOURCE] = "kube-apiserver"
elif log_stream.startswith("kube-controller-manager-"):
metadata[DD_SOURCE] = "kube-controller-manager"
elif log_stream.startswith("authenticator-"):
metadata[DD_SOURCE] = "aws-iam-authenticator"
# In case the conditions above don't match we maintain eks as the source

# Lambda logs can be from either default or customized log group
def process_lambda_logs(self, metadata, aws_attributes):
lower_cased_lambda_function_name = self.get_lower_cased_lambda_function_name(
42 changes: 0 additions & 42 deletions aws/logs_monitoring/tests/test_parsing.py
@@ -78,18 +78,6 @@ def test_lambda_event(self):
str(AwsEventSource.LAMBDA),
)

def test_dms_event(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, "dms-tasks-test-instance"),
str(AwsEventSource.DMS),
)
self.assertEqual(
parse_event_source(
{"Records": ["logs-from-s3"]}, "AWSLogs/amazon_dms/my-s3.json.gz"
),
str(AwsEventSource.DMS),
)

def test_sns_event(self):
self.assertEqual(
parse_event_source(
@@ -110,18 +98,6 @@ def test_kinesis_event(self):
str(AwsEventSource.KINESIS),
)

def test_docdb_event(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, "/aws/docdb/testCluster/profile"),
str(AwsEventSource.DOCDB),
)
self.assertEqual(
parse_event_source(
{"Records": ["logs-from-s3"]}, "/amazon_documentdb/dev/123abc.zip"
),
str(AwsEventSource.DOCDB),
)

def test_vpc_event(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, "abc123_my_vpc_loggroup"),
@@ -216,15 +192,6 @@ def test_cloudfront_event(self):
str(AwsEventSource.S3),
)

def test_eks_event(self):
self.assertEqual(
parse_event_source(
{"awslogs": "logs"},
"/aws/eks/control-plane/cluster",
),
str(AwsEventSource.EKS),
)

def test_elasticsearch_event(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, "/elasticsearch/domain"),
@@ -256,15 +223,6 @@ def test_carbon_black_event(self):
str(AwsEventSource.CARBONBLACK),
)

def test_opensearch_event(self):
self.assertEqual(
parse_event_source(
{"awslogs": "logs"},
"/aws/OpenSearchService/domains/my-opensearch-cluster/ES_APPLICATION_LOGS",
),
str(AwsEventSource.OPENSEARCH),
)

def test_cloudwatch_source_if_none_found(self):
self.assertEqual(
parse_event_source({"awslogs": "logs"}, ""), str(AwsEventSource.CLOUDWATCH)
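The deleted tests above covered `parse_event_source` for exactly the sources this PR removes. Assuming no other matching rule picks these keys up, such log groups should now resolve to the default cloudwatch source, which is the behavior `test_cloudwatch_source_if_none_found` already asserts for an empty key. Below is a hedged, self-contained sketch of what fallback assertions could look like; `resolve_source` is a stand-in kept only so the sketch runs on its own, not the project's `parse_event_source`.

```python
import unittest

# Stand-in prefix table; the real forwarder resolves sources via
# parse_event_source(event, key).
PREFIX_TO_SOURCE = {"/aws/eks": "eks", "/aws/lambda": "lambda"}


def resolve_source(log_group):
    for prefix, source in PREFIX_TO_SOURCE.items():
        if log_group.startswith(prefix):
            return source
    return "cloudwatch"


class RemovedSourcesFallBackToCloudwatch(unittest.TestCase):
    # Assumed post-PR behavior: log groups that used to map to dms, docdb,
    # fsx or opensearch now resolve to the generic cloudwatch source.
    def test_dms_task_log_group(self):
        self.assertEqual(resolve_source("dms-tasks-test-instance"), "cloudwatch")

    def test_docdb_log_group(self):
        self.assertEqual(resolve_source("/aws/docdb/testCluster/profile"), "cloudwatch")

    def test_fsx_windows_log_group(self):
        self.assertEqual(resolve_source("/aws/fsx/windows/xxx"), "cloudwatch")

    def test_opensearch_domain_log_group(self):
        self.assertEqual(
            resolve_source("/aws/opensearchservice/domains/my-cluster"), "cloudwatch"
        )


if __name__ == "__main__":
    unittest.main()
```
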

This file was deleted.

This file was deleted.

@@ -154,11 +154,6 @@ def test_cloudwatch_log_custom_tags(self):
snapshot_filename = f"{input_filename}~snapshot"
self.compare_snapshot(input_filename, snapshot_filename)

def test_cloudwatch_log_fsx_windows(self):
input_filename = f"{snapshot_dir}/cloudwatch_log_fsx_windows.json"
snapshot_filename = f"{input_filename}~snapshot"
self.compare_snapshot(input_filename, snapshot_filename)

def test_cloudwatch_log_lambda_invocation(self):
input_filename = f"{snapshot_dir}/cloudwatch_log_lambda_invocation.json"
snapshot_filename = f"{input_filename}~snapshot"
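The removed `test_cloudwatch_log_fsx_windows` followed the fixture/snapshot pattern used throughout this file: load a JSON input, run it through the forwarder, and compare the result against the stored `~snapshot` file. Here is a generic, runnable sketch of that pattern; `run_pipeline`, the paths, and the fixture contents are illustrative, not the project's helpers.

```python
import json
import tempfile
import unittest
from pathlib import Path


def run_pipeline(event):
    # Stand-in for the real log-forwarding pipeline; it just tags the event
    # so the round trip has observable output.
    return {**event, "processed": True}


class SnapshotSketchTest(unittest.TestCase):
    def compare_snapshot(self, input_path, snapshot_path):
        event = json.loads(Path(input_path).read_text())
        expected = json.loads(Path(snapshot_path).read_text())
        self.assertEqual(run_pipeline(event), expected)

    def test_round_trip(self):
        with tempfile.TemporaryDirectory() as tmp:
            input_path = Path(tmp) / "input.json"
            snapshot_path = Path(tmp) / "input.json~snapshot"
            input_path.write_text(json.dumps({"message": "hello"}))
            snapshot_path.write_text(
                json.dumps({"message": "hello", "processed": True})
            )
            self.compare_snapshot(input_path, snapshot_path)


if __name__ == "__main__":
    unittest.main()
```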