
Commit 4c25b64

feat(aws): AWSX-1592 Move eks support to logs-backend
Signed-off-by: Vincent Boutour <vincent.boutour@datadoghq.com>
1 parent: 0fb58a9

3 files changed: 0 additions, 31 deletions

aws/logs_monitoring/steps/enums.py

Lines changed: 0 additions & 3 deletions
@@ -7,7 +7,6 @@ class AwsEventSource(Enum):
     CLOUDFRONT = "cloudfront"
     CLOUDTRAIL = "cloudtrail"
     CLOUDWATCH = "cloudwatch"
-    EKS = "eks"
     ELASTICSEARCH = "elasticsearch"
     ELB = "elb"
     FARGATE = "fargate"
@@ -88,8 +87,6 @@ def __init__(self, string, event_source):
 
     # e.g. /aws/codebuild/my-project
     CLOUDTRAIL = ("_CloudTrail_", AwsEventSource.CLOUDTRAIL)
-    # e.g. /aws/eks/yourClusterName/profile
-    EKS = ("/aws/eks", AwsEventSource.EKS)
     # e.g. /aws/kinesisfirehose/dev
     KINESIS = ("/aws/kinesis", AwsEventSource.KINESIS)
     # e.g. /aws/lambda/helloDatadog

aws/logs_monitoring/steps/handlers/awslogs_handler.py

Lines changed: 0 additions & 19 deletions
@@ -59,11 +59,6 @@ def handle(self, event):
         # then rebuild the arn of the monitored lambda using that name.
         if metadata[DD_SOURCE] == str(AwsEventSource.LAMBDA):
             self.process_lambda_logs(metadata, aws_attributes)
-        # The EKS log group contains various sources from the K8S control plane.
-        # In order to have these automatically trigger the correct pipelines they
-        # need to send their events with the correct log source.
-        if metadata[DD_SOURCE] == str(AwsEventSource.EKS):
-            self.process_eks_logs(metadata, aws_attributes)
         # Create and send structured logs to Datadog
         for log in logs["logEvents"]:
             merged = merge_dicts(log, aws_attributes.to_dict())
@@ -182,20 +177,6 @@ def get_state_machine_arn(self, aws_attributes):
             logger.debug("Unable to get state_machine_arn: %s" % e)
         return ""
 
-    def process_eks_logs(self, metadata, aws_attributes):
-        log_stream = aws_attributes.get_log_stream()
-        if log_stream.startswith("kube-apiserver-audit-"):
-            metadata[DD_SOURCE] = "kubernetes.audit"
-        elif log_stream.startswith("kube-scheduler-"):
-            metadata[DD_SOURCE] = "kube_scheduler"
-        elif log_stream.startswith("kube-apiserver-"):
-            metadata[DD_SOURCE] = "kube-apiserver"
-        elif log_stream.startswith("kube-controller-manager-"):
-            metadata[DD_SOURCE] = "kube-controller-manager"
-        elif log_stream.startswith("authenticator-"):
-            metadata[DD_SOURCE] = "aws-iam-authenticator"
-        # In case the conditions above don't match we maintain eks as the source
-
     # Lambda logs can be from either default or customized log group
     def process_lambda_logs(self, metadata, aws_attributes):
         lower_cased_lambda_function_name = self.get_lower_cased_lambda_function_name(
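
The deleted process_eks_logs re-tagged EKS control-plane log streams by their stream-name prefix; per the commit title this mapping now happens in logs-backend rather than in the forwarder. For reference, here is a self-contained, table-driven sketch of that same mapping; the function name eks_source_for_stream and the standalone form are illustrative only, not part of the codebase.

```python
# Standalone sketch of the prefix-to-source routing removed above; the forwarder
# itself no longer performs it (the commit moves it to logs-backend).

# Ordered prefixes: the more specific "kube-apiserver-audit-" must be checked
# before the generic "kube-apiserver-".
EKS_STREAM_PREFIX_TO_SOURCE = (
    ("kube-apiserver-audit-", "kubernetes.audit"),
    ("kube-scheduler-", "kube_scheduler"),
    ("kube-apiserver-", "kube-apiserver"),
    ("kube-controller-manager-", "kube-controller-manager"),
    ("authenticator-", "aws-iam-authenticator"),
)


def eks_source_for_stream(log_stream, default="eks"):
    for prefix, source in EKS_STREAM_PREFIX_TO_SOURCE:
        if log_stream.startswith(prefix):
            return source
    # As in the removed code, streams that match no prefix keep "eks" as the source.
    return default


assert eks_source_for_stream("kube-apiserver-audit-abc123") == "kubernetes.audit"
assert eks_source_for_stream("cloud-controller-manager-xyz") == "eks"
```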

aws/logs_monitoring/tests/test_parsing.py

Lines changed: 0 additions & 9 deletions
@@ -192,15 +192,6 @@ def test_cloudfront_event(self):
             str(AwsEventSource.S3),
         )
 
-    def test_eks_event(self):
-        self.assertEqual(
-            parse_event_source(
-                {"awslogs": "logs"},
-                "/aws/eks/control-plane/cluster",
-            ),
-            str(AwsEventSource.EKS),
-        )
-
     def test_elasticsearch_event(self):
         self.assertEqual(
             parse_event_source({"awslogs": "logs"}, "/elasticsearch/domain"),
