Relevant telegraf.conf
[global_tags]
[agent]
interval = "30s"
collection_jitter = "0s"
flush_interval = "10s"
flush_jitter = "0s"
[[inputs.cloudwatch]]
region = "us-east-1"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsidev"
host = "dev"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-east-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsidev"
host = "dev"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-west-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsidev"
host = "dev"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-east-1"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsisandbox"
host = "qa"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-east-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsisandbox"
host = "qa"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-west-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 15
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsisandbox"
host = "qa"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-east-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 10
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsiprod"
host = "prod"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "ap-northeast-1"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 10
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsiprod"
host = "prod"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "eu-west-1"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 10
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsiprod"
host = "prod"
parentCluster = "jitsi"
[[inputs.cloudwatch]]
region = "us-west-2"
period = "1m"
delay = "5m"
interval = "1m"
namespace = "Jitsi"
ratelimit = 10
statistic_include = ["maximum"]
[inputs.cloudwatch.tags]
cluster = "jitsiprod"
host = "prod"
parentCluster = "jitsi"
Logs from Telegraf
$ telegraf --debug --test --config ./telegraf.conf > metrics
2022-03-17T18:40:40Z I! Starting Telegraf 1.21.4
2022-03-17T18:40:40Z I! Loaded inputs: cloudwatch (10x)
2022-03-17T18:40:40Z I! Loaded aggregators:
2022-03-17T18:40:40Z I! Loaded processors: rename
2022-03-17T18:40:40Z W! Outputs are not used in testing mode!
2022-03-17T18:40:40Z I! Tags enabled: host=dev-10-34-4-97
2022-03-17T18:40:40Z D! [agent] Initializing plugins
2022-03-17T18:40:40Z D! [agent] Starting service inputs
2022-03-17T18:40:40Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:40Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:41Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 1537ba1c-ce53-4bbf-9c3d-14431f539bb7, api error RequestEntityTooLarge: Request size 639468 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: f22f8d43-fe1b-4a9b-868e-8294f5a9f842, api error RequestEntityTooLarge: Request size 642529 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: e79fdb83-094f-4088-822e-f18813eabfbf, api error RequestEntityTooLarge: Request size 645382 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 853bc67c-853b-469d-9bbb-fbdced52791c, api error RequestEntityTooLarge: Request size 638307 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 68ce0c4c-b416-4dd8-9fa3-0218c634794f, api error RequestEntityTooLarge: Request size 643586 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: e9189f7c-b779-4de4-a358-7a8233031c5e, api error RequestEntityTooLarge: Request size 637044 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 08b89165-06c6-468b-97fe-1b431c5fc27e, api error RequestEntityTooLarge: Request size 632813 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: f247cc4c-1227-492f-a375-90bce043a67d, api error RequestEntityTooLarge: Request size 639441 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 98e357a8-1799-4896-8366-3d778f2a24bf, api error RequestEntityTooLarge: Request size 638764 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 0d195c0c-0513-4e62-a285-1eb0dedb34df, api error RequestEntityTooLarge: Request size 635101 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 16d3c469-62d7-4b61-9d47-30c922423468, api error RequestEntityTooLarge: Request size 632924 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: b5592cf3-30ce-4a9a-9bd2-392fcd8e6e52, api error RequestEntityTooLarge: Request size 635558 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 3eea2aae-692f-4570-83dd-f9e3fb299afa, api error RequestEntityTooLarge: Request size 640921 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 8e50f909-b957-470c-a8b3-15c7a9161818, api error RequestEntityTooLarge: Request size 639315 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 2185ad88-28ad-4fad-83d2-6a61127b4b0a, api error RequestEntityTooLarge: Request size 638813 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 7156711f-d6b7-49bc-8b8c-25aa08ef2005, api error RequestEntityTooLarge: Request size 637099 exceeded 614400 bytes
2022-03-17T18:40:46Z D! [agent] Stopping service inputs
2022-03-17T18:40:46Z D! [agent] Input channel closed
2022-03-17T18:40:46Z D! [agent] Processor channel closed
2022-03-17T18:40:46Z D! [agent] Stopped Successfully
2022-03-17T18:40:46Z E! [telegraf] Error running agent: input plugins recorded 16 errors
System info
Telegraf 1.21.4, Ubuntu 20.04.3
Docker
No response
Steps to reproduce
- Add cloudwatch input plugin to capture measurements from AWS, configure as supplied in the example config
- When running, Telegraf throws several errors, fails to proceed, and no metrics are gathered
Expected behavior
No errors; the GetMetricData request payload should stay within the allowed size limit.
Actual behavior
When running, Telegraf throws several errors (HTTP 413 RequestEntityTooLarge), fails to proceed, and no metrics are gathered.
Additional info
No response
Relevant telegraf.conf
Logs from Telegraf
$ telegraf --debug --test --config ./telegraf.conf > metrics
2022-03-17T18:40:40Z I! Starting Telegraf 1.21.4
2022-03-17T18:40:40Z I! Loaded inputs: cloudwatch (10x)
2022-03-17T18:40:40Z I! Loaded aggregators:
2022-03-17T18:40:40Z I! Loaded processors: rename
2022-03-17T18:40:40Z W! Outputs are not used in testing mode!
2022-03-17T18:40:40Z I! Tags enabled: host=dev-10-34-4-97
2022-03-17T18:40:40Z D! [agent] Initializing plugins
2022-03-17T18:40:40Z D! [agent] Starting service inputs
2022-03-17T18:40:40Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:40Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:41Z D! [inputs.cloudwatch] no metrics found to collect
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 1537ba1c-ce53-4bbf-9c3d-14431f539bb7, api error RequestEntityTooLarge: Request size 639468 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: f22f8d43-fe1b-4a9b-868e-8294f5a9f842, api error RequestEntityTooLarge: Request size 642529 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: e79fdb83-094f-4088-822e-f18813eabfbf, api error RequestEntityTooLarge: Request size 645382 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 853bc67c-853b-469d-9bbb-fbdced52791c, api error RequestEntityTooLarge: Request size 638307 exceeded 614400 bytes
2022-03-17T18:40:42Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 68ce0c4c-b416-4dd8-9fa3-0218c634794f, api error RequestEntityTooLarge: Request size 643586 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: e9189f7c-b779-4de4-a358-7a8233031c5e, api error RequestEntityTooLarge: Request size 637044 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 08b89165-06c6-468b-97fe-1b431c5fc27e, api error RequestEntityTooLarge: Request size 632813 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: f247cc4c-1227-492f-a375-90bce043a67d, api error RequestEntityTooLarge: Request size 639441 exceeded 614400 bytes
2022-03-17T18:40:43Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 98e357a8-1799-4896-8366-3d778f2a24bf, api error RequestEntityTooLarge: Request size 638764 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 0d195c0c-0513-4e62-a285-1eb0dedb34df, api error RequestEntityTooLarge: Request size 635101 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 16d3c469-62d7-4b61-9d47-30c922423468, api error RequestEntityTooLarge: Request size 632924 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: b5592cf3-30ce-4a9a-9bd2-392fcd8e6e52, api error RequestEntityTooLarge: Request size 635558 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 3eea2aae-692f-4570-83dd-f9e3fb299afa, api error RequestEntityTooLarge: Request size 640921 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 8e50f909-b957-470c-a8b3-15c7a9161818, api error RequestEntityTooLarge: Request size 639315 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 2185ad88-28ad-4fad-83d2-6a61127b4b0a, api error RequestEntityTooLarge: Request size 638813 exceeded 614400 bytes
2022-03-17T18:40:44Z E! [inputs.cloudwatch] Error in plugin: failed to get metric data: operation error CloudWatch: GetMetricData, https response error StatusCode: 413, RequestID: 7156711f-d6b7-49bc-8b8c-25aa08ef2005, api error RequestEntityTooLarge: Request size 637099 exceeded 614400 bytes
2022-03-17T18:40:46Z D! [agent] Stopping service inputs
2022-03-17T18:40:46Z D! [agent] Input channel closed
2022-03-17T18:40:46Z D! [agent] Processor channel closed
2022-03-17T18:40:46Z D! [agent] Stopped Successfully
2022-03-17T18:40:46Z E! [telegraf] Error running agent: input plugins recorded 16 errors
System info
Telegraf 1.21.4, Ubuntu 20.04.3
Docker
No response
Steps to reproduce
Expected behavior
No errors; the GetMetricData request payload should stay within the allowed size limit.
Actual behavior
When running, Telegraf throws several errors (HTTP 413 RequestEntityTooLarge), fails to proceed, and no metrics are gathered.
Additional info
No response