Skip to content

Commit fb966a3

Browse files
authored
Merge branch 'main' into add-fips-compatible-flag-infraobs
2 parents b11fb54 + 05aa358 commit fb966a3

93 files changed

Lines changed: 8146 additions & 578 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

.buildkite/pipeline.schedule-daily.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,13 +86,13 @@ steps:
8686
if: |
8787
build.env('TEST_PACKAGES_BASIC_SUBSCRIPTION') == "true"
8888
89-
- label: "Check integrations local stacks - Stack Version v9.1"
89+
- label: "Check integrations local stacks - Stack Version v9.2"
9090
trigger: "integrations"
9191
build:
9292
env:
9393
SERVERLESS: "false"
9494
FORCE_CHECK_ALL: "true"
95-
STACK_VERSION: 9.1.0-SNAPSHOT
95+
STACK_VERSION: 9.2.0-SNAPSHOT
9696
PUBLISH_COVERAGE_REPORTS: "false"
9797
depends_on:
9898
- step: "check"

.buildkite/pipeline.schedule-weekly.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,13 +28,13 @@ steps:
2828
- step: "check"
2929
allow_failure: false
3030

31-
- label: "Check integrations local stacks and non-wolfi images for Elastic Agent - Stack Version v9.1"
31+
- label: "Check integrations local stacks and non-wolfi images for Elastic Agent - Stack Version v9.2"
3232
trigger: "integrations"
3333
build:
3434
env:
3535
SERVERLESS: "false"
3636
FORCE_CHECK_ALL: "true"
37-
STACK_VERSION: 9.1.0-SNAPSHOT
37+
STACK_VERSION: 9.2.0-SNAPSHOT
3838
PUBLISH_COVERAGE_REPORTS: "false"
3939
ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI: "true"
4040
depends_on:

.buildkite/pull-requests.json

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,17 @@
1313
"always_trigger_comment_regex": "^(?:(?:buildkite\\W+)?(?:build|test)\\W+(?:this|it))|^/test$|^/test benchmark fullreport$",
1414
"skip_ci_labels": [],
1515
"skip_target_branches": [],
16-
"skip_ci_on_only_changed": ["^.github/workflows/", "^.github/dependabot.yml$", "^.github/ISSUE_TEMPLATE/", "^docs/", "^catalog-info.yaml$", "^.buildkite/pull-requests.json$"],
16+
"skip_ci_on_only_changed": [
17+
"^.github/workflows/",
18+
"^.github/dependabot.yml$",
19+
"^.github/ISSUE_TEMPLATE/",
20+
"^docs/",
21+
"^catalog-info.yaml$",
22+
"^.buildkite/pipeline.schedule-daily.yml$",
23+
"^.buildkite/pipeline.schedule-weekly.yml$",
24+
"^.buildkite/pipeline.backport.yml$",
25+
"^.buildkite/pull-requests.json$"
26+
],
1727
"always_require_ci_on_changed": []
1828
},
1929
{

.buildkite/scripts/common.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -746,7 +746,7 @@ is_pr_affected() {
746746
# Example:
747747
# https://buildkite.com/elastic/integrations/builds/25606
748748
# https://github.com/elastic/integrations/pull/13810
749-
if git diff --name-only "${commit_merge}" "${to}" | grep -E -v '^(packages/|\.github/(CODEOWNERS|ISSUE_TEMPLATE|PULL_REQUEST_TEMPLATE)|README\.md|docs/|catalog-info\.yaml|\.buildkite/pull-requests\.json)' > /dev/null; then
749+
if git diff --name-only "${commit_merge}" "${to}" | grep -E -v '^(packages/|\.github/(CODEOWNERS|ISSUE_TEMPLATE|PULL_REQUEST_TEMPLATE)|README\.md|docs/|catalog-info\.yaml|\.buildkite/(pull-requests\.json|pipeline\.schedule-daily\.yml|pipeline\.schedule-weekly\.yml|pipeline\.backport\.yml))' > /dev/null; then
750750
echo "[${package}] PR is affected: found non-package files"
751751
return 0
752752
fi

packages/amazon_security_lake/changelog.yml

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
11
# newer versions go on top
2+
- version: "2.5.3"
3+
changes:
4+
- description: Remove null fields on entry to ingest pipeline to prevent spurious rename failures.
5+
type: bugfix
6+
link: https://github.com/elastic/integrations/pull/14392
27
- version: "2.5.2"
38
changes:
49
- description: Fix handling `ocsf.api.request.data` and `ocsf.api.response.data` when they are a JSON encoded object.

packages/amazon_security_lake/data_stream/event/_dev/test/pipeline/test-application-activity.log-expected.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

packages/amazon_security_lake/data_stream/event/_dev/test/pipeline/test-network-activity.log-expected.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

packages/amazon_security_lake/data_stream/event/elasticsearch/ingest_pipeline/default.yml

Lines changed: 21 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,25 @@ processors:
2020
- append:
2121
field: error.message
2222
value: 'Processor {{{_ingest.on_failure_processor_type}}} with tag {{{_ingest.on_failure_processor_tag}}} in pipeline {{{_ingest.on_failure_pipeline}}} failed with message: {{{_ingest.on_failure_message}}}'
23+
- script:
24+
lang: painless
25+
description: Drops null/empty values recursively.
26+
tag: painless_remove_null_from_ocsf
27+
if: ctx.ocsf instanceof Map
28+
source: |-
29+
boolean drop(Object object) {
30+
if (object == null || object == '') {
31+
return true;
32+
} else if (object instanceof Map) {
33+
((Map) object).values().removeIf(v -> drop(v));
34+
return (((Map) object).size() == 0);
35+
} else if (object instanceof List) {
36+
((List) object).removeIf(v -> drop(v));
37+
return (((List) object).length == 0);
38+
}
39+
return false;
40+
}
41+
drop(ctx.ocsf);
2342
- script:
2443
description: Recursively traverses the ocsf object to convert suspected timestamps to milliseconds.
2544
tag: convert_timestamps_to_milliseconds
@@ -64,19 +83,18 @@ processors:
6483
return null;
6584
}
6685
processFields(ctx.ocsf);
67-
6886
- rename:
6987
field: ocsf.resource
7088
target_field: ocsf.resources
7189
tag: rename_resource_to_resources
7290
ignore_missing: true
73-
if : ctx.ocsf?.resources == null
91+
if: ctx.ocsf?.resources == null
7492
- rename:
7593
field: ocsf.finding_info_list
7694
target_field: ocsf.finding_info
7795
tag: rename_finding_info_list_to_finding_info
7896
ignore_missing: true
79-
if : ctx.ocsf?.finding_info == null
97+
if: ctx.ocsf?.finding_info == null
8098
- convert:
8199
field: ocsf.class_uid
82100
tag: convert_class_uid_to_string

packages/amazon_security_lake/manifest.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
format_version: "3.0.3"
22
name: amazon_security_lake
33
title: Amazon Security Lake
4-
version: "2.5.2"
4+
version: "2.5.3"
55
description: Collect logs from Amazon Security Lake with Elastic Agent.
66
type: integration
77
categories: ["aws", "security"]

packages/armis/_dev/build/docs/README.md

Lines changed: 14 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -22,65 +22,46 @@ The Armis integration collects three types of logs.
2222

2323
## Requirements
2424

25-
### Agentless Enabled Integration
25+
### Agentless-enabled integration
26+
2627
Agentless integrations allow you to collect data without having to manage Elastic Agent in your cloud. They make manual agent deployment unnecessary, so you can focus on your data instead of the agent that collects it. For more information, refer to [Agentless integrations](https://www.elastic.co/guide/en/serverless/current/security-agentless-integrations.html) and the [Agentless integrations FAQ](https://www.elastic.co/guide/en/serverless/current/agentless-integration-troubleshooting.html).
2728

2829
Agentless deployments are only supported in Elastic Serverless and Elastic Cloud environments. This functionality is in beta and is subject to change. Beta features are not subject to the support SLA of official GA features.
2930

30-
### Agent Based Installation
31-
- Elastic Agent must be installed
32-
- You can install only one Elastic Agent per host.
33-
- Elastic Agent is required to stream data from the GCP Pub/Sub or REST API and ship the data to Elastic, where the events will then be processed via the integration's ingest pipelines.
34-
35-
#### Installing and managing an Elastic Agent:
36-
37-
You have a few options for installing and managing an Elastic Agent:
38-
39-
#### Install a Fleet-managed Elastic Agent (recommended):
40-
41-
With this approach, you install Elastic Agent and use Fleet in Kibana to define, configure, and manage your agents in a central location. We recommend using Fleet management because it makes the management and upgrade of your agents considerably easier.
42-
43-
#### Install Elastic Agent in standalone mode (advanced users):
44-
45-
With this approach, you install Elastic Agent and manually configure the agent locally on the system where it’s installed. You are responsible for managing and upgrading the agents. This approach is reserved for advanced users only.
46-
47-
#### Install Elastic Agent in a containerized environment:
48-
49-
You can run Elastic Agent inside a container, either with Fleet Server or standalone. Docker images for all versions of Elastic Agent are available from the Elastic Docker registry and we provide deployment manifests for running on Kubernetes.
31+
### Agent-based installation
5032

51-
There are some minimum requirements for running Elastic Agent and for more information, refer to the link [here](https://www.elastic.co/guide/en/fleet/current/elastic-agent-installation.html).
33+
Elastic Agent must be installed. For more details, check the Elastic Agent [installation instructions](docs-content://reference/fleet/install-elastic-agents.md).
5234

5335
## Setup
5436

55-
### To collect logs through REST API, follow the below steps:
37+
### Collect logs through REST API
5638

5739
1. Log in to your Armis portal.
5840
2. Navigate to the **Settings** tab.
5941
3. Select **Asset Management & Security**.
6042
4. Go to **API Management** and generate a **Secret Key**.
6143

62-
### Enabling the integration in Elastic:
44+
### Enable the integration in Elastic
6345

64-
1. In Kibana navigate to Management > Integrations.
65-
2. In "Search for integrations" top bar, search for `Armis`.
66-
3. Select the "Armis" integration from the search results.
67-
4. Select "Add Armis" to add the integration.
68-
5. Add all the required integration configuration parameters, including the URL, Secret Key to enable data collection.
69-
6. Select "Save and continue" to save the integration.
46+
1. In Kibana navigate to **Management** > **Integrations**.
47+
2. In the search bar, type **Armis**.
48+
3. Select the **Armis** integration and add it.
49+
4. Add all the required integration configuration parameters, including the URL, Secret Key to enable data collection.
50+
5. Save the integration.
7051

7152
## Limitations
7253

73-
1. In the **vulnerability data stream**, our filtering mechanism for the **vulnerability search API** relies specifically on the `lastDetected` field. This means that when a user takes action on a vulnerability and `lastDetected` updates, only then will the event for that vulnerability be retrieved. Initially, we assumed this field would always have a value and could be used as a cursor timestamp for fetching data between intervals. However, due to inconsistencies in the API response, we observed cases where `lastDetected` is `null`.
54+
In the **vulnerability data stream**, our filtering mechanism for the **vulnerability search API** relies specifically on the `lastDetected` field. This means that when a user takes action on a vulnerability and `lastDetected` updates, only then will the event for that vulnerability be retrieved. Initially, we assumed this field would always have a value and could be used as a cursor timestamp for fetching data between intervals. However, due to inconsistencies in the API response, we observed cases where `lastDetected` is `null`.
7455

7556
## Troubleshooting
7657

77-
- If you are seeing below mentioned errors in the **vulnerability data stream**, try reducing the page size in your request.
58+
- If you get the following errors in the **vulnerability data stream**, reduce the page size in your request.
7859

7960
**Common errors:**
8061
- `502 Bad Gateway`
8162
- `414 Request-URI Too Large`
8263

83-
- If you are encountering issues in the **alert data stream**, particularly during the initial data fetch, try reducing the initial interval.
64+
- If you encounter issues in the **alert data stream**, particularly during the initial data fetch, reduce the initial interval.
8465

8566
**Example error:**
8667
- `The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.`

0 commit comments

Comments (0)