Skip to content

Commit 355c2bd

Browse files
committed
Ignore the published version's code hash when using hot reloading; use a fixture for the OpenSearch test to avoid a resource leak
1 parent 6dd1c41 commit 355c2bd

File tree

3 files changed

+43
-5
lines changed

3 files changed

+43
-5
lines changed

localstack/services/awslambda/provider.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -385,7 +385,11 @@ def _create_version_model(
385385
Type="User",
386386
)
387387
current_hash = current_latest_version.config.code.code_sha256
388-
if code_sha256 and current_hash != code_sha256:
388+
if (
389+
code_sha256
390+
and current_hash != code_sha256
391+
and not current_latest_version.config.code.is_hot_reloading()
392+
):
389393
raise InvalidParameterValueException(
390394
f"CodeSHA256 ({code_sha256}) is different from current CodeSHA256 in $LATEST ({current_hash}). Please try again with the CodeSHA256 in $LATEST.",
391395
Type="User",

tests/integration/awslambda/test_lambda_developer_tools.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,39 @@ def test_hot_reloading(
9898
assert response_dict["counter"] == 1
9999
assert response_dict["constant"] == "value2"
100100

101+
def test_hot_reloading_publish_version(
102+
self,
103+
create_lambda_function_aws,
104+
lambda_client,
105+
lambda_su_role,
106+
cleanups,
107+
):
108+
"""
109+
Test if publish version code sha256s are ignored when using hot-reload (cannot be matched anyways)
110+
Serverless, for example, will hash the code before publishing on the client side, which can brick the publish
111+
version operation
112+
"""
113+
114+
function_name = f"test-hot-reloading-{short_uid()}"
115+
hot_reloading_bucket = config.BUCKET_MARKER_LOCAL
116+
tmp_path = config.dirs.tmp
117+
hot_reloading_dir_path = os.path.join(tmp_path, f"hot-reload-{short_uid()}")
118+
mkdir(hot_reloading_dir_path)
119+
cleanups.append(lambda: rm_rf(hot_reloading_dir_path))
120+
function_content = load_file(HOT_RELOADING_NODEJS_HANDLER)
121+
with open(os.path.join(hot_reloading_dir_path, "handler.mjs"), mode="wt") as f:
122+
f.write(function_content)
123+
124+
mount_path = get_host_path_for_path_in_docker(hot_reloading_dir_path)
125+
create_lambda_function_aws(
126+
FunctionName=function_name,
127+
Handler="handler.handler",
128+
Code={"S3Bucket": hot_reloading_bucket, "S3Key": mount_path},
129+
Role=lambda_su_role,
130+
Runtime=Runtime.nodejs18_x,
131+
)
132+
lambda_client.publish_version(FunctionName=function_name, CodeSha256="zipfilehash")
133+
101134

102135
@pytest.mark.skipif(condition=is_old_provider(), reason="Focussing on the new provider")
103136
class TestDockerFlags:

tests/integration/test_firehose.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -240,6 +240,7 @@ def test_kinesis_firehose_incompatible_with_opensearch_2_3(
240240
self,
241241
firehose_client,
242242
opensearch_client,
243+
opensearch_create_domain,
243244
kinesis_client,
244245
kinesis_create_stream,
245246
):
@@ -250,10 +251,10 @@ def test_kinesis_firehose_incompatible_with_opensearch_2_3(
250251
bucket_arn = "arn:aws:s3:::foo"
251252
delivery_stream_name = f"test-delivery-stream-{short_uid()}"
252253

253-
opensearch_create_response = opensearch_client.create_domain(
254-
DomainName=domain_name, EngineVersion="OpenSearch_2.3"
255-
)
256-
opensearch_arn = opensearch_create_response["DomainStatus"]["ARN"]
254+
opensearch_create_domain(DomainName=domain_name, EngineVersion="OpenSearch_2.3")
255+
opensearch_arn = opensearch_client.describe_domain(DomainName=domain_name)["DomainStatus"][
256+
"ARN"
257+
]
257258

258259
# create kinesis stream
259260
kinesis_create_stream(StreamName=stream_name, ShardCount=2)

0 commit comments

Comments
 (0)