-
Notifications
You must be signed in to change notification settings - Fork 8.3k
test_database_delta failed #79109
Copy link
Copy link
Labels
flaky test · flaky test found by CI · testing · Special issue with list of bugs found by CI
Description
started_cluster = <helpers.cluster.ClickHouseCluster object at 0x7f4140ac71f0>
def test_complex_table_schema(started_cluster):
node1 = started_cluster.instances['node1']
execute_spark_query(node1, "CREATE SCHEMA schema_with_complex_tables", ignore_exit_code=True)
schema = "event_date DATE, event_time TIMESTAMP, hits ARRAY<integer>, ids MAP<int, string>, really_complex STRUCT<f1:int,f2:string>"
create_query = f"CREATE TABLE schema_with_complex_tables.complex_table ({schema}) using Delta location '/tmp/complex_schema/complex_table'"
execute_spark_query(node1, create_query, ignore_exit_code=True)
> execute_spark_query(node1, "insert into schema_with_complex_tables.complex_table SELECT to_date('2024-10-01', 'yyyy-MM-dd'), to_timestamp('2024-10-01 00:12:00'), array(42, 123, 77), map(7, 'v7', 5, 'v5'), named_struct(\\\"f1\\\", 34, \\\"f2\\\", 'hello')", ignore_exit_code=True)
test_database_delta/test.py:117:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
test_database_delta/test.py:54: in execute_spark_query
return node.exec_in_container(
helpers/cluster.py:4073: in exec_in_container
return self.cluster.exec_in_container(
helpers/cluster.py:2081: in exec_in_container
result = subprocess_check_call(
helpers/cluster.py:238: in subprocess_check_call
return run_and_check(args, detach=detach, nothrow=nothrow, **kwargs)
helpers/cluster.py:131: in run_and_check
res = subprocess.run(
/usr/lib/python3.10/subprocess.py:505: in run
stdout, stderr = process.communicate(input, timeout=timeout)
/usr/lib/python3.10/subprocess.py:1154: in communicate
stdout, stderr = self._communicate(input, endtime, timeout)
/usr/lib/python3.10/subprocess.py:2022: in _communicate
self._check_timeout(endtime, orig_timeout, stdout, stderr)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <Popen: returncode: -9 args: ['docker', 'exec', 'roottestdatabasedelta-gw2-n...>
endtime = 2876.800267523, orig_timeout = 300, stdout_seq = []
stderr_seq = [b'Ivy Default Cache set to: /root/.ivy2/cache\nThe jars for the packages stored in: /root/.ivy2/jars\n', b'org.apache....apache.hadoop#hadoop-aws;3.3.4 in central\n', b'\tfound com.amazonaws#aws-java-sdk-bundle;1.12.262 in central\n', ...]
skip_check_and_raise = False
def _check_timeout(self, endtime, orig_timeout, stdout_seq, stderr_seq,
skip_check_and_raise=False):
"""Convenience for checking if a timeout has expired."""
if endtime is None:
return
if skip_check_and_raise or _time() > endtime:
> raise TimeoutExpired(
self.args, orig_timeout,
output=b''.join(stdout_seq) if stdout_seq else None,
stderr=b''.join(stderr_seq) if stderr_seq else None)
E subprocess.TimeoutExpired: Command '['docker', 'exec', 'roottestdatabasedelta-gw2-node1-1', 'bash', '-c', '\ncd /spark-3.5.4-bin-hadoop3 && bin/spark-sql --name "s3-uc-test" \\\n --master "local[*]" \\\n --packages "org.apache.hadoop:hadoop-aws:3.3.4,io.delta:delta-spark_2.12:3.2.1,io.unitycatalog:unitycatalog-spark_2.12:0.2.0" \\\n --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \\\n --conf "spark.sql.catalog.spark_catalog=io.unitycatalog.spark.UCSingleCatalog" \\\n --conf "spark.hadoop.fs.s3.impl=org.apache.hadoop.fs.s3a.S3AFileSystem" \\\n --conf "spark.sql.catalog.unity=io.unitycatalog.spark.UCSingleCatalog" \\\n --conf "spark.sql.catalog.unity.uri=http://localhost:8080" \\\n --conf "spark.sql.catalog.unity.token=" \\\n --conf "spark.sql.defaultCatalog=unity" \\\n -S -e "insert into schema_with_complex_tables.complex_table SELECT to_date(\'2024-10-01\', \'yyyy-MM-dd\'), to_timestamp(\'2024-10-01 00:12:00\'), array(42, 123, 77), map(7, \'v7\', 5, \'v5\'), named_struct(\\"f1\\", 34, \\"f2\\", \'hello\')" | grep -v \'loading settings\'\n']' timed out after 300 seconds
/usr/lib/python3.10/subprocess.py:1198: TimeoutExpired
Reactions are currently unavailable
Metadata
Metadata
Assignees
Labels
flaky test · flaky test found by CI · testing · Special issue with list of bugs found by CI