Actions
Bug #53959
closed
make check: unittest_rbd_mirror: TestImageReplayer/3.SnapshotRemove FAILED ceph_assert(m_pending_ops == 0)
% Done:
0%
Source:
Backport:
quincy
Regression:
No
Severity:
3 - minor
Reviewed:
Affected Versions:
ceph-qa-suite:
Pull request ID:
Tags (freeform):
Merge Commit:
Fixed In:
Released In:
Upkeep Timestamp:
Description
[ RUN ] TestImageReplayer/3.SnapshotRemove
flushed
remote_snap_id=6
local_snap_id=5, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=c9589ea1-78ed-4577-a67e-58eb1b0e86ef, primary_snap_id=6, last_copied_object_number=0, snap_seqs={5=4,6=18446744073709551614}]
flushed
remote_snap_id=7
local_snap_id=6, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=c9589ea1-78ed-4577-a67e-58eb1b0e86ef, primary_snap_id=7, last_copied_object_number=0, snap_seqs={7=18446744073709551614}]
../src/common/AsyncOpTracker.cc: In function 'AsyncOpTracker::~AsyncOpTracker()' thread 7f4bc9fe3700 time 2022-01-20T23:20:04.445824+0000
../src/common/AsyncOpTracker.cc: 13: FAILED ceph_assert(m_pending_ops == 0)
ceph version Development (no_version) quincy (dev)
1: (ceph::__ceph_assert_fail(char const*, char const*, int, char const*)+0x1db) [0x7f4d1575179b]
2: /home/jenkins-build/build/workspace/ceph-pull-requests/build/lib/libceph-common.so.2(+0x13f65bf) [0x7f4d157515bf]
3: (AsyncOpTracker::~AsyncOpTracker()+0x49) [0x7f4d15624189]
4: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x72) [0x2ec0832]
5: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x1c) [0x2ec08dc]
6: (librbd::ImageCtx::shutdown()+0x36) [0x2e92d86]
7: (librbd::image::CloseRequest<librbd::ImageCtx>::finish()+0x1c) [0x308417c]
8: (librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher(int)+0x35c) [0x308456c]
9: (librbd::util::detail::C_CallbackAdapter<librbd::image::CloseRequest<librbd::ImageCtx>, &librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher>::finish(int)+0x68) [0x3085158]
10: (Context::complete(int)+0x22) [0x25a22b2]
11: (librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}::operator()() const+0x27) [0x25b7057]
12: (void boost::asio::asio_handler_invoke<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&, ...)+0x15) [0x25b7025]
13: (void boost_asio_handler_invoke_helpers::invoke<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, {lambda()#1}>(librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&, {lambda()#1}&)+0x32) [0x25b6fd2]
14: (void boost::asio::detail::handler_work<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0u>, void>::complete<{lambda()#1}>({lambda()#1}&, {lambda()#1}&)+0x3c) [0x25b6d4c]
15: (boost::asio::detail::completion_handler<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0u> >::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0xa1) [0x25b6b61]
16: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x45) [0x2e84b85]
17: (boost::asio::detail::strand_service::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0x9e) [0x2e84a2e]
18: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x45) [0x2e84b85]
19: (boost::asio::detail::scheduler::do_run_one(boost::asio::detail::conditionally_enabled_mutex::scoped_lock&, boost::asio::detail::scheduler_thread_info&, boost::system::error_code const&)+0x1f8) [0x327fa08]
20: (boost::asio::detail::scheduler::run(boost::system::error_code&)+0xee) [0x327f5ee]
21: (boost::asio::io_context::run()+0x2e) [0x367e40e]
22: (ceph::async::io_context_pool::start(short)::{lambda()#1}::operator()() const+0x21) [0x367e3d1]
23: (void std::__invoke_impl<void, ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::__invoke_other, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x367e39d]
24: (std::__invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::__invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::__invoke_result&&, (ceph::async::io_context_pool::start(short)::{lambda()#1}&&)...)+0x1d) [0x367e36d]
25: (std::invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::invoke_result&&, (ceph::async::io_context_pool::start(short)::{lambda()#1}&&)...)+0x1d) [0x367e33d]
26: _ZZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_ENKUlSA_SD_E_clIS3_JEEEDaSA_SD_()
27: (std::basic_string_view<char, std::char_traits<char> > std::__invoke_impl<void, make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}>(std::__invoke_other, make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}&&, {lambda()#1}&&)+0x32) [0x367e2c2]
28: (std::__invoke_result<std::basic_string_view<char, std::char_traits<char> >, (make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}&&)...>::type std::__invoke<make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}>(make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}&&)+0x32) [0x367e212]
29: (void std::thread::_Invoker<std::tuple<make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}> >::_M_invoke<0ul, 1ul>(std::_Index_tuple<0ul, 1ul>)+0x45) [0x367e1d5]
30: (std::thread::_Invoker<std::tuple<make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}> >::operator()()+0x15) [0x367e185]
31: (std::thread::_State_impl<std::thread::_Invoker<std::tuple<make_named_thread<ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::basic_string_view<char, std::char_traits<char> >, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)::{lambda(auto:1, auto:2&&)#1}, {lambda()#1}> > >::_M_run()+0x1e) [0x367df1e]
Updated by Ilya Dryomov about 4 years ago
Ran TestImageReplayer/3.SnapshotRemove overnight in a loop -- it doesn't reproduce.
Updated by Ilya Dryomov about 4 years ago
- Status changed from New to In Progress
- Assignee set to Ilya Dryomov
- Priority changed from Normal to High
I think it reproduced, but I got a segfault instead of an assert failure. Same code path though, it appears that a memory corruption is involved.
Updated by Ilya Dryomov about 4 years ago
- Related to Bug #54302: unittest_rbd_mirror fails due to SEGFAULT exception on Jenkins "make check" runs added
Updated by Ilya Dryomov about 4 years ago
- Has duplicate Bug #48850: "FAILED ceph_assert(m_pending_ops == 0)" in TestImageReplayer/3.SnapshotUnprotect added
Updated by Neha Ojha almost 4 years ago
[ RUN ] TestImageReplayer/3.SnapshotUnprotect
flushed
remote_snap_id=6
local_snap_id=5, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=98d4e5da-b579-444f-b3cf-0cb932499d17, primary_snap_id=6, last_copied_object_number=0, snap_seqs={5=4,6=18446744073709551614}]
flushed
remote_snap_id=7
local_snap_id=6, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=98d4e5da-b579-444f-b3cf-0cb932499d17, primary_snap_id=7, last_copied_object_number=0, snap_seqs={5=4,7=18446744073709551614}]
../src/common/AsyncOpTracker.cc: In function 'AsyncOpTracker::~AsyncOpTracker()' thread 7fbd8afdd700 time 2022-05-09T07:46:44.388135+0000
../src/common/AsyncOpTracker.cc: 13: FAILED ceph_assert(m_pending_ops == 0)
ceph version Development (no_version) quincy (stable)
1: (ceph::__ceph_assert_fail(char const*, char const*, int, char const*)+0x1db) [0x7fbedae714eb]
2: /home/jenkins-build/build/workspace/ceph-pull-requests/build/lib/libceph-common.so.2(+0x141030f) [0x7fbedae7130f]
3: (AsyncOpTracker::~AsyncOpTracker()+0x49) [0x7fbedad43119]
4: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x72) [0x2edbda2]
5: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x1c) [0x2edbe6c]
6: (librbd::ImageCtx::shutdown()+0x36) [0x2ead846]
7: (librbd::image::CloseRequest<librbd::ImageCtx>::finish()+0x1c) [0x30a087c]
8: (librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher(int)+0x35c) [0x30a0c6c]
Updated by Neha Ojha almost 4 years ago
- Backport changed from octopus,pacific to octopus,pacific,quincy
Updated by Sridhar Seshasayee almost 4 years ago
[ OK ] TestImageReplayer/3.SnapshotProtect (338 ms)
[ RUN ] TestImageReplayer/3.SnapshotRemove
flushed
remote_snap_id=6
local_snap_id=5, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=47837a54-2f40-41ea-9610-009f7461e659, primary_snap_id=6, last_copied_object_number=0, snap_seqs={5=4,6=18446744073709551614}]
flushed
remote_snap_id=7
local_snap_id=6, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=47837a54-2f40-41ea-9610-009f7461e659, primary_snap_id=7, last_copied_object_number=0, snap_seqs={7=18446744073709551614}]
../src/common/AsyncOpTracker.cc: In function 'AsyncOpTracker::~AsyncOpTracker()' thread 7fc62dff3700 time 2022-06-15T15:21:34.258423+0000
../src/common/AsyncOpTracker.cc: 13: FAILED ceph_assert(m_pending_ops == 0)
ceph version Development (no_version) quincy (dev)
1: (ceph::__ceph_assert_fail(char const*, char const*, int, char const*)+0x1b4) [0x7fc6ecd70114]
2: /home/jenkins-build/build/workspace/ceph-pull-requests/build/lib/libceph-common.so.2(+0x14b4f5f) [0x7fc6ecd6ff5f]
3: (AsyncOpTracker::~AsyncOpTracker()+0x46) [0x7fc6ecc490a6]
4: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x69) [0x2f15d59]
5: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x19) [0x2f15e09]
6: (librbd::ImageCtx::shutdown()+0x33) [0x2eea543]
7: (librbd::image::CloseRequest<librbd::ImageCtx>::finish()+0x1c) [0x30d2e2c]
8: (librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher(int)+0x35f) [0x30d321f]
Updated by Josh Durgin over 3 years ago
[ RUN ] TestImageReplayer/3.SnapshotLimit
remote_snap_id=4
local_snap_id=3, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=13ca01d9-a2a7-47af-8ad4-54e6ad8266e9, primary_snap_id=4, last_copied_object_number=1, snap_seqs={4=18446744073709551614}]
flushed
remote_snap_id=5
local_snap_id=4, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=13ca01d9-a2a7-47af-8ad4-54e6ad8266e9, primary_snap_id=5, last_copied_object_number=1, snap_seqs={5=18446744073709551614}]
flushed
remote_snap_id=6
local_snap_id=5, local_snap_ns=[mirror state=non-primary, complete=1, mirror_peer_uuids=, primary_mirror_uuid=13ca01d9-a2a7-47af-8ad4-54e6ad8266e9, primary_snap_id=6, last_copied_object_number=1, snap_seqs={6=18446744073709551614}]
../src/common/AsyncOpTracker.cc: In function 'AsyncOpTracker::~AsyncOpTracker()' thread 7f435c7f0700 time 2022-07-11T19:31:54.802280+0000
../src/common/AsyncOpTracker.cc: 13: FAILED ceph_assert(m_pending_ops == 0)
ceph version Development (no_version) quincy (dev)
1: (ceph::__ceph_assert_fail(char const*, char const*, int, char const*)+0x1b4) [0x7f441c3e2104]
2: /home/jenkins-build/build/workspace/ceph-pull-requests/build/lib/libceph-common.so.2(+0x14b5f4f) [0x7f441c3e1f4f]
3: (AsyncOpTracker::~AsyncOpTracker()+0x46) [0x7f441c2bb096]
4: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x69) [0x2f17759]
5: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x19) [0x2f17809]
6: (librbd::ImageCtx::shutdown()+0x33) [0x2eebf43]
7: (librbd::image::CloseRequest<librbd::ImageCtx>::finish()+0x1c) [0x30d47ec]
8: (librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher(int)+0x35f) [0x30d4bdf]
9: (librbd::util::detail::C_CallbackAdapter<librbd::image::CloseRequest<librbd::ImageCtx>, &librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher>::finish(int)+0x66) [0x30d5726]
10: (Context::complete(int)+0x1f) [0x25faf1f]
11: (librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}::operator()() const+0x24) [0x260f704]
12: (void boost::asio::asio_handler_invoke<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&, ...)+0x15) [0x260f6d5]
13: (void boost_asio_handler_invoke_helpers::invoke<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&)+0x2f) [0x260f67f]
14: (void boost::asio::detail::handler_work<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0u>, void>::complete<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&)+0x36) [0x260f406]
15: (boost::asio::detail::completion_handler<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0u> >::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0x9b) [0x260f23b]
16: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x2e) [0x2ede0de]
17: (boost::asio::detail::strand_service::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0x98) [0x2eddfa8]
18: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x2e) [0x2ede0de]
19: (boost::asio::detail::scheduler::do_run_one(boost::asio::detail::conditionally_enabled_mutex::scoped_lock&, boost::asio::detail::scheduler_thread_info&, boost::system::error_code const&)+0x1e5) [0x32c2995]
20: (boost::asio::detail::scheduler::run(boost::system::error_code&)+0xe6) [0x32c25b6]
21: (boost::asio::io_context::run()+0x2e) [0x369d6ce]
22: (ceph::async::io_context_pool::start(short)::{lambda()#1}::operator()() const+0x1c) [0x369d68c]
23: (void std::__invoke_impl<void, ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::__invoke_other, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x369d65d]
24: (std::__invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::__invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x369d62d]
25: (std::invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x369d5fd]
26: _ZZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_ENKUlSA_SD_E_clIS3_JEEEDaSA_SD_()
27: _ZSt13__invoke_implIvZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSB_SE_E_JS4_EESA_St14__invoke_otherOT0_DpOT1_()
28: _ZSt8__invokeIZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSB_SE_E_JS4_EENSt15__invoke_resultISA_JDpSC_EE4typeESB_SE_()
29: _ZNSt6thread8_InvokerISt5tupleIJZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEES_St17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSC_SF_E_S6_EEE9_M_invokeIJLm0ELm1EEEEvSt12_Index_tupleIJXspT_EEE()
30: _ZNSt6thread8_InvokerISt5tupleIJZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEES_St17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSC_SF_E_S6_EEEclEv()
31: _ZNSt6thread11_State_implINS_8_InvokerISt5tupleIJZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEES_St17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSD_SG_E_S7_EEEEE6_M_runEv()
Updated by Deepika Upadhyay over 3 years ago
observed this in a recent make check run:
/home/jenkins-build/build/workspace/ceph-pull-requests/src/common/AsyncOpTracker.cc: 13: FAILED ceph_assert(m_pending_ops == 0)
ceph version Development (no_version) quincy (dev)
1: (ceph::__ceph_assert_fail(char const*, char const*, int, char const*)+0x1ba) [0x7fb93828f05a]
2: /home/jenkins-build/build/workspace/ceph-pull-requests/build/lib/libceph-common.so.2(+0x1496e9f) [0x7fb93828ee9f]
3: (AsyncOpTracker::~AsyncOpTracker()+0x46) [0x7fb938183596]
4: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x69) [0x307f879]
5: (librbd::ImageWatcher<librbd::ImageCtx>::~ImageWatcher()+0x19) [0x307f929]
6: (librbd::ImageCtx::shutdown()+0x33) [0x3054043]
7: (librbd::image::CloseRequest<librbd::ImageCtx>::finish()+0x1c) [0x3243f0c]
8: (librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher(int)+0x380) [0x3244320]
9: (librbd::util::detail::C_CallbackAdapter<librbd::image::CloseRequest<librbd::ImageCtx>, &librbd::image::CloseRequest<librbd::ImageCtx>::handle_flush_image_watcher>::finish(int)+0x66) [0x3244e66]
10: (Context::complete(int)+0x1f) [0x273f27f]
11: (librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}::operator()() const+0x24) [0x27546b4]
12: (boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>::operator()()+0x15) [0x2754685]
13: (void boost::asio::asio_handler_invoke<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}> >(boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&, ...)+0x15) [0x2754665]
14: (void boost_asio_handler_invoke_helpers::invoke<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}&)+0x2f) [0x275463f]
15: (void boost::asio::detail::asio_handler_invoke<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>, librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>(boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&, boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>*)+0x1d) [0x27545fd]
16: (void boost_asio_handler_invoke_helpers::invoke<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>, boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}> >(boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&, boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&)+0x2d) [0x275459d]
17: (void boost::asio::detail::handler_work<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0ul>, void>::complete<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}> >(boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&, boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>&)+0x36) [0x2754286]
18: (boost::asio::detail::completion_handler<boost::asio::detail::binder0<librbd::asio::ContextWQ::queue(Context*, int)::{lambda()#1}>, boost::asio::io_context::basic_executor_type<std::allocator<void>, 0ul> >::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0xa6) [0x27540a6]
19: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x2e) [0x3045e3e]
20: (boost::asio::detail::strand_service::do_complete(void*, boost::asio::detail::scheduler_operation*, boost::system::error_code const&, unsigned long)+0x99) [0x3045d09]
21: (boost::asio::detail::scheduler_operation::complete(void*, boost::system::error_code const&, unsigned long)+0x2e) [0x3045e3e]
22: (boost::asio::detail::scheduler::do_run_one(boost::asio::detail::conditionally_enabled_mutex::scoped_lock&, boost::asio::detail::scheduler_thread_info&, boost::system::error_code const&)+0x1e8) [0x3438958]
23: (boost::asio::detail::scheduler::run(boost::system::error_code&)+0xf7) [0x3438557]
24: (boost::asio::io_context::run()+0x2e) [0x383c87e]
25: (ceph::async::io_context_pool::start(short)::{lambda()#1}::operator()() const+0x1c) [0x383c83c]
26: (void std::__invoke_impl<void, ceph::async::io_context_pool::start(short)::{lambda()#1}>(std::__invoke_other, ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x383c80d]
27: (std::__invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::__invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x383c7dd]
28: (std::invoke_result<ceph::async::io_context_pool::start(short)::{lambda()#1}>::type std::invoke<ceph::async::io_context_pool::start(short)::{lambda()#1}>(ceph::async::io_context_pool::start(short)::{lambda()#1}&&)+0x1d) [0x383c7ad]
29: _ZZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_ENKUlSA_SD_E_clIS3_JEEEDaSA_SD_()
30: _ZSt13__invoke_implIvZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSB_SE_E_JS4_EESA_St14__invoke_otherOT0_DpOT1_()
31: _ZSt8__invokeIZ17make_named_threadIZN4ceph5async15io_context_pool5startEsEUlvE_JEESt6threadSt17basic_string_viewIcSt11char_traitsIcEEOT_DpOT0_EUlSB_SE_E_JS4_EENSt15__invoke_resultISA_JDpSC_EE4typeESB_SE_()
Updated by Ilya Dryomov over 2 years ago
- Related to Bug #63798: make check: unittest_rbd_mirror SEGFAULT added
Updated by Konstantin Shalygin over 1 year ago
- Backport changed from octopus,pacific,quincy to octopus,quincy
Updated by Konstantin Shalygin over 1 year ago
- Backport changed from octopus,quincy to quincy
Updated by Ilya Dryomov about 1 year ago
- Related to deleted (Bug #63798: make check: unittest_rbd_mirror SEGFAULT)
Updated by Ilya Dryomov about 1 year ago
- Status changed from In Progress to Can't reproduce
In the course of root causing a segfault in https://tracker.ceph.com/issues/63798 I did close to a million runs of this test and didn't see any asserts.
Updated by Ilya Dryomov about 1 year ago
- Related to deleted (Bug #54302: unittest_rbd_mirror fails due to SEGFAULT exception on Jenkins "make check" runs)
Actions