[2024-03-15 07:03:41.564056] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=174][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a8881d0, trans_id:{txid:97202008}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482160326809}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():711}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300100573252183}, max_applying_log_ts:{val:1710300100573252183}, max_applying_part_log_no:3, max_submitted_seq_no:1710300100555816, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86777504403}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8899e0, is_inited_:true, trans_id:{txid:97202008}, ls_id:{id:1}, ctx:0x7f547a8881d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97202008}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710300100573252183}, start_scn:{val:1710300100420124601}, end_scn:{val:1710300100573252183}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97202008}, state:1, commit_version:{val:1710300100573252183}, start_scn:{val:1710300100420124601}, end_scn:{val:1710300100573252183}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710300100420124601}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710300100573252183} lock_wait_start_ts=0 replay_compact_version={val:1710299448966099399}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97202008} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710300100420124601}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.564190] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=124] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.564491] 
INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710434834810234890}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a87ac50, trans_id:{txid:97366649}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181203492}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():51}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434834810234890}, max_applying_log_ts:{val:1710434834810234890}, max_applying_part_log_no:3, max_submitted_seq_no:1710434834797987, checksum:1935440914, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200011816}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87c460, is_inited_:true, trans_id:{txid:97366649}, ls_id:{id:1}, ctx:0x7f547a87ac50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366649}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366649}, state:1, commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434834807598175}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434834810234890} lock_wait_start_ts=0 replay_compact_version={val:1710433924715818735}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366649} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=1935440914 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434834807598175}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1935440914, tmp_checksum:0}) [2024-03-15 07:03:41.564644] 
INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=151] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a87ac50, trans_id:{txid:97366649}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181203492}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():51}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434834810234890}, max_applying_log_ts:{val:1710434834810234890}, max_applying_part_log_no:3, max_submitted_seq_no:1710434834797987, checksum:1935440914, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200011816}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87c460, is_inited_:true, trans_id:{txid:97366649}, ls_id:{id:1}, ctx:0x7f547a87ac50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366649}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366649}, state:1, commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434834807598175}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434834810234890} lock_wait_start_ts=0 replay_compact_version={val:1710433924715818735}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366649} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=1935440914 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434834807598175}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1935440914, tmp_checksum:0}, checksum=1935440914, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.564799] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ 
(ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=132] store ctx_info: (ret=0, info={tx_id:{txid:97366649}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97366649}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():51}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434834810234890}, max_applying_log_ts:{val:1710434834810234890}, max_applying_part_log_no:3, max_submitted_seq_no:1710434834797987, checksum:1935440914, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200011816}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a87ac50, trans_id:{txid:97366649}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181203492}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():51}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434834810234890}, max_applying_log_ts:{val:1710434834810234890}, max_applying_part_log_no:3, max_submitted_seq_no:1710434834797987, checksum:1935440914, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200011816}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87c460, is_inited_:true, trans_id:{txid:97366649}, ls_id:{id:1}, ctx:0x7f547a87ac50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366649}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366649}, state:1, commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434834807598175}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 
trans_version={val:4611686018427387903} commit_version={val:1710434834810234890} lock_wait_start_ts=0 replay_compact_version={val:1710433924715818735}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366649} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=1935440914 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434834807598175}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.564991] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=190][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a87ac50, trans_id:{txid:97366649}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181203492}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():51}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434834810234890}, max_applying_log_ts:{val:1710434834810234890}, max_applying_part_log_no:3, max_submitted_seq_no:1710434834797987, checksum:1935440914, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200011816}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87c460, is_inited_:true, trans_id:{txid:97366649}, ls_id:{id:1}, ctx:0x7f547a87ac50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366649}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366649}, state:1, commit_version:{val:1710434834810234890}, start_scn:{val:1710434834807598175}, end_scn:{val:1710434834810234890}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434834807598175}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434834810234890} lock_wait_start_ts=0 replay_compact_version={val:1710433924715818735}} end_code=-6233 tx_status=0 
is_readonly=false ref=0 trans_id={txid:97366649} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=1935440914 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434834807598175}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.565150] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=150] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.565234] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710434961555661114}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a828cd0, trans_id:{txid:97366720}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181350209}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434961555661114}, max_applying_log_ts:{val:1710434961555661114}, max_applying_part_log_no:3, max_submitted_seq_no:1710434961541958, checksum:118964550, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200970071}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a82a4e0, is_inited_:true, trans_id:{txid:97366720}, ls_id:{id:1}, ctx:0x7f547a828cd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366720}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366720}, state:1, commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434961499670379}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} 
commit_version={val:1710434961555661114} lock_wait_start_ts=0 replay_compact_version={val:1710434172555532195}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366720} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=118964550 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434961499670379}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:118964550, tmp_checksum:0}) [2024-03-15 07:03:41.565396] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=160] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a828cd0, trans_id:{txid:97366720}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181350209}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434961555661114}, max_applying_log_ts:{val:1710434961555661114}, max_applying_part_log_no:3, max_submitted_seq_no:1710434961541958, checksum:118964550, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200970071}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a82a4e0, is_inited_:true, trans_id:{txid:97366720}, ls_id:{id:1}, ctx:0x7f547a828cd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366720}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366720}, state:1, commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434961499670379}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434961555661114} lock_wait_start_ts=0 replay_compact_version={val:1710434172555532195}} 
end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366720} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=118964550 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434961499670379}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:118964550, tmp_checksum:0}, checksum=118964550, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.565572] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=173] store ctx_info: (ret=0, info={tx_id:{txid:97366720}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97366720}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434961555661114}, max_applying_log_ts:{val:1710434961555661114}, max_applying_part_log_no:3, max_submitted_seq_no:1710434961541958, checksum:118964550, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200970071}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a828cd0, trans_id:{txid:97366720}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181350209}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434961555661114}, max_applying_log_ts:{val:1710434961555661114}, max_applying_part_log_no:3, max_submitted_seq_no:1710434961541958, checksum:118964550, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200970071}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, 
lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a82a4e0, is_inited_:true, trans_id:{txid:97366720}, ls_id:{id:1}, ctx:0x7f547a828cd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366720}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366720}, state:1, commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434961499670379}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434961555661114} lock_wait_start_ts=0 replay_compact_version={val:1710434172555532195}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366720} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=118964550 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434961499670379}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.565747] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=173][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a828cd0, trans_id:{txid:97366720}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181350209}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434961555661114}, max_applying_log_ts:{val:1710434961555661114}, max_applying_part_log_no:3, max_submitted_seq_no:1710434961541958, checksum:118964550, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87200970071}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a82a4e0, is_inited_:true, trans_id:{txid:97366720}, ls_id:{id:1}, ctx:0x7f547a828cd0, tx_data_guard:{tx_data:NULL}, 
is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366720}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366720}, state:1, commit_version:{val:1710434961555661114}, start_scn:{val:1710434961499670379}, end_scn:{val:1710434961555661114}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434961499670379}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434961555661114} lock_wait_start_ts=0 replay_compact_version={val:1710434172555532195}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366720} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=118964550 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434961499670379}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.565880] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=126] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.565953] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710402653699896929}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a8dde50, trans_id:{txid:97364417}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180862126}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402653699896929}, max_applying_log_ts:{val:1710402653699896929}, max_applying_part_log_no:3, max_submitted_seq_no:1710402653697414, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187999587}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, 
this:0x7f547a8df660, is_inited_:true, trans_id:{txid:97364417}, ls_id:{id:1}, ctx:0x7f547a8dde50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364417}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364417}, state:1, commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402653635140842}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402653699896929} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364417} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402653635140842}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}) [2024-03-15 07:03:41.566099] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=144] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a8dde50, trans_id:{txid:97364417}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180862126}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402653699896929}, max_applying_log_ts:{val:1710402653699896929}, max_applying_part_log_no:3, max_submitted_seq_no:1710402653697414, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187999587}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8df660, is_inited_:true, trans_id:{txid:97364417}, ls_id:{id:1}, ctx:0x7f547a8dde50, 
tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364417}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364417}, state:1, commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402653635140842}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402653699896929} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364417} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402653635140842}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}, checksum=4106063110, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.566233] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=132] store ctx_info: (ret=0, info={tx_id:{txid:97364417}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97364417}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402653699896929}, max_applying_log_ts:{val:1710402653699896929}, max_applying_part_log_no:3, max_submitted_seq_no:1710402653697414, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187999587}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a8dde50, trans_id:{txid:97364417}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180862126}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], 
incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402653699896929}, max_applying_log_ts:{val:1710402653699896929}, max_applying_part_log_no:3, max_submitted_seq_no:1710402653697414, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187999587}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8df660, is_inited_:true, trans_id:{txid:97364417}, ls_id:{id:1}, ctx:0x7f547a8dde50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364417}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364417}, state:1, commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402653635140842}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402653699896929} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364417} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402653635140842}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.566406] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=172][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a8dde50, trans_id:{txid:97364417}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180862126}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], 
scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402653699896929}, max_applying_log_ts:{val:1710402653699896929}, max_applying_part_log_no:3, max_submitted_seq_no:1710402653697414, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187999587}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8df660, is_inited_:true, trans_id:{txid:97364417}, ls_id:{id:1}, ctx:0x7f547a8dde50, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364417}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364417}, state:1, commit_version:{val:1710402653699896929}, start_scn:{val:1710402653635140842}, end_scn:{val:1710402653699896929}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402653635140842}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402653699896929} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364417} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402653635140842}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.566564] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=151] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.566911] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710318172205294335}, checksum_scn:{val:0}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a8e39d0, trans_id:{txid:97264674}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482165535185}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, 
redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710318172205294335}, max_applying_log_ts:{val:1710318172205294335}, max_applying_part_log_no:3, max_submitted_seq_no:1710318171746259, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86858807910}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e51e0, is_inited_:true, trans_id:{txid:97264674}, ls_id:{id:1}, ctx:0x7f547a8e39d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97264674}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97264674}, state:1, commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710318172145187557}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710318172205294335} lock_wait_start_ts=0 replay_compact_version={val:1710317641144199976}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97264674} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710318172145187557}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}) [2024-03-15 07:03:41.567061] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=148] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a8e39d0, trans_id:{txid:97264674}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482165535185}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, 
trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710318172205294335}, max_applying_log_ts:{val:1710318172205294335}, max_applying_part_log_no:3, max_submitted_seq_no:1710318171746259, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86858807910}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e51e0, is_inited_:true, trans_id:{txid:97264674}, ls_id:{id:1}, ctx:0x7f547a8e39d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97264674}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97264674}, state:1, commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710318172145187557}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710318172205294335} lock_wait_start_ts=0 replay_compact_version={val:1710317641144199976}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97264674} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710318172145187557}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}, checksum=0, checksum_scn={val:0}) [2024-03-15 07:03:41.567195] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=131] store ctx_info: (ret=0, info={tx_id:{txid:97264674}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97264674}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710318172205294335}, max_applying_log_ts:{val:1710318172205294335}, max_applying_part_log_no:3, max_submitted_seq_no:1710318171746259, checksum:0, checksum_scn:{val:0}, 
max_durable_lsn:{lsn:86858807910}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}}, this={this:0x7f547a8e39d0, trans_id:{txid:97264674}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482165535185}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710318172205294335}, max_applying_log_ts:{val:1710318172205294335}, max_applying_part_log_no:3, max_submitted_seq_no:1710318171746259, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86858807910}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e51e0, is_inited_:true, trans_id:{txid:97264674}, ls_id:{id:1}, ctx:0x7f547a8e39d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97264674}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97264674}, state:1, commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710318172145187557}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710318172205294335} lock_wait_start_ts=0 replay_compact_version={val:1710317641144199976}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97264674} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710318172145187557}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.567367] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=171][errcode=0] we should not 
allow concurrent merge of tx ctx table(*this={this:0x7f547a8e39d0, trans_id:{txid:97264674}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482165535185}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710318172205294335}, max_applying_log_ts:{val:1710318172205294335}, max_applying_part_log_no:3, max_submitted_seq_no:1710318171746259, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86858807910}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e51e0, is_inited_:true, trans_id:{txid:97264674}, ls_id:{id:1}, ctx:0x7f547a8e39d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97264674}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97264674}, state:1, commit_version:{val:1710318172205294335}, start_scn:{val:1710318172145187557}, end_scn:{val:1710318172205294335}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710318172145187557}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710318172205294335} lock_wait_start_ts=0 replay_compact_version={val:1710317641144199976}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97264674} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710318172145187557}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.567524] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=148] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.567670] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log 
ts(functor={target_scn:{val:1710434022253820403}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a87cad0, trans_id:{txid:97366208}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181063103}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():553}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434022253820403}, max_applying_log_ts:{val:1710434022253820403}, max_applying_part_log_no:3, max_submitted_seq_no:1710434021729185, checksum:1488654682, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87197430540}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87e2e0, is_inited_:true, trans_id:{txid:97366208}, ls_id:{id:1}, ctx:0x7f547a87cad0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366208}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366208}, state:1, commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434022111843528}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434022253820403} lock_wait_start_ts=0 replay_compact_version={val:1710433429038972324}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366208} ls_id=1 callback_alloc_count=12 callback_free_count=12 checksum=1488654682 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=21, slave=0, merge=0, tx_end=21, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434022111843528}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1488654682, tmp_checksum:0}) [2024-03-15 07:03:41.567818] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=148] get checksum and 
checksum_scn(this={get_trans_ctx():{this:0x7f547a87cad0, trans_id:{txid:97366208}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181063103}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():553}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434022253820403}, max_applying_log_ts:{val:1710434022253820403}, max_applying_part_log_no:3, max_submitted_seq_no:1710434021729185, checksum:1488654682, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87197430540}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87e2e0, is_inited_:true, trans_id:{txid:97366208}, ls_id:{id:1}, ctx:0x7f547a87cad0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366208}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366208}, state:1, commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434022111843528}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434022253820403} lock_wait_start_ts=0 replay_compact_version={val:1710433429038972324}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366208} ls_id=1 callback_alloc_count=12 callback_free_count=12 checksum=1488654682 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=21, slave=0, merge=0, tx_end=21, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434022111843528}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1488654682, tmp_checksum:0}, checksum=1488654682, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.568084] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.567951] 
INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=131] store ctx_info: (ret=0, info={tx_id:{txid:97366208}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97366208}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():553}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434022253820403}, max_applying_log_ts:{val:1710434022253820403}, max_applying_part_log_no:3, max_submitted_seq_no:1710434021729185, checksum:1488654682, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87197430540}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a87cad0, trans_id:{txid:97366208}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181063103}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():553}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434022253820403}, max_applying_log_ts:{val:1710434022253820403}, max_applying_part_log_no:3, max_submitted_seq_no:1710434021729185, checksum:1488654682, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87197430540}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87e2e0, is_inited_:true, trans_id:{txid:97366208}, ls_id:{id:1}, ctx:0x7f547a87cad0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366208}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366208}, state:1, commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434022111843528}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 
min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434022253820403} lock_wait_start_ts=0 replay_compact_version={val:1710433429038972324}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366208} ls_id=1 callback_alloc_count=12 callback_free_count=12 checksum=1488654682 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=21, slave=0, merge=0, tx_end=21, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434022111843528}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.568142] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=31][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221568066], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.568194] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=46][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221568066], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.568127] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=173][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a87cad0, trans_id:{txid:97366208}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181063103}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():553}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710434022253820403}, max_applying_log_ts:{val:1710434022253820403}, max_applying_part_log_no:3, max_submitted_seq_no:1710434021729185, checksum:1488654682, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87197430540}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87e2e0, is_inited_:true, trans_id:{txid:97366208}, ls_id:{id:1}, ctx:0x7f547a87cad0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366208}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710434022253820403}, 
start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366208}, state:1, commit_version:{val:1710434022253820403}, start_scn:{val:1710434022111843528}, end_scn:{val:1710434022253820403}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710434022111843528}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710434022253820403} lock_wait_start_ts=0 replay_compact_version={val:1710433429038972324}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366208} ls_id=1 callback_alloc_count=12 callback_free_count=12 checksum=1488654682 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=21, slave=0, merge=0, tx_end=21, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710434022111843528}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.568261] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=126] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.568270] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=44] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221568066]}) [2024-03-15 07:03:41.568297] WDIAG [STORAGE.TRANS] operator() (ob_ts_mgr.h:167) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=25][errcode=-4023] refresh gts failed(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:41.568316] INFO [STORAGE.TRANS] operator() (ob_ts_mgr.h:171) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=18] refresh gts functor(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:41.568357] WDIAG [SHARE.LOCATION] batch_process_tasks (ob_ls_location_service.cpp:524) [159][SysLocAsyncUp0][T0][YB427F000001-000613ACAFBFB1A9-0-0] [lt=24][errcode=0] tenant schema is not ready, need wait(ret=0, ret="OB_SUCCESS", superior_tenant_id=1, tasks=[{cluster_id:1, tenant_id:1, ls_id:{id:1}, add_timestamp:1710486221568238}]) [2024-03-15 07:03:41.568345] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=13] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:41.568521] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=169][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486221568339], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.568584] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221568339]}) [2024-03-15 07:03:41.568554] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) 
[857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=12] calc checksum before log ts(functor={target_scn:{val:1710403026705235189}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a8807d0, trans_id:{txid:97364617}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180871751}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710403026705235189}, max_applying_log_ts:{val:1710403026705235189}, max_applying_part_log_no:3, max_submitted_seq_no:1710403026699319, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87189257847}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a881fe0, is_inited_:true, trans_id:{txid:97364617}, ls_id:{id:1}, ctx:0x7f547a8807d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364617}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364617}, state:1, commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710403026551568373}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710403026705235189} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364617} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710403026551568373}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}) [2024-03-15 07:03:41.568704] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) 
[857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=148] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a8807d0, trans_id:{txid:97364617}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180871751}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710403026705235189}, max_applying_log_ts:{val:1710403026705235189}, max_applying_part_log_no:3, max_submitted_seq_no:1710403026699319, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87189257847}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a881fe0, is_inited_:true, trans_id:{txid:97364617}, ls_id:{id:1}, ctx:0x7f547a8807d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364617}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364617}, state:1, commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710403026551568373}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710403026705235189} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364617} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710403026551568373}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}, checksum=4106063110, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.568941] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] 
[lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.568963] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=17][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486221568921], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.568990] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=15] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221568921]}) [2024-03-15 07:03:41.568838] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=131] store ctx_info: (ret=0, info={tx_id:{txid:97364617}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97364617}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710403026705235189}, max_applying_log_ts:{val:1710403026705235189}, max_applying_part_log_no:3, max_submitted_seq_no:1710403026699319, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87189257847}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a8807d0, trans_id:{txid:97364617}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180871751}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710403026705235189}, max_applying_log_ts:{val:1710403026705235189}, max_applying_part_log_no:3, max_submitted_seq_no:1710403026699319, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87189257847}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a881fe0, is_inited_:true, trans_id:{txid:97364617}, ls_id:{id:1}, ctx:0x7f547a8807d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, 
mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364617}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364617}, state:1, commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710403026551568373}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710403026705235189} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364617} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710403026551568373}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.569043] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=203][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a8807d0, trans_id:{txid:97364617}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180871751}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710403026705235189}, max_applying_log_ts:{val:1710403026705235189}, max_applying_part_log_no:3, max_submitted_seq_no:1710403026699319, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87189257847}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a881fe0, is_inited_:true, trans_id:{txid:97364617}, ls_id:{id:1}, ctx:0x7f547a8807d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364617}, ref_cnt:2, 
state:"COMMIT", commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364617}, state:1, commit_version:{val:1710403026705235189}, start_scn:{val:1710403026551568373}, end_scn:{val:1710403026705235189}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710403026551568373}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710403026705235189} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364617} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710403026551568373}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187100734}) [2024-03-15 07:03:41.569189] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=137] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.569330] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710300088531356512}, checksum_scn:{val:0}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a886350, trans_id:{txid:97201961}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482160345952}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300088531356512}, max_applying_log_ts:{val:1710300088531356512}, max_applying_part_log_no:3, max_submitted_seq_no:1710300088210809, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86776661752}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a887b60, is_inited_:true, trans_id:{txid:97201961}, ls_id:{id:1}, ctx:0x7f547a886350, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, 
ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97201961}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97201961}, state:1, commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710300088341019363}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710300088531356512} lock_wait_start_ts=0 replay_compact_version={val:1710299729348218561}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97201961} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710300088341019363}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}) [2024-03-15 07:03:41.569512] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=180] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a886350, trans_id:{txid:97201961}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482160345952}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300088531356512}, max_applying_log_ts:{val:1710300088531356512}, max_applying_part_log_no:3, max_submitted_seq_no:1710300088210809, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86776661752}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a887b60, is_inited_:true, trans_id:{txid:97201961}, ls_id:{id:1}, ctx:0x7f547a886350, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97201961}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, 
end_scn:{val:1710300088531356512}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97201961}, state:1, commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710300088341019363}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710300088531356512} lock_wait_start_ts=0 replay_compact_version={val:1710299729348218561}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97201961} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710300088341019363}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}, checksum=0, checksum_scn={val:0}) [2024-03-15 07:03:41.569655] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=140] store ctx_info: (ret=0, info={tx_id:{txid:97201961}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97201961}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300088531356512}, max_applying_log_ts:{val:1710300088531356512}, max_applying_part_log_no:3, max_submitted_seq_no:1710300088210809, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86776661752}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}}, this={this:0x7f547a886350, trans_id:{txid:97201961}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482160345952}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300088531356512}, max_applying_log_ts:{val:1710300088531356512}, max_applying_part_log_no:3, max_submitted_seq_no:1710300088210809, 
checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86776661752}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a887b60, is_inited_:true, trans_id:{txid:97201961}, ls_id:{id:1}, ctx:0x7f547a886350, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97201961}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97201961}, state:1, commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710300088341019363}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710300088531356512} lock_wait_start_ts=0 replay_compact_version={val:1710299729348218561}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97201961} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710300088341019363}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.569833] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=176][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a886350, trans_id:{txid:97201961}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482160345952}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710300088531356512}, max_applying_log_ts:{val:1710300088531356512}, max_applying_part_log_no:3, max_submitted_seq_no:1710300088210809, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86776661752}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", 
bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a887b60, is_inited_:true, trans_id:{txid:97201961}, ls_id:{id:1}, ctx:0x7f547a886350, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97201961}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97201961}, state:1, commit_version:{val:1710300088531356512}, start_scn:{val:1710300088341019363}, end_scn:{val:1710300088531356512}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710300088341019363}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710300088531356512} lock_wait_start_ts=0 replay_compact_version={val:1710299729348218561}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97201961} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710300088341019363}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.569974] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=132] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.570115] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710399127821303489}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a882650, trans_id:{txid:97362411}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180518304}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():1474}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710399127821303489}, max_applying_log_ts:{val:1710399127821303489}, max_applying_part_log_no:3, max_submitted_seq_no:1710399127248452, checksum:1582670829, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87174998374}, data_complete:false, is_dup_tx:false, 
prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a883e60, is_inited_:true, trans_id:{txid:97362411}, ls_id:{id:1}, ctx:0x7f547a882650, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97362411}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97362411}, state:1, commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710399127732911047}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710399127821303489} lock_wait_start_ts=0 replay_compact_version={val:1710397178709776789}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97362411} ls_id=1 callback_alloc_count=37 callback_free_count=37 checksum=1582670829 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=51, slave=0, merge=0, tx_end=51, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710399127732911047}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1582670829, tmp_checksum:0}) [2024-03-15 07:03:41.570264] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=148] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a882650, trans_id:{txid:97362411}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180518304}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():1474}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710399127821303489}, max_applying_log_ts:{val:1710399127821303489}, max_applying_part_log_no:3, max_submitted_seq_no:1710399127248452, checksum:1582670829, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87174998374}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", 
bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a883e60, is_inited_:true, trans_id:{txid:97362411}, ls_id:{id:1}, ctx:0x7f547a882650, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97362411}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97362411}, state:1, commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710399127732911047}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710399127821303489} lock_wait_start_ts=0 replay_compact_version={val:1710397178709776789}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97362411} ls_id=1 callback_alloc_count=37 callback_free_count=37 checksum=1582670829 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=51, slave=0, merge=0, tx_end=51, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710399127732911047}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:1582670829, tmp_checksum:0}, checksum=1582670829, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.570406] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=139] store ctx_info: (ret=0, info={tx_id:{txid:97362411}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97362411}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():1474}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710399127821303489}, max_applying_log_ts:{val:1710399127821303489}, max_applying_part_log_no:3, max_submitted_seq_no:1710399127248452, checksum:1582670829, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87174998374}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a882650, 
trans_id:{txid:97362411}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180518304}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():1474}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710399127821303489}, max_applying_log_ts:{val:1710399127821303489}, max_applying_part_log_no:3, max_submitted_seq_no:1710399127248452, checksum:1582670829, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87174998374}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a883e60, is_inited_:true, trans_id:{txid:97362411}, ls_id:{id:1}, ctx:0x7f547a882650, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97362411}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97362411}, state:1, commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710399127732911047}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710399127821303489} lock_wait_start_ts=0 replay_compact_version={val:1710397178709776789}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97362411} ls_id=1 callback_alloc_count=37 callback_free_count=37 checksum=1582670829 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=51, slave=0, merge=0, tx_end=51, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710399127732911047}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.570621] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=214][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a882650, trans_id:{txid:97362411}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], 
need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180518304}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():1474}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710399127821303489}, max_applying_log_ts:{val:1710399127821303489}, max_applying_part_log_no:3, max_submitted_seq_no:1710399127248452, checksum:1582670829, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87174998374}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a883e60, is_inited_:true, trans_id:{txid:97362411}, ls_id:{id:1}, ctx:0x7f547a882650, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97362411}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97362411}, state:1, commit_version:{val:1710399127821303489}, start_scn:{val:1710399127732911047}, end_scn:{val:1710399127821303489}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710399127732911047}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710399127821303489} lock_wait_start_ts=0 replay_compact_version={val:1710397178709776789}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97362411} ls_id=1 callback_alloc_count=37 callback_free_count=37 checksum=1582670829 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=51, slave=0, merge=0, tx_end=51, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710399127732911047}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.570757] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=127] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.570989] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710435096734134968}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a878dd0, trans_id:{txid:97366790}, tenant_id:1004, 
is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181525227}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():47}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710435096734134968}, max_applying_log_ts:{val:1710435096734134968}, max_applying_part_log_no:3, max_submitted_seq_no:1710435096728527, checksum:2661375979, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87201129960}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87a5e0, is_inited_:true, trans_id:{txid:97366790}, ls_id:{id:1}, ctx:0x7f547a878dd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366790}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366790}, state:1, commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710435096682739897}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710435096734134968} lock_wait_start_ts=0 replay_compact_version={val:1710434761310890062}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366790} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=2661375979 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710435096682739897}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:2661375979, tmp_checksum:0}) [2024-03-15 07:03:41.571137] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=146] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a878dd0, trans_id:{txid:97366790}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, 
trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181525227}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():47}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710435096734134968}, max_applying_log_ts:{val:1710435096734134968}, max_applying_part_log_no:3, max_submitted_seq_no:1710435096728527, checksum:2661375979, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87201129960}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87a5e0, is_inited_:true, trans_id:{txid:97366790}, ls_id:{id:1}, ctx:0x7f547a878dd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366790}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366790}, state:1, commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710435096682739897}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710435096734134968} lock_wait_start_ts=0 replay_compact_version={val:1710434761310890062}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366790} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=2661375979 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710435096682739897}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:2661375979, tmp_checksum:0}, checksum=2661375979, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.571272] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=133] store ctx_info: (ret=0, info={tx_id:{txid:97366790}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97366790}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}, 
undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():47}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710435096734134968}, max_applying_log_ts:{val:1710435096734134968}, max_applying_part_log_no:3, max_submitted_seq_no:1710435096728527, checksum:2661375979, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87201129960}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a878dd0, trans_id:{txid:97366790}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181525227}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():47}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710435096734134968}, max_applying_log_ts:{val:1710435096734134968}, max_applying_part_log_no:3, max_submitted_seq_no:1710435096728527, checksum:2661375979, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87201129960}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87a5e0, is_inited_:true, trans_id:{txid:97366790}, ls_id:{id:1}, ctx:0x7f547a878dd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366790}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366790}, state:1, commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710435096682739897}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710435096734134968} lock_wait_start_ts=0 replay_compact_version={val:1710434761310890062}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366790} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=2661375979 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 
redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710435096682739897}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.571473] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=200][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a878dd0, trans_id:{txid:97366790}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482181525227}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():47}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710435096734134968}, max_applying_log_ts:{val:1710435096734134968}, max_applying_part_log_no:3, max_submitted_seq_no:1710435096728527, checksum:2661375979, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87201129960}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a87a5e0, is_inited_:true, trans_id:{txid:97366790}, ls_id:{id:1}, ctx:0x7f547a878dd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97366790}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97366790}, state:1, commit_version:{val:1710435096734134968}, start_scn:{val:1710435096682739897}, end_scn:{val:1710435096734134968}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710435096682739897}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710435096734134968} lock_wait_start_ts=0 replay_compact_version={val:1710434761310890062}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97366790} ls_id=1 callback_alloc_count=8 callback_free_count=8 checksum=2661375979 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=15, slave=0, merge=0, tx_end=15, rollback_to=0, fast_commit=0, 
remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710435096682739897}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.571628] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=146] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.571781] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=12] calc checksum before log ts(functor={target_scn:{val:1710402586638587493}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a8dfcd0, trans_id:{txid:97364353}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180858413}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402586638587493}, max_applying_log_ts:{val:1710402586638587493}, max_applying_part_log_no:3, max_submitted_seq_no:1710402586553145, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187111110}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e14e0, is_inited_:true, trans_id:{txid:97364353}, ls_id:{id:1}, ctx:0x7f547a8dfcd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364353}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364353}, state:1, commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402586630060917}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402586638587493} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364353} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 
main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402586630060917}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}) [2024-03-15 07:03:41.572018] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.571929] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=146] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a8dfcd0, trans_id:{txid:97364353}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180858413}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402586638587493}, max_applying_log_ts:{val:1710402586638587493}, max_applying_part_log_no:3, max_submitted_seq_no:1710402586553145, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187111110}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e14e0, is_inited_:true, trans_id:{txid:97364353}, ls_id:{id:1}, ctx:0x7f547a8dfcd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364353}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364353}, state:1, commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}}, 
read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402586630060917}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402586638587493} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364353} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402586630060917}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}, checksum=4106063110, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.572060] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.572065] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=132] store ctx_info: (ret=0, info={tx_id:{txid:97364353}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97364353}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402586638587493}, max_applying_log_ts:{val:1710402586638587493}, max_applying_part_log_no:3, max_submitted_seq_no:1710402586553145, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187111110}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a8dfcd0, trans_id:{txid:97364353}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180858413}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, 
multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402586638587493}, max_applying_log_ts:{val:1710402586638587493}, max_applying_part_log_no:3, max_submitted_seq_no:1710402586553145, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187111110}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e14e0, is_inited_:true, trans_id:{txid:97364353}, ls_id:{id:1}, ctx:0x7f547a8dfcd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364353}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364353}, state:1, commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402586630060917}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402586638587493} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364353} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402586630060917}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.572240] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=173][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a8dfcd0, trans_id:{txid:97364353}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180858413}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, 
next_log_entry_no:2, max_applied_log_ts:{val:1710402586638587493}, max_applying_log_ts:{val:1710402586638587493}, max_applying_part_log_no:3, max_submitted_seq_no:1710402586553145, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87187111110}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a8e14e0, is_inited_:true, trans_id:{txid:97364353}, ls_id:{id:1}, ctx:0x7f547a8dfcd0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364353}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364353}, state:1, commit_version:{val:1710402586638587493}, start_scn:{val:1710402586630060917}, end_scn:{val:1710402586638587493}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402586630060917}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402586638587493} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364353} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402586630060917}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.572372] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=126] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.572834] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=53] add dag success(dag=0x7f544482c080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-6402190515082147958, dag_cnt=2, dag_type_cnts=1) [2024-03-15 07:03:41.572884] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=30] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101065}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f545f263b40, key:{tablet_id:{id:101065}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149305805}, 
this:0x7f545f263b40, timestamp:1710482149305805, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:29, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822661, mt_stat_.ready_for_flush_time:1710483939822714, mt_stat_.create_flush_dag_time:1710486221558090, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.572951] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=19] succeed to add sys task(task={start_time:1710486221572943, task_id:YB427F000001-000613ACAD3FC20F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101065"}) [2024-03-15 07:03:41.572975] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=21] schedule one task(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.573136] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=11][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.573159] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=23][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.573181] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=13] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.573224] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=14] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.573245] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=15] get storage schema to merge(ls_id={id:1}, tablet_id={id:101065}, schema_ctx={base_schema_version:0, schema_version:1681902230998688, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1d990, version:0, is_use_bloomfilter:0, 
column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902230998688, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:19, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.573405] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=126] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101065}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.573501] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=46] succeed to build merge ctx(tablet_id={id:101065}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101065}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230998688, schema_version:1681902230998688, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1d990, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902230998688, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:19, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101065}, table_count:1, [{i:0, table_key:{tablet_id:{id:101065}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550af1d410, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221572692, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=82us|(0.24)|GET_PARALLEL_RANGE=253us|(0.76)|total=335us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.573746] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=191] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}, scanned_row_cnt_arr:0x7f542c0275f0, output_block_cnt_arr:0x7f542c0275f8, concurrent_cnt:1, estimate_row_cnt:108, estimate_occupy_size:0, latest_update_ts:1710486221572692, estimated_finish_time:1710486238573743}) [2024-03-15 07:03:41.573781] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=34] succeed to init merge ctx(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}) [2024-03-15 07:03:41.573807] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=24] task finish process(ret=0, start_time=1710486221573117, end_time=1710486221573804, runtime=687, 
*this={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}) [2024-03-15 07:03:41.573895] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=78] add dag success(dag=0x7f544482c590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=7224666818425847873, dag_cnt=3, dag_type_cnts=2) [2024-03-15 07:03:41.573915] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=19] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:333}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f545f264310, key:{tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149306305}, this:0x7f545f264310, timestamp:1710482149306305, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:30, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822755, mt_stat_.ready_for_flush_time:1710483939822797, mt_stat_.create_flush_dag_time:1710486221572962, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.573976] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=34] schedule one task(task={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.574017] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=28] succeed to add sys task(task={start_time:1710486221574013, task_id:YB427F000001-000613ACAD3FC210-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=333"}) [2024-03-15 07:03:41.574032] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=13] schedule one task(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, 
total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.574239] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13] calc checksum before log ts(functor={target_scn:{val:1710402728620248601}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a87e950, trans_id:{txid:97364458}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180937196}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402728620248601}, max_applying_log_ts:{val:1710402728620248601}, max_applying_part_log_no:3, max_submitted_seq_no:1710402728617714, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87188092393}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a880160, is_inited_:true, trans_id:{txid:97364458}, ls_id:{id:1}, ctx:0x7f547a87e950, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364458}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364458}, state:1, commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402728458568504}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402728620248601} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364458} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402728458568504}, lastest_snapshot:{val:18446744073709551615}, 
state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}) [2024-03-15 07:03:41.574510] ERROR alloc_block (ob_local_device.cpp:716) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=14][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.574527] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=16][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.574537] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=9][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.574545] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=9][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.574413] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=171] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a87e950, trans_id:{txid:97364458}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180937196}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402728620248601}, max_applying_log_ts:{val:1710402728620248601}, max_applying_part_log_no:3, max_submitted_seq_no:1710402728617714, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87188092393}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a880160, is_inited_:true, trans_id:{txid:97364458}, ls_id:{id:1}, ctx:0x7f547a87e950, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364458}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364458}, state:1, commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402728458568504}, is_incomplete_replay_ctx_:false, 
mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402728620248601} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364458} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402728458568504}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:4611686018427387903}, checksum:4106063110, tmp_checksum:0}, checksum=4106063110, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.574553] WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:6, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f81b0, hex: 8B1967328B130600, int: 1710338427263371},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f80a8, hex: 0000000000000000, int: 0},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f80e0, hex: 4B01000000000000, int: 331},idx=3:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f8118, hex: 4B01000000000000, int: 331},idx=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f8150, hex: 48FC0F1D43A843E8, int: -1710338427266794424},idx=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54657f8188, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-9457, column_count:6, rowkey_column_count:6, has_column_checksum:0, row_count:108, row_store_type:0, opt:4, var_column_count:0, row_offset:5790, original_length:6162, max_merged_trans_version:1710338427266794424, data_length:6162, data_zlength:6162, data_checksum:3857027817, column_checksums:null, single_version_rows:0, contain_uncommitted_rows:0, is_last_row_last_flag:1, is_valid():true}, buf:0x7f543ec04090, buf_size:6162, data_size:6162, row_count:108, column_count:6, max_merged_trans_version:1710338427266794424, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:1686750586, row_count_delta:18, contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:6162}) [2024-03-15 07:03:41.574602] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=47][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.574609] WDIAG [STORAGE] close (ob_macro_block_writer.cpp:707) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6][errcode=-4184] macro block writer fail to build current micro block.(ret=-4184) [2024-03-15 07:03:41.574615] WDIAG [STORAGE] close (ob_partition_merger.cpp:170) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6][errcode=-4184] Failed to close macro block writer(ret=-4184) [2024-03-15 07:03:41.574621] WDIAG [STORAGE] close 
(ob_partition_merger.cpp:988) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=7][errcode=-4184] Failed to finish merge for partition merger(ret=-4184) [2024-03-15 07:03:41.574628] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1156) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6][errcode=-4184] failed to close partition merger(ret=-4184) [2024-03-15 07:03:41.574635] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6] partition merge iter row count(i=0, row_count=108, ghost_row_count=0, pkey={tablet_id:{id:101065}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f545f263b40, key:{tablet_id:{id:101065}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149305805}, this:0x7f545f263b40, timestamp:1710482149305805, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:29, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822661, mt_stat_.ready_for_flush_time:1710483939822714, mt_stat_.create_flush_dag_time:1710486221558090, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.574685] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=41][errcode=-4184] failed to merge partition(ret=-4184) [2024-03-15 07:03:41.574708] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1446) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=6][errcode=-4184] failed to merge(ret=-4184, ctx_->param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101065}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, idx_=0) [2024-03-15 07:03:41.574721] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=11][errcode=-4184] failed to process task(ret=-4184) [2024-03-15 07:03:41.574728] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=7] task finish process(ret=-4184, start_time=1710486221574031, end_time=1710486221574727, runtime=696, *this={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}) [2024-03-15 07:03:41.574586] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=169] store ctx_info: (ret=0, info={tx_id:{txid:97364458}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97364458}, 
ref_cnt:0, state:"COMMIT", commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402728620248601}, max_applying_log_ts:{val:1710402728620248601}, max_applying_part_log_no:3, max_submitted_seq_no:1710402728617714, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87188092393}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f547a87e950, trans_id:{txid:97364458}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180937196}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402728620248601}, max_applying_log_ts:{val:1710402728620248601}, max_applying_part_log_no:3, max_submitted_seq_no:1710402728617714, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87188092393}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a880160, is_inited_:true, trans_id:{txid:97364458}, ls_id:{id:1}, ctx:0x7f547a87e950, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364458}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364458}, state:1, commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402728458568504}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402728620248601} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364458} ls_id=1 
callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402728458568504}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.574756] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=28][errcode=-4184] failed to do work(ret=-4184, *task_={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:0, dag_status:2, start_time:1710486221572972, running_task_cnt:1, indegree:0, hash:-6402190515082147958}}, compat_mode=0) [2024-03-15 07:03:41.574777] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=14] dag finished(dag_ret=-4184, runtime=1804, dag_cnt=2, dag_cnts_[dag.get_type()]=1, &dag=0x7f544482c080, dag={this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC20F-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221572972, running_task_cnt:0, indegree:0, hash:-6402190515082147958}) [2024-03-15 07:03:41.574797] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC20F-0-0] [lt=14] succeed to del sys task(removed_task={start_time:1710486221572943, task_id:YB427F000001-000613ACAD3FC20F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101065"}) [2024-03-15 07:03:41.574773] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=184][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a87e950, trans_id:{txid:97364458}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482180937196}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710402728620248601}, max_applying_log_ts:{val:1710402728620248601}, max_applying_part_log_no:3, max_submitted_seq_no:1710402728617714, checksum:4106063110, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:87188092393}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, 
submit_ts:0}, this:0x7f547a880160, is_inited_:true, trans_id:{txid:97364458}, ls_id:{id:1}, ctx:0x7f547a87e950, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97364458}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97364458}, state:1, commit_version:{val:1710402728620248601}, start_scn:{val:1710402728458568504}, end_scn:{val:1710402728620248601}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710402728458568504}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710402728620248601} lock_wait_start_ts=0 replay_compact_version={val:1710401970131993833}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97364458} ls_id=1 callback_alloc_count=1 callback_free_count=1 checksum=4106063110 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=3, slave=0, merge=0, tx_end=3, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710402728458568504}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.574912] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=129] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.575213] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=14] calc checksum before log ts(functor={target_scn:{val:1710321006043271943}, checksum_scn:{val:0}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f547a8844d0, trans_id:{txid:97274573}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482166348202}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710321006043271943}, max_applying_log_ts:{val:1710321006043271943}, max_applying_part_log_no:3, max_submitted_seq_no:1710321005658598, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86871573164}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, 
final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a885ce0, is_inited_:true, trans_id:{txid:97274573}, ls_id:{id:1}, ctx:0x7f547a8844d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97274573}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97274573}, state:1, commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710321006000022115}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710321006043271943} lock_wait_start_ts=0 replay_compact_version={val:1710320767861890117}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97274573} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710321006000022115}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}) [2024-03-15 07:03:41.575364] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=149] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f547a8844d0, trans_id:{txid:97274573}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482166348202}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710321006043271943}, max_applying_log_ts:{val:1710321006043271943}, max_applying_part_log_no:3, max_submitted_seq_no:1710321005658598, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86871573164}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a885ce0, is_inited_:true, trans_id:{txid:97274573}, ls_id:{id:1}, 
ctx:0x7f547a8844d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97274573}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97274573}, state:1, commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710321006000022115}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710321006043271943} lock_wait_start_ts=0 replay_compact_version={val:1710320767861890117}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97274573} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710321006000022115}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}, length:0, checksum_scn:{val:0}, checksum:0, tmp_checksum:0}, checksum=0, checksum_scn={val:0}) [2024-03-15 07:03:41.575526] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=159] store ctx_info: (ret=0, info={tx_id:{txid:97274573}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:97274573}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710321006043271943}, max_applying_log_ts:{val:1710321006043271943}, max_applying_part_log_no:3, max_submitted_seq_no:1710321005658598, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86871573164}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}}, this={this:0x7f547a8844d0, trans_id:{txid:97274573}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482166348202}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, 
multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710321006043271943}, max_applying_log_ts:{val:1710321006043271943}, max_applying_part_log_no:3, max_submitted_seq_no:1710321005658598, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86871573164}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a885ce0, is_inited_:true, trans_id:{txid:97274573}, ls_id:{id:1}, ctx:0x7f547a8844d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97274573}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97274573}, state:1, commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710321006000022115}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710321006043271943} lock_wait_start_ts=0 replay_compact_version={val:1710320767861890117}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97274573} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710321006000022115}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.575703] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=176][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f547a8844d0, trans_id:{txid:97274573}, tenant_id:1004, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482166348202}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():590}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710321006043271943}, 
max_applying_log_ts:{val:1710321006043271943}, max_applying_part_log_no:3, max_submitted_seq_no:1710321005658598, checksum:0, checksum_scn:{val:0}, max_durable_lsn:{lsn:86871573164}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:false, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f547a885ce0, is_inited_:true, trans_id:{txid:97274573}, ls_id:{id:1}, ctx:0x7f547a8844d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f545f604030, tx_data_guard_:{tx_data:{tx_id:{txid:97274573}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:97274573}, state:1, commit_version:{val:1710321006043271943}, start_scn:{val:1710321006000022115}, end_scn:{val:1710321006043271943}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710321006000022115}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710321006043271943} lock_wait_start_ts=0 replay_compact_version={val:1710320767861890117}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:97274573} ls_id=1 callback_alloc_count=17 callback_free_count=17 checksum=0 tmp_checksum=0 checksum_scn={val:0} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=27, slave=0, merge=0, tx_end=27, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710321006000022115}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482187101866}) [2024-03-15 07:03:41.575838] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=127] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.576152] ERROR alloc_block (ob_local_device.cpp:716) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.576173] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=19][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.576186] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=13][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.576199] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=12][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.576210] 
WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=10][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:3, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5441e06070, hex: 0D00000000000000, int: 13},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5441e060a8, hex: 00F0FFFFFFFFFFFF, int: -4096},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5441e060e0, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-32380, column_count:5, rowkey_column_count:3, has_column_checksum:0, row_count:14, row_store_type:0, opt:5, var_column_count:0, row_offset:9267, original_length:9263, max_merged_trans_version:4096, data_length:9263, data_zlength:9263, data_checksum:66621890, column_checksums:null, single_version_rows:1, contain_uncommitted_rows:0, is_last_row_last_flag:1, is_valid():true}, buf:0x7f543ee04090, buf_size:9263, data_size:9263, row_count:14, column_count:5, max_merged_trans_version:4096, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:1332158011, row_count_delta:14, contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:9263}) [2024-03-15 07:03:41.576275] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=65][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.576287] WDIAG [STORAGE] close (ob_macro_block_writer.cpp:707) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=12][errcode=-4184] macro block writer fail to build current micro block.(ret=-4184) [2024-03-15 07:03:41.576299] WDIAG [STORAGE] close (ob_partition_merger.cpp:170) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=10][errcode=-4184] Failed to close macro block writer(ret=-4184) [2024-03-15 07:03:41.576309] WDIAG [STORAGE] close (ob_partition_merger.cpp:988) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=10][errcode=-4184] Failed to finish merge for partition merger(ret=-4184) [2024-03-15 07:03:41.576320] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1156) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=10][errcode=-4184] failed to close partition merger(ret=-4184) [2024-03-15 07:03:41.576334] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=11] partition merge iter row count(i=0, row_count=14, ghost_row_count=0, pkey={tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710483939808321}}}, table={ObITable:{this:0x7f545f234080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710483939808321}}}, ref_cnt:3, upper_trans_version:-4007, timestamp:0}, this:0x7f545f234080, snapshot_version:{val:1710483939808321}, ls_id:{id:1}, is_frozen:true}) [2024-03-15 07:03:41.576395] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=43][errcode=-4184] failed to merge partition(ret=-4184) [2024-03-15 07:03:41.576470] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1446) 
[857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=11][errcode=-4184] failed to merge(ret=-4184, ctx_->param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, idx_=0) [2024-03-15 07:03:41.576494] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=22][errcode=-4184] failed to process task(ret=-4184) [2024-03-15 07:03:41.576506] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=11] task finish process(ret=-4184, start_time=1710486221561547, end_time=1710486221576504, runtime=14957, *this={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f5444872080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAD3FC209-0-0, dag_ret:0, dag_status:2, start_time:1710486221557562, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.576537] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=30][errcode=-4184] failed to do work(ret=-4184, *task_={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f5444872080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAD3FC209-0-0, dag_ret:0, dag_status:2, start_time:1710486221557562, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, compat_mode=0) [2024-03-15 07:03:41.576608] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=25] dag finished(dag_ret=-4184, runtime=19031, dag_cnt=1, dag_cnts_[dag.get_type()]=0, &dag=0x7f5444872080, dag={this:0x7f5444872080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAD3FC209-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221557562, running_task_cnt:0, indegree:0, hash:3526853371410145563}) [2024-03-15 07:03:41.576646] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [857][T1004_TX_TABLE_][T1004][YB427F000001-000613ACAD3FC209-0-0] [lt=31] succeed to del sys task(removed_task={start_time:1710486221557520, task_id:YB427F000001-000613ACAD3FC209-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:41.576771] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=14][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.576791] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=20][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.576810] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=12] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.576846] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=13] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.576865] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge 
(ob_tablet_merge_ctx.cpp:1131) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=14] get storage schema to merge(ls_id={id:1}, tablet_id={id:333}, schema_ctx={base_schema_version:0, schema_version:1681902231005328, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976d330, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231005328, column_cnt:29, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.577359] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=258] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:333}, report_:null, for_diagnose:false, 
is_tenant_major_merge:false}) [2024-03-15 07:03:41.577568] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=24] succeed to build merge ctx(tablet_id={id:333}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:333}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902231005328, schema_version:1681902231005328, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976d330, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231005328, column_cnt:29, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:333}, table_count:1, [{i:0, table_key:{tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547976cdb0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221573885, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=71us|(0.11)|GET_PARALLEL_RANGE=559us|(0.89)|total=630us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.578299] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=393] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}, scanned_row_cnt_arr:0x7f542c02b5f0, output_block_cnt_arr:0x7f542c02b5f8, concurrent_cnt:1, estimate_row_cnt:1402, estimate_occupy_size:0, latest_update_ts:1710486221573885, estimated_finish_time:1710486238578295}) [2024-03-15 07:03:41.578336] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=37] succeed to init merge ctx(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}) [2024-03-15 07:03:41.578363] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=24] task finish process(ret=0, start_time=1710486221576755, end_time=1710486221578360, runtime=1605, *this={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}) [2024-03-15 07:03:41.578440] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=17] schedule one task(task={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}, 
priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.578523] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=68] add dag success(dag=0x7f544482caa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1930035241269723440, dag_cnt=2, dag_type_cnts=2) [2024-03-15 07:03:41.578545] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=21] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101066}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f545f264ae0, key:{tablet_id:{id:101066}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149308275}, this:0x7f545f264ae0, timestamp:1710482149308275, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:31, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822836, mt_stat_.ready_for_flush_time:1710483939822878, mt_stat_.create_flush_dag_time:1710486221573983, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.578614] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=27] succeed to add sys task(task={start_time:1710486221578600, task_id:YB427F000001-000613ACAD3FC211-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101066"}) [2024-03-15 07:03:41.578631] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=25] schedule one task(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.578976] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, 
xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221515553, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.579088] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] will sleep(sleep_us=100000, remain_us=8427963, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=159, timeout_timestamp=1710486230007049) [2024-03-15 07:03:41.579101] ERROR alloc_block (ob_local_device.cpp:716) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=15][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.579165] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=63][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.579173] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=8][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.579181] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.579187] WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:7, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f53f1618070, hex: 0000000000000000, int: 0},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f53f16180a8, hex: 0300000000000000, int: 3},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f53f16180e0, hex: 0300000000000000, int: 3},idx=3:{len: 8, flag: 0, null: 0, ptr: 0x7f53f1618118, hex: 1D00000000000000, int: 29},idx=4:{len: 8, flag: 0, null: 0, ptr: 0x7f53f16181e8, hex: B473B31577130600, int: 1710252046382004},idx=5:{len: 8, flag: 0, null: 0, ptr: 0x7f53f1618188, hex: A4380231D3F643E8, int: -1710252046549895004},idx=6:{len: 8, flag: 0, null: 0, ptr: 0x7f53f16181c0, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-3885, column_count:31, rowkey_column_count:7, has_column_checksum:0, row_count:44, row_store_type:0, opt:4, var_column_count:0, row_offset:16636, 
original_length:16752, max_merged_trans_version:1710338426968360956, data_length:16752, data_zlength:16752, data_checksum:933761970, column_checksums:null, single_version_rows:0, contain_uncommitted_rows:0, is_last_row_last_flag:1, is_valid():true}, buf:0x7f53ec604090, buf_size:16752, data_size:16752, row_count:44, column_count:31, max_merged_trans_version:1710338426968360956, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:1517509953, row_count_delta:8, contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:16752}) [2024-03-15 07:03:41.579234] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=46][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.579240] WDIAG [STORAGE] append_row (ob_macro_block_writer.cpp:583) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] Fail to build micro block, (ret=-4184) [2024-03-15 07:03:41.579246] WDIAG [STORAGE] append_row (ob_macro_block_writer.cpp:507) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] Fail to append row(ret=-4184) [2024-03-15 07:03:41.579251] WDIAG [STORAGE] append_row (ob_data_macro_block_merge_writer.cpp:72) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] ObMacroBlockWriter fail to append_row(ret=-4184) [2024-03-15 07:03:41.579257] WDIAG [STORAGE] inner_process (ob_partition_merger.cpp:1064) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] Failed to append row to macro writer(ret=-4184) [2024-03-15 07:03:41.579263] WDIAG [STORAGE] process (ob_partition_merger.cpp:277) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=6][errcode=-4184] Failed to inner append row(ret=-4184) [2024-03-15 07:03:41.579269] WDIAG [STORAGE] merge_single_iter (ob_partition_merger.cpp:1190) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5][errcode=-4184] Failed to process row(ret=-4184, cur_row={row_flag:{flag:"INSERT", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710252046549895004, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:31, datum_buffer:{capacity:32, datums:0x7f5494dc8458, local_datums:0x7f5494dc8458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8468, hex: 0000000000000000, int: 0},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84a0, hex: 0300000000000000, int: 3},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84d8, hex: 0300000000000000, int: 3},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8510, hex: 1D00000000000000, int: 29},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe0, hex: B473B31577130600, int: 1710252046382004},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8580, hex: A4380231D3F643E8, int: -1710252046549895004},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc85b8, hex: 0000000000000000, int: 0},col_id=7:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe8, hex: B6B1B51577130600, int: 1710252046528950},col_id=8:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1eff0, hex: B6B1B51577130600, int: 1710252046528950},col_id=9:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8660, hex: 0100000000000000, int: 1, num_digit0: 0},col_id=10:{len: 8, 
flag: 0, null: 0, ptr: 0x7f5494dc8698, hex: 0000000000000000, int: 0},col_id=11:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1effa, hex: E2EEC26B52100600, int: 1706796041563874},col_id=12:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8708, hex: A800000000000000, int: 168},col_id=13:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8740, hex: 0000000000000000, int: 0},col_id=14:{len: 19, flag: 0, null: 0, ptr: 0x7f5447e1f004, hex: 31373435363238323731323831343936303636, cstr: 1745628271281496066},col_id=15:{len: 26, flag: 0, null: 0, ptr: 0x7f5447e1f017, hex: 3041303533463030383243304633444339464635454439433138, cstr: 0A053F0082C0F3DC9FF5ED9C18},col_id=16:{len: 1, flag: 0, null: 0, ptr: 0x7f5447e1f031, hex: 31, cstr: 1},col_id=17:{len: 10, flag: 0, null: 0, ptr: 0x7f5447e1f032, hex: 30413035334630303031, cstr: 0A053F0001},col_id=18:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1f03c, hex: 0000000000003440, int: 4626322717216342016},col_id=19:{len: 410, flag: 0, null: 0, ptr: 0x7f5447e1f044, hex: 3738394337443533353131364333323030383833413837424544454537464530363930313035363543443037324130343438393531323239373831414235314443444642313942343130304546463331463246304143323836333244414233454641303142314444444133374531464138384130303830393934303531463537, cstr: 789C7D535116C3200883A87BEDEE7FE069010565CD072A0448951229781AB51DCDFB19B4100EFF31F2F0AC28632DAB3EFA01B1DDDA37E1FA88A0080994051F57},col_id=20:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc88c8, hex: 9A01000000000000, int: 410},col_id=21:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8900, hex: C509000000000000, int: 2501},col_id=22:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8938, hex: 0000000000000000, int: 0},col_id=23:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8970, hex: 0000000000000000, int: 0},col_id=24:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc89a8, hex: 0000000000000000, int: 0},col_id=25:null,col_id=26:null,col_id=27:null,col_id=28:null,col_id=29:null,col_id=30:null,]}, merge_iter={ObPartitionMinorRowMergeIter:{tablet_id:{id:333}, iter_end:false, schema_rowkey_column_cnt:5, schema_version:1681902231005328, merge_range:{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}, curr_row_:{row_flag:{flag:"INSERT", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710252046549895004, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:31, datum_buffer:{capacity:32, datums:0x7f5494dc8458, local_datums:0x7f5494dc8458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8468, hex: 0000000000000000, int: 0},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84a0, hex: 0300000000000000, int: 3},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84d8, hex: 0300000000000000, int: 3},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8510, hex: 1D00000000000000, int: 29},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe0, hex: B473B31577130600, int: 1710252046382004},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8580, hex: A4380231D3F643E8, int: -1710252046549895004},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc85b8, hex: 0000000000000000, int: 0},col_id=7:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe8, hex: B6B1B51577130600, int: 1710252046528950},col_id=8:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1eff0, hex: B6B1B51577130600, int: 1710252046528950},col_id=9:{len: 8, flag: 0, null: 0, ptr: 
0x7f5494dc8660, hex: 0100000000000000, int: 1, num_digit0: 0},col_id=10:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8698, hex: 0000000000000000, int: 0},col_id=11:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1effa, hex: E2EEC26B52100600, int: 1706796041563874},col_id=12:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8708, hex: A800000000000000, int: 168},col_id=13:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8740, hex: 0000000000000000, int: 0},col_id=14:{len: 19, flag: 0, null: 0, ptr: 0x7f5447e1f004, hex: 31373435363238323731323831343936303636, cstr: 1745628271281496066},col_id=15:{len: 26, flag: 0, null: 0, ptr: 0x7f5447e1f017, hex: 3041303533463030383243304633444339464635454439433138, cstr: 0A053F0082C0F3DC9FF5ED9C18},col_id=16:{len: 1, flag: 0, null: 0, ptr: 0x7f5447e1f031, hex: 31, cstr: 1},col_id=17:{len: 10, flag: 0, null: 0, ptr: 0x7f5447e1f032, hex: 30413035334630303031, cstr: 0A053F0001},col_id=18:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1f03c, hex: 0000000000003440, int: 4626322717216342016},col_id=19:{len: 410, flag: 0, null: 0, ptr: 0x7f5447e1f044, hex: 3738394337443533353131364333323030383833413837424544454537464530363930313035363543443037324130343438393531323239373831414235314443444642313942343130304546463331463246304143323836333244414233454641303142314444444133374531464138384130303830393934303531463537, cstr: 789C7D535116C3200883A87BEDEE7FE069010565CD072A0448951229781AB51DCDFB19B4100EFF31F2F0AC28632DAB3EFA01B1DDDA37E1FA88A0080994051F57},col_id=20:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc88c8, hex: 9A01000000000000, int: 410},col_id=21:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8900, hex: C509000000000000, int: 2501},col_id=22:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8938, hex: 0000000000000000, int: 0},col_id=23:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8970, hex: 0000000000000000, int: 0},col_id=24:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc89a8, hex: 0000000000000000, int: 0},col_id=25:null,col_id=26:null,col_id=27:null,col_id=28:null,col_id=29:null,col_id=30:null,]}, store_ctx:{this:0x7f5494dc4118, ls_id:{id:1}, ls:null, timeout:9223372036854775807, tablet_id:{id:0}, table_iter:null, table_version:9223372036854775807, mvcc_acc_ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, tablet_stat:{ls_id:0, tablet_id:0, query_cnt:0, merge_cnt:0, scan_logical_row_cnt:0, scan_physical_row_cnt:0, scan_micro_block_cnt:0, pushdown_micro_block_cnt:0, exist_row_total_table_cnt:0, exist_row_read_table_cnt:0, merge_physical_row_cnt:0, merge_logical_row_cnt:0}, replay_log_scn:{val:4611686018427387903}}, row_iter_:{context:{is_inited:true, timeout:9223372036854775807, ls_id:{id:1}, tablet_id:{id:0}, query_flag:{scan_order:1, daily_merge:1, rmmb_optimize:1, whole_macro_scan:1, full_row:0, index_back:0, query_stat:0, sql_mode:0, read_latest:0, prewarm:0, join_type:0, use_row_cache:0, use_block_index_cache:0, use_bloomfilter_cache:0, multi_version_minor_merge:1, is_need_feedback:0, use_fuse_row_cache:0, use_fast_agg:0, iter_uncommitted_row:0, ignore_trans_stat:0, is_large_query:0, is_sstable_cut:0, skip_read_lob:0, reserved:0}, sql_mode:0, store_ctx:0x7f5494dc4118, limit_param:null, stmt_allocator:0x7f5494dc4600, allocator:0x7f5494dc4578, range_allocator:null, table_scan_stat:null, 
out_cnt:0, trans_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:4611686018427387901}, merge_scn:{val:1710506547144172701}, lob_locator_helper:{table_id:0, ls_id:1, snapshot_version:4611686018427387901, rowid_version:0, rowid_project_:NULL, rowid_objs:[], enable_locator_v2:true, is_inited:true}, iter_pool:null, block_row_store:null, io_callback:null}, row:{row_flag:{flag:"INSERT", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710252046549895004, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:31, datum_buffer:{capacity:32, datums:0x7f5494dc8458, local_datums:0x7f5494dc8458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8468, hex: 0000000000000000, int: 0},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84a0, hex: 0300000000000000, int: 3},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84d8, hex: 0300000000000000, int: 3},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8510, hex: 1D00000000000000, int: 29},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe0, hex: B473B31577130600, int: 1710252046382004},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8580, hex: A4380231D3F643E8, int: -1710252046549895004},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc85b8, hex: 0000000000000000, int: 0},col_id=7:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe8, hex: B6B1B51577130600, int: 1710252046528950},col_id=8:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1eff0, hex: B6B1B51577130600, int: 1710252046528950},col_id=9:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8660, hex: 0100000000000000, int: 1, num_digit0: 0},col_id=10:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8698, hex: 0000000000000000, int: 0},col_id=11:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1effa, hex: E2EEC26B52100600, int: 1706796041563874},col_id=12:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8708, hex: A800000000000000, int: 168},col_id=13:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8740, hex: 0000000000000000, int: 0},col_id=14:{len: 19, flag: 0, null: 0, ptr: 0x7f5447e1f004, hex: 31373435363238323731323831343936303636, cstr: 1745628271281496066},col_id=15:{len: 26, flag: 0, null: 0, ptr: 0x7f5447e1f017, hex: 3041303533463030383243304633444339464635454439433138, cstr: 0A053F0082C0F3DC9FF5ED9C18},col_id=16:{len: 1, flag: 0, null: 0, ptr: 0x7f5447e1f031, hex: 31, cstr: 1},col_id=17:{len: 10, flag: 0, null: 0, ptr: 0x7f5447e1f032, hex: 30413035334630303031, cstr: 0A053F0001},col_id=18:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1f03c, hex: 0000000000003440, int: 4626322717216342016},col_id=19:{len: 410, flag: 0, null: 0, ptr: 0x7f5447e1f044, hex: 3738394337443533353131364333323030383833413837424544454537464530363930313035363543443037324130343438393531323239373831414235314443444642313942343130304546463331463246304143323836333244414233454641303142314444444133374531464138384130303830393934303531463537, cstr: 789C7D535116C3200883A87BEDEE7FE069010565CD072A0448951229781AB51DCDFB19B4100EFF31F2F0AC28632DAB3EFA01B1DDDA37E1FA88A0080994051F57},col_id=20:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc88c8, hex: 9A01000000000000, int: 410},col_id=21:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8900, hex: C509000000000000, int: 2501},col_id=22:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8938, hex: 0000000000000000, int: 0},col_id=23:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8970, hex: 0000000000000000, int: 0},col_id=24:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc89a8, hex: 
0000000000000000, int: 0},col_id=25:null,col_id=26:null,col_id=27:null,col_id=28:null,col_id=29:null,col_id=30:null,]}, key:rowkey_object=[{"BIGINT":0},{"BIGINT":3},{"BIGINT":3},{"BIGINT":29},{"TIMESTAMP":"2024-03-12 14:00:46.382004"}] , value_iter:{value:{this=0x7f5447e1eed8 latch_=unlocked flag=3 first_dml=INSERT last_dml=INSERT update_since_compact=1 list_head=0x7f5447e1ef68 latest_compact_node=(nil) max_trans_version={val:1710252046549895004} max_trans_id=97039142 max_elr_trans_version={val:1710252046549895004} max_elr_trans_id=97039142 latest_compact_ts=0 last_compact_cnt=0 total_trans_node_cnt=1 max_modify_count=0 min_modify_count=0}, version_iter:NULL, multi_version_iter:NULL, max_committed_trans_version:1710252046549895004, cur_trans_version:{val:1710252046549895004}, is_node_compacted:false, ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, merge_scn:{val:1710506547144172701}, version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:4611686018427387901}, has_multi_commit_trans:false}, scan_state:4}, iter_row_count:44, is_inited:true, is_rowkey_first_row_reused:false, table_:{ObITable:{this:0x7f545f264310, key:{tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149306305}, this:0x7f545f264310, timestamp:1710482149306305, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:30, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822755, mt_stat_.ready_for_flush_time:1710483939822797, mt_stat_.create_flush_dag_time:1710486221572962, mt_stat_.release_time:0, mt_stat_.last_print_time:0}}, ghost_row_count:0, check_committing_trans_compacted:true, row_queue:{col_cnt:31, cur_pos:0, count:0}}) [2024-03-15 07:03:41.579608] WDIAG [STORAGE] merge_same_rowkey_iters (ob_partition_merger.cpp:1416) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=338][errcode=-4184] Failed to merge single merge iter(ret=-4184) [2024-03-15 07:03:41.579615] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1131) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=7][errcode=-4184] Failed to merge iters with same rowkey(ret=-4184, rowkey_minimum_iters=[{ObPartitionMinorRowMergeIter:{tablet_id:{id:333}, iter_end:false, schema_rowkey_column_cnt:5, schema_version:1681902231005328, merge_range:{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, 
border_flag:{flag:0}}, curr_row_:{row_flag:{flag:"INSERT", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710252046549895004, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:31, datum_buffer:{capacity:32, datums:0x7f5494dc8458, local_datums:0x7f5494dc8458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8468, hex: 0000000000000000, int: 0},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84a0, hex: 0300000000000000, int: 3},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84d8, hex: 0300000000000000, int: 3},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8510, hex: 1D00000000000000, int: 29},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe0, hex: B473B31577130600, int: 1710252046382004},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8580, hex: A4380231D3F643E8, int: -1710252046549895004},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc85b8, hex: 0000000000000000, int: 0},col_id=7:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe8, hex: B6B1B51577130600, int: 1710252046528950},col_id=8:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1eff0, hex: B6B1B51577130600, int: 1710252046528950},col_id=9:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8660, hex: 0100000000000000, int: 1, num_digit0: 0},col_id=10:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8698, hex: 0000000000000000, int: 0},col_id=11:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1effa, hex: E2EEC26B52100600, int: 1706796041563874},col_id=12:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8708, hex: A800000000000000, int: 168},col_id=13:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8740, hex: 0000000000000000, int: 0},col_id=14:{len: 19, flag: 0, null: 0, ptr: 0x7f5447e1f004, hex: 31373435363238323731323831343936303636, cstr: 1745628271281496066},col_id=15:{len: 26, flag: 0, null: 0, ptr: 0x7f5447e1f017, hex: 3041303533463030383243304633444339464635454439433138, cstr: 0A053F0082C0F3DC9FF5ED9C18},col_id=16:{len: 1, flag: 0, null: 0, ptr: 0x7f5447e1f031, hex: 31, cstr: 1},col_id=17:{len: 10, flag: 0, null: 0, ptr: 0x7f5447e1f032, hex: 30413035334630303031, cstr: 0A053F0001},col_id=18:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1f03c, hex: 0000000000003440, int: 4626322717216342016},col_id=19:{len: 410, flag: 0, null: 0, ptr: 0x7f5447e1f044, hex: 3738394337443533353131364333323030383833413837424544454537464530363930313035363543443037324130343438393531323239373831414235314443444642313942343130304546463331463246304143323836333244414233454641303142314444444133374531464138384130303830393934303531463537, cstr: 789C7D535116C3200883A87BEDEE7FE069010565CD072A0448951229781AB51DCDFB19B4100EFF31F2F0AC28632DAB3EFA01B1DDDA37E1FA88A0080994051F57},col_id=20:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc88c8, hex: 9A01000000000000, int: 410},col_id=21:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8900, hex: C509000000000000, int: 2501},col_id=22:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8938, hex: 0000000000000000, int: 0},col_id=23:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8970, hex: 0000000000000000, int: 0},col_id=24:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc89a8, hex: 0000000000000000, int: 0},col_id=25:null,col_id=26:null,col_id=27:null,col_id=28:null,col_id=29:null,col_id=30:null,]}, store_ctx:{this:0x7f5494dc4118, ls_id:{id:1}, ls:null, timeout:9223372036854775807, tablet_id:{id:0}, table_iter:null, table_version:9223372036854775807, mvcc_acc_ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, 
snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, tablet_stat:{ls_id:0, tablet_id:0, query_cnt:0, merge_cnt:0, scan_logical_row_cnt:0, scan_physical_row_cnt:0, scan_micro_block_cnt:0, pushdown_micro_block_cnt:0, exist_row_total_table_cnt:0, exist_row_read_table_cnt:0, merge_physical_row_cnt:0, merge_logical_row_cnt:0}, replay_log_scn:{val:4611686018427387903}}, row_iter_:{context:{is_inited:true, timeout:9223372036854775807, ls_id:{id:1}, tablet_id:{id:0}, query_flag:{scan_order:1, daily_merge:1, rmmb_optimize:1, whole_macro_scan:1, full_row:0, index_back:0, query_stat:0, sql_mode:0, read_latest:0, prewarm:0, join_type:0, use_row_cache:0, use_block_index_cache:0, use_bloomfilter_cache:0, multi_version_minor_merge:1, is_need_feedback:0, use_fuse_row_cache:0, use_fast_agg:0, iter_uncommitted_row:0, ignore_trans_stat:0, is_large_query:0, is_sstable_cut:0, skip_read_lob:0, reserved:0}, sql_mode:0, store_ctx:0x7f5494dc4118, limit_param:null, stmt_allocator:0x7f5494dc4600, allocator:0x7f5494dc4578, range_allocator:null, table_scan_stat:null, out_cnt:0, trans_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:4611686018427387901}, merge_scn:{val:1710506547144172701}, lob_locator_helper:{table_id:0, ls_id:1, snapshot_version:4611686018427387901, rowid_version:0, rowid_project_:NULL, rowid_objs:[], enable_locator_v2:true, is_inited:true}, iter_pool:null, block_row_store:null, io_callback:null}, row:{row_flag:{flag:"INSERT", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710252046549895004, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:31, datum_buffer:{capacity:32, datums:0x7f5494dc8458, local_datums:0x7f5494dc8458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8468, hex: 0000000000000000, int: 0},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84a0, hex: 0300000000000000, int: 3},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc84d8, hex: 0300000000000000, int: 3},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8510, hex: 1D00000000000000, int: 29},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe0, hex: B473B31577130600, int: 1710252046382004},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8580, hex: A4380231D3F643E8, int: -1710252046549895004},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc85b8, hex: 0000000000000000, int: 0},col_id=7:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1efe8, hex: B6B1B51577130600, int: 1710252046528950},col_id=8:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1eff0, hex: B6B1B51577130600, int: 1710252046528950},col_id=9:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8660, hex: 0100000000000000, int: 1, num_digit0: 0},col_id=10:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8698, hex: 0000000000000000, int: 0},col_id=11:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1effa, hex: E2EEC26B52100600, int: 1706796041563874},col_id=12:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8708, hex: A800000000000000, int: 168},col_id=13:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8740, hex: 0000000000000000, int: 0},col_id=14:{len: 19, flag: 0, null: 0, ptr: 0x7f5447e1f004, hex: 31373435363238323731323831343936303636, cstr: 
1745628271281496066},col_id=15:{len: 26, flag: 0, null: 0, ptr: 0x7f5447e1f017, hex: 3041303533463030383243304633444339464635454439433138, cstr: 0A053F0082C0F3DC9FF5ED9C18},col_id=16:{len: 1, flag: 0, null: 0, ptr: 0x7f5447e1f031, hex: 31, cstr: 1},col_id=17:{len: 10, flag: 0, null: 0, ptr: 0x7f5447e1f032, hex: 30413035334630303031, cstr: 0A053F0001},col_id=18:{len: 8, flag: 0, null: 0, ptr: 0x7f5447e1f03c, hex: 0000000000003440, int: 4626322717216342016},col_id=19:{len: 410, flag: 0, null: 0, ptr: 0x7f5447e1f044, hex: 3738394337443533353131364333323030383833413837424544454537464530363930313035363543443037324130343438393531323239373831414235314443444642313942343130304546463331463246304143323836333244414233454641303142314444444133374531464138384130303830393934303531463537, cstr: 789C7D535116C3200883A87BEDEE7FE069010565CD072A0448951229781AB51DCDFB19B4100EFF31F2F0AC28632DAB3EFA01B1DDDA37E1FA88A0080994051F57},col_id=20:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc88c8, hex: 9A01000000000000, int: 410},col_id=21:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8900, hex: C509000000000000, int: 2501},col_id=22:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8938, hex: 0000000000000000, int: 0},col_id=23:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc8970, hex: 0000000000000000, int: 0},col_id=24:{len: 8, flag: 0, null: 0, ptr: 0x7f5494dc89a8, hex: 0000000000000000, int: 0},col_id=25:null,col_id=26:null,col_id=27:null,col_id=28:null,col_id=29:null,col_id=30:null,]}, key:rowkey_object=[{"BIGINT":0},{"BIGINT":3},{"BIGINT":3},{"BIGINT":29},{"TIMESTAMP":"2024-03-12 14:00:46.382004"}] , value_iter:{value:{this=0x7f5447e1eed8 latch_=unlocked flag=3 first_dml=INSERT last_dml=INSERT update_since_compact=1 list_head=0x7f5447e1ef68 latest_compact_node=(nil) max_trans_version={val:1710252046549895004} max_trans_id=97039142 max_elr_trans_version={val:1710252046549895004} max_elr_trans_id=97039142 latest_compact_ts=0 last_compact_cnt=0 total_trans_node_cnt=1 max_modify_count=0 min_modify_count=0}, version_iter:NULL, multi_version_iter:NULL, max_committed_trans_version:1710252046549895004, cur_trans_version:{val:1710252046549895004}, is_node_compacted:false, ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, merge_scn:{val:1710506547144172701}, version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:4611686018427387901}, has_multi_commit_trans:false}, scan_state:4}, iter_row_count:44, is_inited:true, is_rowkey_first_row_reused:false, table_:{ObITable:{this:0x7f545f264310, key:{tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149306305}, this:0x7f545f264310, timestamp:1710482149306305, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:30, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, 
rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822755, mt_stat_.ready_for_flush_time:1710483939822797, mt_stat_.create_flush_dag_time:1710486221572962, mt_stat_.release_time:0, mt_stat_.last_print_time:0}}, ghost_row_count:0, check_committing_trans_compacted:true, row_queue:{col_cnt:31, cur_pos:0, count:0}}]) [2024-03-15 07:03:41.579846] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1150) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=230][errcode=-4184] Partition merge did not end normally(ret=-4184) [2024-03-15 07:03:41.579854] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=6] partition merge iter row count(i=0, row_count=44, ghost_row_count=0, pkey={tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f545f264310, key:{tablet_id:{id:333}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149306305}, this:0x7f545f264310, timestamp:1710482149306305, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:30, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060052153142}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822755, mt_stat_.ready_for_flush_time:1710483939822797, mt_stat_.create_flush_dag_time:1710486221572962, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.579897] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=34][errcode=-4184] failed to merge partition(ret=-4184) [2024-03-15 07:03:41.579922] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1446) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=6][errcode=-4184] failed to merge(ret=-4184, ctx_->param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:333}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, idx_=0) [2024-03-15 07:03:41.579934] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=12][errcode=-4184] failed to process task(ret=-4184) [2024-03-15 07:03:41.579940] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=5] task finish process(ret=-4184, start_time=1710486221578514, end_time=1710486221579939, runtime=1425, *this={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, 
start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}) [2024-03-15 07:03:41.579956] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=15][errcode=-4184] failed to do work(ret=-4184, *task_={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:0, dag_status:2, start_time:1710486221574030, running_task_cnt:1, indegree:0, hash:7224666818425847873}}, compat_mode=0) [2024-03-15 07:03:41.579975] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=13] dag finished(dag_ret=-4184, runtime=5944, dag_cnt=1, dag_cnts_[dag.get_type()]=1, &dag=0x7f544482c590, dag={this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC210-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221574030, running_task_cnt:0, indegree:0, hash:7224666818425847873}) [2024-03-15 07:03:41.579993] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC210-0-0] [lt=14] succeed to del sys task(removed_task={start_time:1710486221574013, task_id:YB427F000001-000613ACAD3FC210-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=333"}) [2024-03-15 07:03:41.580066] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=15][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.580078] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=12][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.580089] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.580154] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.580166] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=10] get storage schema to merge(ls_id={id:1}, tablet_id={id:101066}, schema_ctx={base_schema_version:0, schema_version:1681902231009384, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1e3e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902231009384, column_cnt:5, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:20, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.580384] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=62] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101066}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.580436] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=13] succeed to build merge ctx(tablet_id={id:101066}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101066}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902231009384, schema_version:1681902231009384, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1e3e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902231009384, column_cnt:5, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:20, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, 
{column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101066}, table_count:1, [{i:0, table_key:{tablet_id:{id:101066}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550af1de60, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221578503, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=83us|(0.24)|GET_PARALLEL_RANGE=262us|(0.76)|total=345us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.580711] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=110] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}, scanned_row_cnt_arr:0x7f542c02d5f0, output_block_cnt_arr:0x7f542c02d5f8, concurrent_cnt:1, estimate_row_cnt:1402, estimate_occupy_size:0, latest_update_ts:1710486221578503, estimated_finish_time:1710486238580709}) [2024-03-15 07:03:41.580729] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=18] succeed to init merge ctx(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}) [2024-03-15 07:03:41.580742] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=12] task finish process(ret=0, start_time=1710486221580054, 
end_time=1710486221580741, runtime=687, *this={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}) [2024-03-15 07:03:41.580782] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=18] schedule one task(task={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.581899] ERROR alloc_block (ob_local_device.cpp:716) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=14][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.581914] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=14][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.581921] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.581929] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.581935] WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=6][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:7, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba1e8, hex: BCE3935006110600, int: 1707568679609276},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba0a8, hex: 0000000000000000, int: 0},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba0e0, hex: 7901000000000000, int: 377},idx=3:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba118, hex: 7901000000000000, int: 377},idx=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba150, hex: 2200000000000000, int: 34},idx=5:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba188, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},idx=6:{len: 8, flag: 0, null: 0, ptr: 0x7f5441eba1c0, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-7176, column_count:7, rowkey_column_count:7, has_column_checksum:0, row_count:276, row_store_type:0, opt:4, var_column_count:0, row_offset:15312, original_length:16356, max_merged_trans_version:1710333485895487493, data_length:16356, data_zlength:16356, data_checksum:2516174412, column_checksums:null, single_version_rows:0, contain_uncommitted_rows:0, is_last_row_last_flag:1, is_valid():true}, buf:0x7f53ecc04090, buf_size:16356, data_size:16356, row_count:276, column_count:7, max_merged_trans_version:1710333485895487493, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:642266761, row_count_delta:-276, 
contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:16356}) [2024-03-15 07:03:41.581988] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=53][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.581995] WDIAG [STORAGE] append_row (ob_macro_block_writer.cpp:583) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7][errcode=-4184] Fail to build micro block, (ret=-4184) [2024-03-15 07:03:41.582001] WDIAG [STORAGE] append_row (ob_macro_block_writer.cpp:507) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=6][errcode=-4184] Fail to append row(ret=-4184) [2024-03-15 07:03:41.582007] WDIAG [STORAGE] append_row (ob_data_macro_block_merge_writer.cpp:72) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=5][errcode=-4184] ObMacroBlockWriter fail to append_row(ret=-4184) [2024-03-15 07:03:41.582013] WDIAG [STORAGE] inner_process (ob_partition_merger.cpp:1064) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=6][errcode=-4184] Failed to append row to macro writer(ret=-4184) [2024-03-15 07:03:41.582019] WDIAG [STORAGE] process (ob_partition_merger.cpp:277) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=6][errcode=-4184] Failed to inner append row(ret=-4184) [2024-03-15 07:03:41.582037] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=70] add dag success(dag=0x7f544482cfb0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=7376661037264142783, dag_cnt=2, dag_type_cnts=2) [2024-03-15 07:03:41.582074] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=22] succeed to add sys task(task={start_time:1710486221582067, task_id:YB427F000001-000613ACAD3FC212-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=334"}) [2024-03-15 07:03:41.582093] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=17] schedule one task(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.582064] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=27] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:334}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f545f2652b0, key:{tablet_id:{id:334}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149310470}, this:0x7f545f2652b0, timestamp:1710482149310470, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:32, clock:0}, 
host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822917, mt_stat_.ready_for_flush_time:1710483939822959, mt_stat_.create_flush_dag_time:1710486221578615, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.582161] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=71] add dag success(dag=0x7f544482d4c0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-5924056271702463576, dag_cnt=3, dag_type_cnts=3) [2024-03-15 07:03:41.582203] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=19] succeed to add sys task(task={start_time:1710486221582199, task_id:YB427F000001-000613ACAD3FC213-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101067"}) [2024-03-15 07:03:41.582195] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.582178] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=16] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101067}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629d8080, key:{tablet_id:{id:101067}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149323563}, this:0x7f54629d8080, timestamp:1710482149323563, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:33, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060871482440}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822997, 
mt_stat_.ready_for_flush_time:1710483939823039, mt_stat_.create_flush_dag_time:1710486221582135, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.582232] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=26] schedule one task(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.582237] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.582025] WDIAG [STORAGE] merge_single_iter (ob_partition_merger.cpp:1190) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=5][errcode=-4184] Failed to process row(ret=-4184, cur_row={row_flag:{flag:"DELETE", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710333485895487493, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:7, datum_buffer:{capacity:32, datums:0x7f54350fa458, local_datums:0x7f54350fa458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5446450b04, hex: BCE3935006110600, int: 1707568679609276},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4a0, hex: 0000000000000000, int: 0},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4d8, hex: 7901000000000000, int: 377},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa510, hex: 7901000000000000, int: 377},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa548, hex: 2200000000000000, int: 34},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa580, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa5b8, hex: 0000000000000000, int: 0},]}, merge_iter={ObPartitionMinorRowMergeIter:{tablet_id:{id:101066}, iter_end:false, schema_rowkey_column_cnt:5, schema_version:1681902231009384, merge_range:{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}, curr_row_:{row_flag:{flag:"DELETE", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710333485895487493, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:7, datum_buffer:{capacity:32, datums:0x7f54350fa458, local_datums:0x7f54350fa458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5446450b04, hex: BCE3935006110600, int: 1707568679609276},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4a0, hex: 0000000000000000, int: 0},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4d8, hex: 7901000000000000, int: 377},col_id=3:{len: 8, flag: 0, null: 0, ptr: 
0x7f54350fa510, hex: 7901000000000000, int: 377},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa548, hex: 2200000000000000, int: 34},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa580, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa5b8, hex: 0000000000000000, int: 0},]}, store_ctx:{this:0x7f5435050118, ls_id:{id:1}, ls:null, timeout:9223372036854775807, tablet_id:{id:0}, table_iter:null, table_version:9223372036854775807, mvcc_acc_ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, tablet_stat:{ls_id:0, tablet_id:0, query_cnt:0, merge_cnt:0, scan_logical_row_cnt:0, scan_physical_row_cnt:0, scan_micro_block_cnt:0, pushdown_micro_block_cnt:0, exist_row_total_table_cnt:0, exist_row_read_table_cnt:0, merge_physical_row_cnt:0, merge_logical_row_cnt:0}, replay_log_scn:{val:4611686018427387903}}, row_iter_:{context:{is_inited:true, timeout:9223372036854775807, ls_id:{id:1}, tablet_id:{id:0}, query_flag:{scan_order:1, daily_merge:1, rmmb_optimize:1, whole_macro_scan:1, full_row:0, index_back:0, query_stat:0, sql_mode:0, read_latest:0, prewarm:0, join_type:0, use_row_cache:0, use_block_index_cache:0, use_bloomfilter_cache:0, multi_version_minor_merge:1, is_need_feedback:0, use_fuse_row_cache:0, use_fast_agg:0, iter_uncommitted_row:0, ignore_trans_stat:0, is_large_query:0, is_sstable_cut:0, skip_read_lob:0, reserved:0}, sql_mode:0, store_ctx:0x7f5435050118, limit_param:null, stmt_allocator:0x7f5435050600, allocator:0x7f5435050578, range_allocator:null, table_scan_stat:null, out_cnt:0, trans_version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:4611686018427387901}, merge_scn:{val:1710506547144172701}, lob_locator_helper:{table_id:0, ls_id:1, snapshot_version:4611686018427387901, rowid_version:0, rowid_project_:NULL, rowid_objs:[], enable_locator_v2:true, is_inited:true}, iter_pool:null, block_row_store:null, io_callback:null}, row:{row_flag:{flag:"DELETE", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710333485895487493, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:7, datum_buffer:{capacity:32, datums:0x7f54350fa458, local_datums:0x7f54350fa458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5446450b04, hex: BCE3935006110600, int: 1707568679609276},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4a0, hex: 0000000000000000, int: 0},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4d8, hex: 7901000000000000, int: 377},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa510, hex: 7901000000000000, int: 377},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa548, hex: 2200000000000000, int: 34},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa580, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa5b8, hex: 0000000000000000, int: 0},]}, key:rowkey_object=[{"TIMESTAMP":"2024-02-10 12:37:59.609276"},{"BIGINT":0},{"BIGINT":377},{"BIGINT":377},{"BIGINT":34}] , value_iter:{value:{this=0x7f5446450a00 latch_=unlocked flag=3 first_dml=DELETE last_dml=DELETE update_since_compact=1 
list_head=0x7f5446450a90 latest_compact_node=(nil) max_trans_version={val:1710333485895487493} max_trans_id=97316801 max_elr_trans_version={val:1710333485895487493} max_elr_trans_id=97316801 latest_compact_ts=0 last_compact_cnt=0 total_trans_node_cnt=1 max_modify_count=0 min_modify_count=0}, version_iter:NULL, multi_version_iter:NULL, max_committed_trans_version:1710333485895487493, cur_trans_version:{val:1710333485895487493}, is_node_compacted:false, ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, merge_scn:{val:1710506547144172701}, version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:4611686018427387901}, has_multi_commit_trans:false}, scan_state:4}, iter_row_count:276, is_inited:true, is_rowkey_first_row_reused:false, table_:{ObITable:{this:0x7f545f264ae0, key:{tablet_id:{id:101066}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149308275}, this:0x7f545f264ae0, timestamp:1710482149308275, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:31, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822836, mt_stat_.ready_for_flush_time:1710483939822878, mt_stat_.create_flush_dag_time:1710486221573983, mt_stat_.release_time:0, mt_stat_.last_print_time:0}}, ghost_row_count:0, check_committing_trans_compacted:true, row_queue:{col_cnt:7, cur_pos:0, count:0}}) [2024-03-15 07:03:41.582359] WDIAG [STORAGE] merge_same_rowkey_iters (ob_partition_merger.cpp:1416) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=331][errcode=-4184] Failed to merge single merge iter(ret=-4184) [2024-03-15 07:03:41.582467] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=59] add dag success(dag=0x7f544482d9d0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=6700715730357722855, dag_cnt=4, dag_type_cnts=4) [2024-03-15 07:03:41.582367] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1131) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=8][errcode=-4184] Failed to merge iters with same rowkey(ret=-4184, rowkey_minimum_iters=[{ObPartitionMinorRowMergeIter:{tablet_id:{id:101066}, iter_end:false, schema_rowkey_column_cnt:5, schema_version:1681902231009384, merge_range:{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}, curr_row_:{row_flag:{flag:"DELETE", flag_type:0}, 
trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710333485895487493, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:7, datum_buffer:{capacity:32, datums:0x7f54350fa458, local_datums:0x7f54350fa458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5446450b04, hex: BCE3935006110600, int: 1707568679609276},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4a0, hex: 0000000000000000, int: 0},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4d8, hex: 7901000000000000, int: 377},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa510, hex: 7901000000000000, int: 377},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa548, hex: 2200000000000000, int: 34},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa580, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa5b8, hex: 0000000000000000, int: 0},]}, store_ctx:{this:0x7f5435050118, ls_id:{id:1}, ls:null, timeout:9223372036854775807, tablet_id:{id:0}, table_iter:null, table_version:9223372036854775807, mvcc_acc_ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, tablet_stat:{ls_id:0, tablet_id:0, query_cnt:0, merge_cnt:0, scan_logical_row_cnt:0, scan_physical_row_cnt:0, scan_micro_block_cnt:0, pushdown_micro_block_cnt:0, exist_row_total_table_cnt:0, exist_row_read_table_cnt:0, merge_physical_row_cnt:0, merge_logical_row_cnt:0}, replay_log_scn:{val:4611686018427387903}}, row_iter_:{context:{is_inited:true, timeout:9223372036854775807, ls_id:{id:1}, tablet_id:{id:0}, query_flag:{scan_order:1, daily_merge:1, rmmb_optimize:1, whole_macro_scan:1, full_row:0, index_back:0, query_stat:0, sql_mode:0, read_latest:0, prewarm:0, join_type:0, use_row_cache:0, use_block_index_cache:0, use_bloomfilter_cache:0, multi_version_minor_merge:1, is_need_feedback:0, use_fuse_row_cache:0, use_fast_agg:0, iter_uncommitted_row:0, ignore_trans_stat:0, is_large_query:0, is_sstable_cut:0, skip_read_lob:0, reserved:0}, sql_mode:0, store_ctx:0x7f5435050118, limit_param:null, stmt_allocator:0x7f5435050600, allocator:0x7f5435050578, range_allocator:null, table_scan_stat:null, out_cnt:0, trans_version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:4611686018427387901}, merge_scn:{val:1710506547144172701}, lob_locator_helper:{table_id:0, ls_id:1, snapshot_version:4611686018427387901, rowid_version:0, rowid_project_:NULL, rowid_objs:[], enable_locator_v2:true, is_inited:true}, iter_pool:null, block_row_store:null, io_callback:null}, row:{row_flag:{flag:"DELETE", flag_type:0}, trans_id:{txid:0}, scan_index:0, mvcc_row_flag:{first:1, uncommitted:0, shadow:0, compact:1, ghost:0, last:1, reserved:0, flag:41}, snapshot_version:1710333485895487493, fast_filter_skipped:false, have_uncommited_row:false, group_idx:0, count:7, datum_buffer:{capacity:32, datums:0x7f54350fa458, local_datums:0x7f54350fa458}[col_id=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5446450b04, hex: BCE3935006110600, int: 1707568679609276},col_id=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4a0, hex: 0000000000000000, int: 0},col_id=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa4d8, hex: 
7901000000000000, int: 377},col_id=3:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa510, hex: 7901000000000000, int: 377},col_id=4:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa548, hex: 2200000000000000, int: 34},col_id=5:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa580, hex: FB6FBE9DC1AC43E8, int: -1710333485895487493},col_id=6:{len: 8, flag: 0, null: 0, ptr: 0x7f54350fa5b8, hex: 0000000000000000, int: 0},]}, key:rowkey_object=[{"TIMESTAMP":"2024-02-10 12:37:59.609276"},{"BIGINT":0},{"BIGINT":377},{"BIGINT":377},{"BIGINT":34}] , value_iter:{value:{this=0x7f5446450a00 latch_=unlocked flag=3 first_dml=DELETE last_dml=DELETE update_since_compact=1 list_head=0x7f5446450a90 latest_compact_node=(nil) max_trans_version={val:1710333485895487493} max_trans_id=97316801 max_elr_trans_version={val:1710333485895487493} max_elr_trans_id=97316801 latest_compact_ts=0 last_compact_cnt=0 total_trans_node_cnt=1 max_modify_count=0 min_modify_count=0}, version_iter:NULL, multi_version_iter:NULL, max_committed_trans_version:1710333485895487493, cur_trans_version:{val:1710333485895487493}, is_node_compacted:false, ctx:{type:1, abs_lock_timeout:9223372036854775807, tx_lock_timeout:-1, snapshot:{version:{val:4611686018427387901}, tx_id:{txid:0}, scn:-1}, tx_table_guard:{tx_table:0x7f54639dfa10, epoch:0}, tx_id:{txid:0}, tx_desc:NULL, tx_ctx:null, mem_ctx:null, tx_scn:-1, write_flag:{is_table_api:0, is_table_lock:0, is_mds:0, is_dml_batch_opt:0}, handle_start_time:-1, lock_wait_start_ts:0}, merge_scn:{val:1710506547144172701}, version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:4611686018427387901}, has_multi_commit_trans:false}, scan_state:4}, iter_row_count:276, is_inited:true, is_rowkey_first_row_reused:false, table_:{ObITable:{this:0x7f545f264ae0, key:{tablet_id:{id:101066}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149308275}, this:0x7f545f264ae0, timestamp:1710482149308275, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:31, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822836, mt_stat_.ready_for_flush_time:1710483939822878, mt_stat_.create_flush_dag_time:1710486221573983, mt_stat_.release_time:0, mt_stat_.last_print_time:0}}, ghost_row_count:0, check_committing_trans_compacted:true, row_queue:{col_cnt:7, cur_pos:0, count:0}}]) [2024-03-15 07:03:41.582552] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1150) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=184][errcode=-4184] Partition merge did not end normally(ret=-4184) [2024-03-15 07:03:41.582561] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7] partition merge iter row count(i=0, row_count=276, ghost_row_count=0, pkey={tablet_id:{id:101066}, 
column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f545f264ae0, key:{tablet_id:{id:101066}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149308275}, this:0x7f545f264ae0, timestamp:1710482149308275, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:31, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822836, mt_stat_.ready_for_flush_time:1710483939822878, mt_stat_.create_flush_dag_time:1710486221573983, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.582507] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=23] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:378}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f545f262ba0, key:{tablet_id:{id:378}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482149242391}, this:0x7f545f262ba0, timestamp:1710482149242391, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:27, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247061203907923}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823107, mt_stat_.ready_for_flush_time:1710483939823151, mt_stat_.create_flush_dag_time:1710486221582237, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.582616] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=35][errcode=-4184] failed to merge partition(ret=-4184) [2024-03-15 07:03:41.582639] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1446) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=7][errcode=-4184] failed to merge(ret=-4184, ctx_->param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101066}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, idx_=0) [2024-03-15 07:03:41.582652] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] 
[lt=12][errcode=-4184] failed to process task(ret=-4184) [2024-03-15 07:03:41.582660] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=18] succeed to add sys task(task={start_time:1710486221582654, task_id:YB427F000001-000613ACAD3FC214-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=378"}) [2024-03-15 07:03:41.582658] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=6] task finish process(ret=-4184, start_time=1710486221580835, end_time=1710486221582657, runtime=1822, *this={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}) [2024-03-15 07:03:41.582678] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=17] schedule one task(task={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.582690] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=31][errcode=-4184] failed to do work(ret=-4184, *task_={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:0, dag_status:2, start_time:1710486221578630, running_task_cnt:1, indegree:0, hash:-1930035241269723440}}, compat_mode=0) [2024-03-15 07:03:41.582691] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=10][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582712] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=13] dag finished(dag_ret=-4184, runtime=4080, dag_cnt=3, dag_cnts_[dag.get_type()]=3, &dag=0x7f544482caa0, dag={this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC211-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221578630, running_task_cnt:0, indegree:0, hash:-1930035241269723440}) [2024-03-15 07:03:41.582729] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=37][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.582745] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC211-0-0] [lt=29] succeed to del sys task(removed_task={start_time:1710486221578600, task_id:YB427F000001-000613ACAD3FC211-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101066"}) [2024-03-15 07:03:41.582750] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ 
(ob_tenant_freeze_info_mgr.cpp:297) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=14] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582754] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=12][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582779] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=24][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.582789] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=15] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.582807] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=17] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582815] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=15][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582827] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=13][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.582838] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=8] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.582862] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=7] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.582855] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=18] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.582809] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=16] get storage schema to merge(ls_id={id:1}, tablet_id={id:101067}, schema_ctx={base_schema_version:0, schema_version:1681902231111608, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1ee30, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902231111608, column_cnt:6, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, 
encryption:"", encrypt_key:"", rowkey_array:[{column_idx:21, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.582873] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=9] get storage schema to merge(ls_id={id:1}, tablet_id={id:334}, schema_ctx={base_schema_version:0, schema_version:1681902231014096, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479796600, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231014096, column_cnt:19, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:23, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.582905] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=43] get storage schema to merge(ls_id={id:1}, tablet_id={id:378}, 
schema_ctx={base_schema_version:0, schema_version:1681902231278816, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547979d770, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231278816, column_cnt:7, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.583032] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=112] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:378}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.583086] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=20] succeed to build merge ctx(tablet_id={id:378}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:378}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902231278816, 
schema_version:1681902231278816, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547979d770, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231278816, column_cnt:7, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:378}, table_count:1, [{i:0, table_key:{tablet_id:{id:378}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547979d1f0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221582419, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=94us|(0.30)|GET_PARALLEL_RANGE=221us|(0.70)|total=315us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.583337] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=222] succeed to init merge progress(ret=0, 
merge_progress_={is_inited:true, merge_dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}, scanned_row_cnt_arr:0x7f542c0215f0, output_block_cnt_arr:0x7f542c0215f8, concurrent_cnt:1, estimate_row_cnt:4, estimate_occupy_size:0, latest_update_ts:1710486221582419, estimated_finish_time:1710486238583333}) [2024-03-15 07:03:41.583370] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=32] succeed to init merge ctx(task={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}}) [2024-03-15 07:03:41.583389] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=17] task finish process(ret=0, start_time=1710486221582736, end_time=1710486221583387, runtime=651, *this={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}}) [2024-03-15 07:03:41.583466] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=19] schedule one task(task={this:0x7f54448761b0, type:1, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.583731] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=115] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:334}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.583769] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=16] succeed to build merge ctx(tablet_id={id:334}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:334}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902231014096, schema_version:1681902231014096, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479796600, version:0, is_use_bloomfilter:0, 
column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902231014096, column_cnt:19, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:23, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", 
collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:334}, table_count:1, [{i:0, table_key:{tablet_id:{id:334}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479796080, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221582007, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=901us|(0.95)|total=944us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.584119] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=117] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101067}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.584173] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=156] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}, scanned_row_cnt_arr:0x7f542c00b5f0, output_block_cnt_arr:0x7f542c00b5f8, concurrent_cnt:1, estimate_row_cnt:7873, estimate_occupy_size:2097152, latest_update_ts:1710486221582007, estimated_finish_time:1710486238584172}) [2024-03-15 07:03:41.584193] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=20] succeed to init merge ctx(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, 
start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}}) [2024-03-15 07:03:41.584207] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=13] task finish process(ret=0, start_time=1710486221582804, end_time=1710486221584206, runtime=1402, *this={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}}) [2024-03-15 07:03:41.584263] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=24] schedule one task(task={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.584361] ERROR alloc_block (ob_local_device.cpp:716) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=13][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.584373] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=12][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.584380] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.584388] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.584194] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=30] succeed to build merge ctx(tablet_id={id:101067}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101067}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902231111608, schema_version:1681902231111608, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550af1ee30, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902231111608, column_cnt:6, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, 
master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:21, meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101067}, table_count:1, [{i:0, table_key:{tablet_id:{id:101067}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550af1e8b0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221582148, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=1.38ms|(0.94)|total=1.47ms, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.584394] WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=6][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:5, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5444894070, hex: 0000000000000000, int: 0},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54448940a8, hex: 0800000000000000, int: 8},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f5444894178, hex: BA89E00B8A130600, int: 1710333485943226},idx=3:{len: 8, flag: 0, null: 0, 
ptr: 0x7f5444894118, hex: 82AEBF98C1AC43E8, int: -1710333485979292030},idx=4:{len: 8, flag: 0, null: 0, ptr: 0x7f5444894150, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-4247, column_count:9, rowkey_column_count:5, has_column_checksum:0, row_count:4, row_store_type:0, opt:5, var_column_count:0, row_offset:368, original_length:324, max_merged_trans_version:1710338427498190495, data_length:324, data_zlength:324, data_checksum:4191056660, column_checksums:null, single_version_rows:1, contain_uncommitted_rows:0, is_last_row_last_flag:1, is_valid():true}, buf:0x7f543f804090, buf_size:324, data_size:324, row_count:4, column_count:9, max_merged_trans_version:1710338427498190495, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:1544922066, row_count_delta:4, contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:324}) [2024-03-15 07:03:41.584448] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=54][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.584455] WDIAG [STORAGE] close (ob_macro_block_writer.cpp:707) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=6][errcode=-4184] macro block writer fail to build current micro block.(ret=-4184) [2024-03-15 07:03:41.584461] WDIAG [STORAGE] close (ob_partition_merger.cpp:170) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=6][errcode=-4184] Failed to close macro block writer(ret=-4184) [2024-03-15 07:03:41.584467] WDIAG [STORAGE] close (ob_partition_merger.cpp:988) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=6][errcode=-4184] Failed to finish merge for partition merger(ret=-4184) [2024-03-15 07:03:41.584473] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1156) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=5][errcode=-4184] failed to close partition merger(ret=-4184) [2024-03-15 07:03:41.584480] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=6] partition merge iter row count(i=0, row_count=4, ghost_row_count=0, pkey={tablet_id:{id:378}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f545f262ba0, key:{tablet_id:{id:378}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149242391}, this:0x7f545f262ba0, timestamp:1710482149242391, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:27, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247061203907923}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, 
mt_stat_.frozen_time:1710483939823107, mt_stat_.ready_for_flush_time:1710483939823151, mt_stat_.create_flush_dag_time:1710486221582237, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.584525] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=38][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.584546] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221584256, end_time=1710486221584545, runtime=289, *this={this:0x7f54448761b0, type:1, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:0, dag_status:2, start_time:1710486221582676, running_task_cnt:1, indegree:0, hash:6700715730357722855}}) [2024-03-15 07:03:41.584568] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1891, dag_cnt=2, dag_cnts_[dag.get_type()]=2, &dag=0x7f544482d9d0, dag={this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC214-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221582676, running_task_cnt:0, indegree:0, hash:6700715730357722855}) [2024-03-15 07:03:41.584585] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC214-0-0] [lt=13] succeed to del sys task(removed_task={start_time:1710486221582654, task_id:YB427F000001-000613ACAD3FC214-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=378"}) [2024-03-15 07:03:41.584732] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=245] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}, scanned_row_cnt_arr:0x7f542c02f5f0, output_block_cnt_arr:0x7f542c02f5f8, concurrent_cnt:1, estimate_row_cnt:7765, estimate_occupy_size:2097152, latest_update_ts:1710486221582148, estimated_finish_time:1710486238584728}) [2024-03-15 07:03:41.584771] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=38] succeed to init merge ctx(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}}) [2024-03-15 07:03:41.584797] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=23] task finish process(ret=0, start_time=1710486221582674, end_time=1710486221584794, runtime=2120, *this={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}}) [2024-03-15 07:03:41.584853] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=24] schedule one 
task(task={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.585209] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=106] add dag success(dag=0x7f5444872080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1962062846896452141, dag_cnt=3, dag_type_cnts=3) [2024-03-15 07:03:41.585236] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=24] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:323}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629d97f0, key:{tablet_id:{id:323}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482150021455}, this:0x7f54629d97f0, timestamp:1710482150021455, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:36, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252001342354519}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823190, mt_stat_.ready_for_flush_time:1710483939823231, mt_stat_.create_flush_dag_time:1710486221582613, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.585353] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=21] succeed to add sys task(task={start_time:1710486221585347, task_id:YB427F000001-000613ACAD3FC215-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=323"}) [2024-03-15 07:03:41.585379] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=24] schedule one task(task={this:0x7f5444878080, type:15, status:2, dag:{this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.585480] ERROR alloc_block (ob_local_device.cpp:716) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=14][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 
07:03:41.585492] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=12][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.585508] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=0] partition merge iter row count(i=0, row_count=148, ghost_row_count=0, pkey={tablet_id:{id:334}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f545f2652b0, key:{tablet_id:{id:334}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149310470}, this:0x7f545f2652b0, timestamp:1710482149310470, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:32, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060764933686}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822917, mt_stat_.ready_for_flush_time:1710483939822959, mt_stat_.create_flush_dag_time:1710486221578615, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.585575] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=1] task finish process(ret=-4184, start_time=1710486221584643, end_time=1710486221585574, runtime=931, *this={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:0, dag_status:2, start_time:1710486221582091, running_task_cnt:1, indegree:0, hash:7376661037264142783}}) [2024-03-15 07:03:41.585597] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=3505, dag_cnt=2, dag_cnts_[dag.get_type()]=2, &dag=0x7f544482cfb0, dag={this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC212-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221582091, running_task_cnt:0, indegree:0, hash:7376661037264142783}) [2024-03-15 07:03:41.585613] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC212-0-0] [lt=13] succeed to del sys task(removed_task={start_time:1710486221582067, task_id:YB427F000001-000613ACAD3FC212-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=334"}) [2024-03-15 07:03:41.585900] ERROR alloc_block (ob_local_device.cpp:716) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=14][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.585914] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] 
[lt=15][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.585931] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=0] partition merge iter row count(i=0, row_count=263, ghost_row_count=0, pkey={tablet_id:{id:101067}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629d8080, key:{tablet_id:{id:101067}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482149323563}, this:0x7f54629d8080, timestamp:1710482149323563, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:33, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710247060871482440}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939822997, mt_stat_.ready_for_flush_time:1710483939823039, mt_stat_.create_flush_dag_time:1710486221582135, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.586005] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=14][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.586017] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221584897, end_time=1710486221586016, runtime=1119, *this={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:0, dag_status:2, start_time:1710486221582230, running_task_cnt:1, indegree:0, hash:-5924056271702463576}}) [2024-03-15 07:03:41.586028] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=23][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.586043] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=3812, dag_cnt=1, dag_cnts_[dag.get_type()]=1, &dag=0x7f544482d4c0, dag={this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC213-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221582230, running_task_cnt:0, indegree:0, hash:-5924056271702463576}) [2024-03-15 07:03:41.586047] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=13] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.586062] INFO [SERVER] del_task 
(ob_sys_task_stat.cpp:169) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC213-0-0] [lt=15] succeed to del sys task(removed_task={start_time:1710486221582199, task_id:YB427F000001-000613ACAD3FC213-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101067"}) [2024-03-15 07:03:41.586084] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=14] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.586105] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=16] get storage schema to merge(ls_id={id:1}, tablet_id={id:323}, schema_ctx={base_schema_version:0, schema_version:1681902230881080, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479766c10, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230881080, column_cnt:20, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, 
{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.586466] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=209] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:323}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.586534] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=26] succeed to build merge ctx(tablet_id={id:323}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:323}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230881080, schema_version:1681902230881080, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479766c10, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230881080, column_cnt:20, 
tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:323}, table_count:1, [{i:0, table_key:{tablet_id:{id:323}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479766690, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221582626, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=82us|(0.16)|GET_PARALLEL_RANGE=428us|(0.84)|total=510us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.587215] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=332] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}, scanned_row_cnt_arr:0x7f542c0235f0, output_block_cnt_arr:0x7f542c0235f8, concurrent_cnt:1, estimate_row_cnt:748, estimate_occupy_size:2097152, latest_update_ts:1710486221582626, estimated_finish_time:1710486238587213}) [2024-03-15 07:03:41.587249] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=33] succeed to init merge ctx(task={this:0x7f5444878080, type:15, status:2, dag:{this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}}) [2024-03-15 07:03:41.587274] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=22] task finish process(ret=0, start_time=1710486221585981, end_time=1710486221587271, runtime=1290, *this={this:0x7f5444878080, type:15, status:2, dag:{this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}}) [2024-03-15 07:03:41.587335] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC215-0-0] 
[lt=18] schedule one task(task={this:0x7f54448781b0, type:1, status:2, dag:{this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.587418] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=102] add dag success(dag=0x7f5444872590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-2810766448934472, dag_cnt=2, dag_type_cnts=2) [2024-03-15 07:03:41.587462] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=41] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:329}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629d9020, key:{tablet_id:{id:329}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482150021077}, this:0x7f54629d9020, timestamp:1710482150021077, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:35, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252037157080262}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823270, mt_stat_.ready_for_flush_time:1710483939823317, mt_stat_.create_flush_dag_time:1710486221585337, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587548] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=70] add dag success(dag=0x7f5444872aa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-6605681042843664973, dag_cnt=3, dag_type_cnts=3) [2024-03-15 07:03:41.587565] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=17] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:330}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629d8850, key:{tablet_id:{id:330}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234139332999384}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482150020716}, this:0x7f54629d8850, timestamp:1710482150020716, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:34, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:4194304}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, 
resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252037663609291}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823356, mt_stat_.ready_for_flush_time:1710483939823397, mt_stat_.create_flush_dag_time:1710486221587531, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587644] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=64] add dag success(dag=0x7f5444872fb0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-2785108733753523045, dag_cnt=4, dag_type_cnts=4) [2024-03-15 07:03:41.587660] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=15] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:182}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629db730, key:{tablet_id:{id:182}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482152646140}, this:0x7f54629db730, timestamp:1710482152646140, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:40, clock:2097152}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710263819867968870}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823453, mt_stat_.ready_for_flush_time:1710483939823495, mt_stat_.create_flush_dag_time:1710486221587630, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587731] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=59] add dag success(dag=0x7f54448734c0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-6912080413595199522, dag_cnt=5, dag_type_cnts=5) [2024-03-15 07:03:41.587747] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=16] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:3}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e14f0, key:{tablet_id:{id:3}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160328110}, this:0x7f54629e14f0, timestamp:1710482160328110, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:52, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, 
logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823534, mt_stat_.ready_for_flush_time:1710483939823596, mt_stat_.create_flush_dag_time:1710486221587719, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587819] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=59] add dag success(dag=0x7f54448739d0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1828233772001493880, dag_cnt=6, dag_type_cnts=6) [2024-03-15 07:03:41.587835] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=16] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100002}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e1cc0, key:{tablet_id:{id:100002}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160328459}, this:0x7f54629e1cc0, timestamp:1710482160328459, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:53, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823635, mt_stat_.ready_for_flush_time:1710483939823677, mt_stat_.create_flush_dag_time:1710486221587807, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587905] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=59] add dag success(dag=0x7f544482c080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1633701860824767150, dag_cnt=7, dag_type_cnts=7) [2024-03-15 07:03:41.587919] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100003}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e2490, key:{tablet_id:{id:100003}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160328679}, this:0x7f54629e2490, timestamp:1710482160328679, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:54, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, 
last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823715, mt_stat_.ready_for_flush_time:1710483939823757, mt_stat_.create_flush_dag_time:1710486221587893, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.587986] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=56] add dag success(dag=0x7f544482c590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=1878867541797627461, dag_cnt=8, dag_type_cnts=8) [2024-03-15 07:03:41.588000] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:114}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e2c60, key:{tablet_id:{id:114}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160328934}, this:0x7f54629e2c60, timestamp:1710482160328934, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:55, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823796, mt_stat_.ready_for_flush_time:1710483939823836, mt_stat_.create_flush_dag_time:1710486221587975, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588067] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=56] add dag success(dag=0x7f544482caa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=198350566326085695, dag_cnt=9, dag_type_cnts=9) [2024-03-15 07:03:41.588082] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e3430, key:{tablet_id:{id:101001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329122}, this:0x7f54629e3430, timestamp:1710482160329122, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:56, 
clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823874, mt_stat_.ready_for_flush_time:1710483939823915, mt_stat_.create_flush_dag_time:1710486221588056, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588150] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=56] add dag success(dag=0x7f544482cfb0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=6152367908818667172, dag_cnt=10, dag_type_cnts=10) [2024-03-15 07:03:41.588164] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:5}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e3c00, key:{tablet_id:{id:5}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329297}, this:0x7f54629e3c00, timestamp:1710482160329297, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:57, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823954, mt_stat_.ready_for_flush_time:1710483939823995, mt_stat_.create_flush_dag_time:1710486221588138, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588231] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=55] add dag success(dag=0x7f544482d4c0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-4203726123947673128, dag_cnt=11, dag_type_cnts=11) [2024-03-15 07:03:41.588246] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e43d0, key:{tablet_id:{id:100006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329659}, this:0x7f54629e43d0, timestamp:1710482160329659, state:0, freeze_clock:0, max_schema_version:0, 
write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:58, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824033, mt_stat_.ready_for_flush_time:1710483939824074, mt_stat_.create_flush_dag_time:1710486221588220, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588312] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=56] add dag success(dag=0x7f544482d9d0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=5462442946523183119, dag_cnt=12, dag_type_cnts=12) [2024-03-15 07:03:41.588327] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54629e4ba0, key:{tablet_id:{id:100001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329884}, this:0x7f54629e4ba0, timestamp:1710482160329884, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:59, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824112, mt_stat_.ready_for_flush_time:1710483939824153, mt_stat_.create_flush_dag_time:1710486221588302, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588395] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=56] add dag success(dag=0x7f54448b6080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=3340047647417801619, dag_cnt=13, dag_type_cnts=13) [2024-03-15 07:03:41.588407] ERROR alloc_block (ob_local_device.cpp:716) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=21][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.588434] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=26][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.588410] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=14] schedule tablet merge dag 
successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:328}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f54448135b0, key:{tablet_id:{id:328}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710300091367964368}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482170806942}, this:0x7f54448135b0, timestamp:1710482170806942, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:80, clock:31457280}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710338418128522881}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824192, mt_stat_.ready_for_flush_time:1710483939824233, mt_stat_.create_flush_dag_time:1710486221588384, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588452] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=0] partition merge iter row count(i=0, row_count=171, ghost_row_count=0, pkey={tablet_id:{id:323}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629d97f0, key:{tablet_id:{id:323}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482150021455}, this:0x7f54629d97f0, timestamp:1710482150021455, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:36, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252001342354519}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823190, mt_stat_.ready_for_flush_time:1710483939823231, mt_stat_.create_flush_dag_time:1710486221582613, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588504] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=82] add dag success(dag=0x7f54448b6590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=3484783245450929688, dag_cnt=14, dag_type_cnts=14) [2024-03-15 07:03:41.588527] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221587392, end_time=1710486221588526, runtime=1134, *this={this:0x7f54448781b0, type:1, status:2, dag:{this:0x7f5444872080, 
type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:0, dag_status:2, start_time:1710486221585377, running_task_cnt:1, indegree:0, hash:-1962062846896452141}}) [2024-03-15 07:03:41.588519] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=15] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:121}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444813d80, key:{tablet_id:{id:121}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482174205290}, this:0x7f5444813d80, timestamp:1710482174205290, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:81, clock:56623104}, host:0x7f54b49fc030, arena_handle:{allocated:4194304}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710362654997374824}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824271, mt_stat_.ready_for_flush_time:1710483939824312, mt_stat_.create_flush_dag_time:1710486221588492, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.588654] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=3175, dag_cnt=13, dag_cnts_[dag.get_type()]=13, &dag=0x7f5444872080, dag={this:0x7f5444872080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC215-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221585377, running_task_cnt:0, indegree:0, hash:-1962062846896452141}) [2024-03-15 07:03:41.588697] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC215-0-0] [lt=37] succeed to del sys task(removed_task={start_time:1710486221585347, task_id:YB427F000001-000613ACAD3FC215-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=323"}) [2024-03-15 07:03:41.588984] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=38] succeed to add sys task(task={start_time:1710486221588977, task_id:YB427F000001-000613ACAD3FC216-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=329"}) [2024-03-15 07:03:41.589023] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=36] schedule one task(task={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, 
task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589066] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=28] succeed to add sys task(task={start_time:1710486221589063, task_id:YB427F000001-000613ACAD3FC217-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=330"}) [2024-03-15 07:03:41.589088] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=19] schedule one task(task={this:0x7f544487a080, type:15, status:2, dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589118] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=21] succeed to add sys task(task={start_time:1710486221589116, task_id:YB427F000001-000613ACAD3FC218-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=182"}) [2024-03-15 07:03:41.589132] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=13] schedule one task(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589150] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=20][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589167] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=27] succeed to add sys task(task={start_time:1710486221589165, task_id:YB427F000001-000613ACAD3FC219-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=3"}) [2024-03-15 07:03:41.589182] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=32][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.589183] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=14] schedule one task(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, 
low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589199] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=12][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589208] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=17] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589222] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=23][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234139196100281) [2024-03-15 07:03:41.589220] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=22] succeed to add sys task(task={start_time:1710486221589217, task_id:YB427F000001-000613ACAD3FC21A-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100002"}) [2024-03-15 07:03:41.589242] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=13] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589236] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=14] schedule one task(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589252] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=18] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.589259] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=32][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589266] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=21] succeed to add sys task(task={start_time:1710486221589264, task_id:YB427F000001-000613ACAD3FC21B-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100003"}) [2024-03-15 07:03:41.589274] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=16][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.589280] INFO [STORAGE] check_tx_table_ready 
(ob_tenant_tablet_scheduler.cpp:589) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=15] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.589287] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=8] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589282] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=14] schedule one task(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.589293] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=26][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589309] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=16][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.589321] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=8] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.589327] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=9] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.589345] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=9] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.589357] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=9] get storage schema to merge(ls_id={id:1}, tablet_id={id:100002}, schema_ctx={base_schema_version:0, schema_version:1681902227124680, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae95550, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227124680, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", 
coercibility:"INVALID"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.589467] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=101] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100002}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.589300] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=16] get storage schema to merge(ls_id={id:1}, tablet_id={id:330}, schema_ctx={base_schema_version:0, schema_version:1681902230986928, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976b440, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230986928, column_cnt:12, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.589511] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=15] succeed to build merge ctx(tablet_id={id:100002}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100002}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227124680, schema_version:1681902227124680, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae95550, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227124680, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100002}, table_count:1, [{i:0, table_key:{tablet_id:{id:100002}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae94fd0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587815, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=50us|(0.25)|GET_PARALLEL_RANGE=151us|(0.75)|total=201us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.589279] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=21] get storage schema to merge(ls_id={id:1}, tablet_id={id:329}, schema_ctx={base_schema_version:0, schema_version:1681902230981912, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976a9f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230981912, column_cnt:29, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.589629] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=105] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}, scanned_row_cnt_arr:0x7f542c02b5f0, output_block_cnt_arr:0x7f542c02b5f8, concurrent_cnt:1, estimate_row_cnt:6, estimate_occupy_size:0, latest_update_ts:1710486221587815, estimated_finish_time:1710486238589628}) [2024-03-15 07:03:41.589648] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=19] succeed to init merge ctx(task={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}}) [2024-03-15 07:03:41.589662] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=14] task finish process(ret=0, start_time=1710486221589281, end_time=1710486221589661, runtime=380, *this={this:0x7f5444854080, type:15, status:2, dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}}) [2024-03-15 07:03:41.589339] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=10] get storage schema to merge(ls_id={id:1}, tablet_id={id:3}, schema_ctx={base_schema_version:0, schema_version:1681902226856760, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967b050, 
version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902226856760, column_cnt:73, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":134217728}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":10}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"encoding_row_store", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"BIGINT":2}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"TINYINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.589732] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=21] schedule one task(task={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.590226] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=399] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:3}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.590368] ERROR alloc_block (ob_local_device.cpp:716) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=20][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.590402] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=33][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.590439] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=194] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:330}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.590458] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=0] partition merge iter row count(i=0, row_count=10, ghost_row_count=0, pkey={tablet_id:{id:100002}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e1cc0, key:{tablet_id:{id:100002}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160328459}, this:0x7f54629e1cc0, timestamp:1710482160328459, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:53, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, 
is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823635, mt_stat_.ready_for_flush_time:1710483939823677, mt_stat_.create_flush_dag_time:1710486221587807, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.590537] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=365] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:329}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.589535] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=14][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.590276] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=18] succeed to build merge ctx(tablet_id={id:3}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:3}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234246707813220, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902226856760, schema_version:1681902226856760, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967b050, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902226856760, column_cnt:73, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":134217728}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":10}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"encoding_row_store", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":2}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"TINYINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:3}, table_count:1, [{i:0, table_key:{tablet_id:{id:3}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967aad0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587727, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=946us|(0.95)|total=996us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.590705] INFO [STORAGE.COMPACTION] 
prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=415] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}, scanned_row_cnt_arr:0x7f542c0275f0, output_block_cnt_arr:0x7f542c0275f8, concurrent_cnt:1, estimate_row_cnt:5, estimate_occupy_size:0, latest_update_ts:1710486221587727, estimated_finish_time:1710486238590704}) [2024-03-15 07:03:41.590733] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=28] succeed to init merge ctx(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}}) [2024-03-15 07:03:41.590510] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=30] succeed to build merge ctx(tablet_id={id:330}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:330}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234136026595822, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230986928, schema_version:1681902230986928, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976b440, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230986928, column_cnt:12, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:22, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:330}, table_count:1, [{i:0, table_key:{tablet_id:{id:330}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234139332999384}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234139332999384}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547976aec0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587543, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=1.21ms|(0.94)|total=1.29ms, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.590748] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=13] task finish process(ret=0, start_time=1710486221589247, end_time=1710486221590746, runtime=1499, *this={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}}) [2024-03-15 07:03:41.590802] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=31] schedule one 
task(task={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.590609] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=28] succeed to build merge ctx(tablet_id={id:329}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:329}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710118072591257993, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230981912, schema_version:1681902230981912, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547976a9f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230981912, column_cnt:29, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:329}, table_count:1, [{i:0, table_key:{tablet_id:{id:329}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547976a470, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587400, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=1.33ms|(0.91)|total=1.47ms, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.590639] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221589783, end_time=1710486221590636, runtime=853, *this={this:0x7f54448541b0, type:1, status:2, dag:{this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:0, dag_status:2, start_time:1710486221589235, running_task_cnt:1, indegree:0, hash:-1828233772001493880}}) [2024-03-15 07:03:41.590702] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=1142][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.591005] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=313] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.591114] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=242] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}, scanned_row_cnt_arr:0x7f542c00b5f0, output_block_cnt_arr:0x7f542c00b5f8, concurrent_cnt:1, estimate_row_cnt:6208, estimate_occupy_size:4194304, latest_update_ts:1710486221587543, 
estimated_finish_time:1710486238591112}) [2024-03-15 07:03:41.591153] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=38] succeed to init merge ctx(task={this:0x7f544487a080, type:15, status:2, dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}}) [2024-03-15 07:03:41.591179] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [848][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=23] task finish process(ret=0, start_time=1710486221589175, end_time=1710486221591176, runtime=2001, *this={this:0x7f544487a080, type:15, status:2, dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}}) [2024-03-15 07:03:41.591037] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=2] dag finished(dag_ret=-4184, runtime=1800, dag_cnt=12, dag_cnts_[dag.get_type()]=12, &dag=0x7f54448739d0, dag={this:0x7f54448739d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21A-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589235, running_task_cnt:0, indegree:0, hash:-1828233772001493880}) [2024-03-15 07:03:41.591242] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=21][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.591261] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=19][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.591246] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=22] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.591255] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21A-0-0] [lt=209] succeed to del sys task(removed_task={start_time:1710486221589217, task_id:YB427F000001-000613ACAD3FC21A-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100002"}) [2024-03-15 07:03:41.591279] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=11] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.591314] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=13] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.591307] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=308] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, 
merge_dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}, scanned_row_cnt_arr:0x7f542c0215f0, output_block_cnt_arr:0x7f542c0215f8, concurrent_cnt:1, estimate_row_cnt:665, estimate_occupy_size:0, latest_update_ts:1710486221587400, estimated_finish_time:1710486238591302}) [2024-03-15 07:03:41.591359] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=52] succeed to init merge ctx(task={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}}) [2024-03-15 07:03:41.591285] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=32] get storage schema to merge(ls_id={id:1}, tablet_id={id:182}, schema_ctx={base_schema_version:0, schema_version:1681902229740080, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54796d7fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229740080, column_cnt:9, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"BIGINT":-1}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.591393] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=30] task finish process(ret=0, start_time=1710486221589095, end_time=1710486221591389, runtime=2294, *this={this:0x7f5444876080, type:15, status:2, dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}}) [2024-03-15 07:03:41.591342] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=15] get storage schema to merge(ls_id={id:1}, tablet_id={id:100003}, schema_ctx={base_schema_version:0, schema_version:1681902227255568, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae95fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227255568, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.591452] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=113] succeed to add sys task(task={start_time:1710486221591415, task_id:YB427F000001-000613ACAD3FC21C-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=114"}) [2024-03-15 07:03:41.591485] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=29] schedule one task(task={this:0x7f544484c080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.591632] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) 
[851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=106] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100003}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.591690] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=34] schedule one task(task={this:0x7f54448761b0, type:1, status:2, dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.591726] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=32] succeed to build merge ctx(tablet_id={id:100003}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100003}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227255568, schema_version:1681902227255568, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae95fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227255568, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100003}, table_count:1, [{i:0, table_key:{tablet_id:{id:100003}, 
column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae95a20, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587901, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=68us|(0.16)|GET_PARALLEL_RANGE=370us|(0.84)|total=438us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.591949] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=202] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}, scanned_row_cnt_arr:0x7f542c02d5f0, output_block_cnt_arr:0x7f542c02d5f8, concurrent_cnt:1, estimate_row_cnt:6, estimate_occupy_size:0, latest_update_ts:1710486221587901, estimated_finish_time:1710486238591946}) [2024-03-15 07:03:41.591973] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=23] succeed to init merge ctx(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}}) [2024-03-15 07:03:41.591981] ERROR alloc_block (ob_local_device.cpp:716) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=12][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.591991] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=16] task finish process(ret=0, start_time=1710486221591227, end_time=1710486221591989, runtime=762, *this={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}}) [2024-03-15 07:03:41.592005] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=24][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.592064] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=43] schedule one task(task={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, 
total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.592060] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=133] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:182}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.592030] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=1] partition merge iter row count(i=0, row_count=9, ghost_row_count=0, pkey={tablet_id:{id:3}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e14f0, key:{tablet_id:{id:3}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160328110}, this:0x7f54629e14f0, timestamp:1710482160328110, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:52, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823534, mt_stat_.ready_for_flush_time:1710483939823596, mt_stat_.create_flush_dag_time:1710486221587719, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.592148] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221591489, end_time=1710486221592147, runtime=658, *this={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:0, dag_status:2, start_time:1710486221589181, running_task_cnt:1, indegree:0, hash:-6912080413595199522}}) [2024-03-15 07:03:41.592147] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=49] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2720698368, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:41.592192] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=3008, dag_cnt=11, dag_cnts_[dag.get_type()]=11, &dag=0x7f54448734c0, dag={this:0x7f54448734c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC219-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589181, running_task_cnt:0, indegree:0, hash:-6912080413595199522}) [2024-03-15 07:03:41.592224] INFO [SERVER] del_task 
(ob_sys_task_stat.cpp:169) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC219-0-0] [lt=26] succeed to del sys task(removed_task={start_time:1710486221589165, task_id:YB427F000001-000613ACAD3FC219-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=3"}) [2024-03-15 07:03:41.592290] ERROR alloc_block (ob_local_device.cpp:716) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=17][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.592304] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=14][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.592310] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=41] Wash time detail, (compute_wash_size_time=182, refresh_score_time=105, wash_time=16) [2024-03-15 07:03:41.592330] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=10][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.592318] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=0] partition merge iter row count(i=0, row_count=10, ghost_row_count=0, pkey={tablet_id:{id:100003}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e2490, key:{tablet_id:{id:100003}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160328679}, this:0x7f54629e2490, timestamp:1710482160328679, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:54, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823715, mt_stat_.ready_for_flush_time:1710483939823757, mt_stat_.create_flush_dag_time:1710486221587893, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.592351] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=20][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.592147] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=44] succeed to build merge ctx(tablet_id={id:182}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:182}, report_:null, for_diagnose:false, 
is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234246707813220, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229740080, schema_version:1681902229740080, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54796d7fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229740080, column_cnt:9, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:182}, table_count:1, [{i:0, table_key:{tablet_id:{id:182}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, 
mod_:1}, tablet_handle:{obj:0x7f54796d7a20, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587640, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=1.51ms|(0.59)|GET_PARALLEL_RANGE=1.07ms|(0.41)|total=2.59ms, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.592370] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=13] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.592384] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221592171, end_time=1710486221592383, runtime=212, *this={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:0, dag_status:2, start_time:1710486221589280, running_task_cnt:1, indegree:0, hash:-1633701860824767150}}) [2024-03-15 07:03:41.592375] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.592405] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=14] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.592409] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=3127, dag_cnt=10, dag_cnts_[dag.get_type()]=10, &dag=0x7f544482c080, dag={this:0x7f544482c080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21B-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589280, running_task_cnt:0, indegree:0, hash:-1633701860824767150}) [2024-03-15 07:03:41.592414] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=39][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.592451] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21B-0-0] [lt=37] succeed to del sys 
task(removed_task={start_time:1710486221589264, task_id:YB427F000001-000613ACAD3FC21B-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100003"}) [2024-03-15 07:03:41.592411] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=227] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}, scanned_row_cnt_arr:0x7f542c0255f0, output_block_cnt_arr:0x7f542c0255f8, concurrent_cnt:1, estimate_row_cnt:33, estimate_occupy_size:0, latest_update_ts:1710486221587640, estimated_finish_time:1710486238592408}) [2024-03-15 07:03:41.592470] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=59] succeed to init merge ctx(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}}) [2024-03-15 07:03:41.592501] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=29] task finish process(ret=0, start_time=1710486221589516, end_time=1710486221592499, runtime=2983, *this={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}}) [2024-03-15 07:03:41.592521] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=59] succeed to add sys task(task={start_time:1710486221592512, task_id:YB427F000001-000613ACAD3FC21D-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101001"}) [2024-03-15 07:03:41.592551] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=27] schedule one task(task={this:0x7f544484e080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.592781] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=33] succeed to add sys task(task={start_time:1710486221592773, task_id:YB427F000001-000613ACAD3FC21E-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=5"}) [2024-03-15 07:03:41.592448] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=15] get storage schema to merge(ls_id={id:1}, tablet_id={id:114}, schema_ctx={base_schema_version:0, schema_version:1681902229247224, 
storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479683660, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229247224, column_cnt:74, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":134217728}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":10}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"encoding_row_store", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":2}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"TINYINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", 
collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.593003] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=201] schedule one task(task={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.593197] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=25][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.593211] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=14][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.593186] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=29][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.593222] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=7] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.593228] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=41][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.593244] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=8] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.593246] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=11] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.593208] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) 
[858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=127] schedule one task(task={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.593279] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=12] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.593296] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=13] get storage schema to merge(ls_id={id:1}, tablet_id={id:101001}, schema_ctx={base_schema_version:0, schema_version:1681902229254624, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae988e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229254624, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.593394] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=84] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.593254] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=8] get storage schema to merge(ls_id={id:1}, tablet_id={id:5}, schema_ctx={base_schema_version:0, 
schema_version:1681902228339576, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967c4f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902228339576, column_cnt:13, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.593552] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=284] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:5}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 
07:03:41.593479] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=19] succeed to build merge ctx(tablet_id={id:101001}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229254624, schema_version:1681902229254624, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae988e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229254624, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101001}, table_count:1, [{i:0, table_key:{tablet_id:{id:101001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae98360, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588064, estimated_finish_time:0}, compaction_filter:NULL, 
time_guard:COMPACTION_POLICY=92us|(0.37)|GET_PARALLEL_RANGE=154us|(0.63)|total=246us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.593665] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=162] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}, scanned_row_cnt_arr:0x7f542c0315f0, output_block_cnt_arr:0x7f542c0315f8, concurrent_cnt:1, estimate_row_cnt:15, estimate_occupy_size:0, latest_update_ts:1710486221588064, estimated_finish_time:1710486238593663}) [2024-03-15 07:03:41.593692] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=26] succeed to init merge ctx(task={this:0x7f544484e080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}}) [2024-03-15 07:03:41.593591] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=15] succeed to build merge ctx(tablet_id={id:5}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:5}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234246707813220, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902228339576, schema_version:1681902228339576, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967c4f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902228339576, column_cnt:13, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:5}, table_count:1, [{i:0, table_key:{tablet_id:{id:5}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967bf70, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588146, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=46us|(0.12)|GET_PARALLEL_RANGE=335us|(0.88)|total=381us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.593713] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=19] task finish process(ret=0, start_time=1710486221592807, end_time=1710486221593711, runtime=904, *this={this:0x7f544484e080, type:15, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}}) [2024-03-15 07:03:41.593749] INFO [STORAGE.COMPACTION] 
prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=129] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}, scanned_row_cnt_arr:0x7f542c0335f0, output_block_cnt_arr:0x7f542c0335f8, concurrent_cnt:1, estimate_row_cnt:29, estimate_occupy_size:0, latest_update_ts:1710486221588146, estimated_finish_time:1710486238593748}) [2024-03-15 07:03:41.593767] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=18] succeed to init merge ctx(task={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}}) [2024-03-15 07:03:41.593781] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=12] task finish process(ret=0, start_time=1710486221593185, end_time=1710486221593779, runtime=594, *this={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}}) [2024-03-15 07:03:41.593793] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=58] schedule one task(task={this:0x7f544484e1b0, type:1, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.594071] ERROR alloc_block (ob_local_device.cpp:716) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=8][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.594089] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=17][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.594107] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=0] partition merge iter row count(i=0, row_count=15, ghost_row_count=0, pkey={tablet_id:{id:101001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e3430, key:{tablet_id:{id:101001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160329122}, this:0x7f54629e3430, timestamp:1710482160329122, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:56, clock:14680064}, host:0x7f54b49fc030, 
arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823874, mt_stat_.ready_for_flush_time:1710483939823915, mt_stat_.create_flush_dag_time:1710486221588056, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.594204] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221593862, end_time=1710486221594203, runtime=341, *this={this:0x7f544484e1b0, type:1, status:2, dag:{this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:0, dag_status:2, start_time:1710486221592547, running_task_cnt:1, indegree:0, hash:198350566326085695}}) [2024-03-15 07:03:41.594219] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=55] add dag success(dag=0x7f54448b6aa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1728883537140023654, dag_cnt=11, dag_type_cnts=11) [2024-03-15 07:03:41.594280] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1731, dag_cnt=10, dag_cnts_[dag.get_type()]=10, &dag=0x7f544482caa0, dag={this:0x7f544482caa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21D-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221592547, running_task_cnt:0, indegree:0, hash:198350566326085695}) [2024-03-15 07:03:41.594309] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21D-0-0] [lt=24] succeed to del sys task(removed_task={start_time:1710486221592512, task_id:YB427F000001-000613ACAD3FC21D-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101001"}) [2024-03-15 07:03:41.594267] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=35] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:1}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444814550, key:{tablet_id:{id:1}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482174228242}, this:0x7f5444814550, timestamp:1710482174228242, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:82, clock:56623104}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710362672384988561}, snapshot_version:{val:1710506547144172701}, 
migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824350, mt_stat_.ready_for_flush_time:1710483939824391, mt_stat_.create_flush_dag_time:1710486221588575, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.594351] ERROR alloc_block (ob_local_device.cpp:716) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=39][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.594367] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=76] add dag success(dag=0x7f54448b6fb0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-1599854902035152836, dag_cnt=11, dag_type_cnts=11) [2024-03-15 07:03:41.594375] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=22][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.594400] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=0] partition merge iter row count(i=0, row_count=161, ghost_row_count=0, pkey={tablet_id:{id:182}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629db730, key:{tablet_id:{id:182}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482152646140}, this:0x7f54629db730, timestamp:1710482152646140, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:40, clock:2097152}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710263819867968870}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823453, mt_stat_.ready_for_flush_time:1710483939823495, mt_stat_.create_flush_dag_time:1710486221587630, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.594394] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=18] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:319}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444817430, key:{tablet_id:{id:319}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482178162140}, this:0x7f5444817430, timestamp:1710482178162140, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:88, clock:62914560}, host:0x7f54b49fc030, arena_handle:{allocated:0}, 
last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710385376303090625}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824505, mt_stat_.ready_for_flush_time:1710483939824558, mt_stat_.create_flush_dag_time:1710486221594343, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.594537] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=40] schedule one task(task={this:0x7f54448501b0, type:1, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.594538] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221593314, end_time=1710486221594536, runtime=1222, *this={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:0, dag_status:2, start_time:1710486221589131, running_task_cnt:1, indegree:0, hash:-2785108733753523045}}) [2024-03-15 07:03:41.594577] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=30] succeed to add sys task(task={start_time:1710486221594573, task_id:YB427F000001-000613ACAD3FC21F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100006"}) [2024-03-15 07:03:41.594612] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=32] schedule one task(task={this:0x7f5444890080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.594671] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=586] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:114}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.594729] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=100] add dag success(dag=0x7f54448b74c0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=663552013962694154, dag_cnt=12, dag_type_cnts=12) [2024-03-15 07:03:41.594764] 
INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=21] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444817c00, key:{tablet_id:{id:101004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482178162389}, this:0x7f5444817c00, timestamp:1710482178162389, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:89, clock:62914560}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710385376303090625}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824699, mt_stat_.ready_for_flush_time:1710483939824751, mt_stat_.create_flush_dag_time:1710486221594494, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.594795] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=5661, dag_cnt=11, dag_cnts_[dag.get_type()]=11, &dag=0x7f5444872fb0, dag={this:0x7f5444872fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC218-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589131, running_task_cnt:0, indegree:0, hash:-2785108733753523045}) [2024-03-15 07:03:41.594931] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [820][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC218-0-0] [lt=62] succeed to del sys task(removed_task={start_time:1710486221589116, task_id:YB427F000001-000613ACAD3FC218-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=182"}) [2024-03-15 07:03:41.595004] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=66] add dag success(dag=0x7f54448b79d0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-7283149547720778478, dag_cnt=12, dag_type_cnts=12) [2024-03-15 07:03:41.595039] ERROR alloc_block (ob_local_device.cpp:716) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=25][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.595018] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=13] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:4}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444818ba0, key:{tablet_id:{id:4}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180521922}, this:0x7f5444818ba0, timestamp:1710482180521922, 
state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:91, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824792, mt_stat_.ready_for_flush_time:1710483939824847, mt_stat_.create_flush_dag_time:1710486221594830, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595058] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=19][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.595070] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=41] add dag success(dag=0x7f544484a080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-245371073307898354, dag_cnt=13, dag_type_cnts=13) [2024-03-15 07:03:41.594721] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=22] succeed to build merge ctx(tablet_id={id:114}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:114}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229247224, schema_version:1681902229247224, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479683660, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229247224, column_cnt:74, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", 
collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, 
{meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":134217728}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":10}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"encoding_row_store", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":2}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"TINYINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:114}, table_count:1, [{i:0, table_key:{tablet_id:{id:114}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:4}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, 
ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f54796830e0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221587982, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=2.31ms|(0.97)|total=2.38ms, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.595080] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=9] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444819370, key:{tablet_id:{id:100004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522242}, this:0x7f5444819370, timestamp:1710482180522242, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:92, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824886, mt_stat_.ready_for_flush_time:1710483939824939, mt_stat_.create_flush_dag_time:1710486221595060, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595120] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=33] add dag success(dag=0x7f544484a590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-708937556109252, dag_cnt=14, dag_type_cnts=14) [2024-03-15 07:03:41.595077] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=0] partition merge iter row count(i=0, row_count=29, ghost_row_count=0, pkey={tablet_id:{id:5}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e3c00, key:{tablet_id:{id:5}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160329297}, this:0x7f54629e3c00, timestamp:1710482160329297, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:57, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, 
max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823954, mt_stat_.ready_for_flush_time:1710483939823995, mt_stat_.create_flush_dag_time:1710486221588138, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595129] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=390] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}, scanned_row_cnt_arr:0x7f542c02f5f0, output_block_cnt_arr:0x7f542c02f5f8, concurrent_cnt:1, estimate_row_cnt:15, estimate_occupy_size:0, latest_update_ts:1710486221587982, estimated_finish_time:1710486238595127}) [2024-03-15 07:03:41.595128] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=7] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f5444819b40, key:{tablet_id:{id:100005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522450}, this:0x7f5444819b40, timestamp:1710482180522450, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:93, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824978, mt_stat_.ready_for_flush_time:1710483939825074, mt_stat_.create_flush_dag_time:1710486221595113, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595161] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=31] succeed to init merge ctx(task={this:0x7f544484c080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}}) [2024-03-15 07:03:41.595166] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=31] add dag success(dag=0x7f544484aaa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=5238607534375264894, dag_cnt=15, dag_type_cnts=15) [2024-03-15 07:03:41.595175] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [855][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] 
[lt=14] task finish process(ret=0, start_time=1710486221592315, end_time=1710486221595174, runtime=2859, *this={this:0x7f544484c080, type:15, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}}) [2024-03-15 07:03:41.595174] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221594723, end_time=1710486221595173, runtime=450, *this={this:0x7f54448501b0, type:1, status:2, dag:{this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:0, dag_status:2, start_time:1710486221592997, running_task_cnt:1, indegree:0, hash:6152367908818667172}}) [2024-03-15 07:03:41.595175] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=8] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:115}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481a310, key:{tablet_id:{id:115}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522638}, this:0x7f544481a310, timestamp:1710482180522638, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:94, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825116, mt_stat_.ready_for_flush_time:1710483939825158, mt_stat_.create_flush_dag_time:1710486221595160, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595207] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=2209, dag_cnt=14, dag_cnts_[dag.get_type()]=14, &dag=0x7f544482cfb0, dag={this:0x7f544482cfb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21E-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221592997, running_task_cnt:0, indegree:0, hash:6152367908818667172}) [2024-03-15 07:03:41.595233] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [830][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21E-0-0] [lt=22] succeed to del sys task(removed_task={start_time:1710486221592773, task_id:YB427F000001-000613ACAD3FC21E-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=5"}) [2024-03-15 07:03:41.595303] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=31] add dag success(dag=0x7f544484afb0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=5347700507801701344, dag_cnt=15, dag_type_cnts=15) [2024-03-15 07:03:41.595384] INFO 
[COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=28] schedule one task(task={this:0x7f544487a1b0, type:1, status:2, dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.595437] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=29] schedule one task(task={this:0x7f544484c1b0, type:1, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.595314] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=10] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:104}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481aae0, key:{tablet_id:{id:104}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181204870}, this:0x7f544481aae0, timestamp:1710482181204870, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:95, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825197, mt_stat_.ready_for_flush_time:1710483939825238, mt_stat_.create_flush_dag_time:1710486221595207, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595489] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=22] succeed to add sys task(task={start_time:1710486221595485, task_id:YB427F000001-000613ACAD3FC220-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100001"}) [2024-03-15 07:03:41.595507] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=16] schedule one task(task={this:0x7f5444892080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}}, 
priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.595537] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=21] succeed to add sys task(task={start_time:1710486221595534, task_id:YB427F000001-000613ACAD3FC221-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=328"}) [2024-03-15 07:03:41.595552] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=14] schedule one task(task={this:0x7f5444894080, type:15, status:2, dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.595603] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=181] add dag success(dag=0x7f544484b4c0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=2191978846819725395, dag_cnt=16, dag_type_cnts=16) [2024-03-15 07:03:41.595623] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=16] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481b2b0, key:{tablet_id:{id:101006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181205417}, this:0x7f544481b2b0, timestamp:1710482181205417, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:96, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825299, mt_stat_.ready_for_flush_time:1710483939825357, mt_stat_.create_flush_dag_time:1710486221595496, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595695] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=53] add dag success(dag=0x7f544484b9d0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-374125254385398122, dag_cnt=17, dag_type_cnts=17) [2024-03-15 07:03:41.595704] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=8] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, 
tablet_id:{id:105}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481c080, key:{tablet_id:{id:105}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181205880}, this:0x7f544481c080, timestamp:1710482181205880, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:97, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825396, mt_stat_.ready_for_flush_time:1710483939825456, mt_stat_.create_flush_dag_time:1710486221595677, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595752] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=32] add dag success(dag=0x7f5444866080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-7831358327520612074, dag_cnt=18, dag_type_cnts=18) [2024-03-15 07:03:41.595826] ERROR alloc_block (ob_local_device.cpp:716) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=32][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.595853] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=26][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.595776] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=16] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:102}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481c850, key:{tablet_id:{id:102}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350363}, this:0x7f544481c850, timestamp:1710482181350363, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:98, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825506, mt_stat_.ready_for_flush_time:1710483939825569, mt_stat_.create_flush_dag_time:1710486221595736, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) 
[2024-03-15 07:03:41.595889] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=98] add dag success(dag=0x7f5444866590, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=724552403687646686, dag_cnt=19, dag_type_cnts=19) [2024-03-15 07:03:41.595869] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=1] partition merge iter row count(i=0, row_count=15, ghost_row_count=0, pkey={tablet_id:{id:114}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e2c60, key:{tablet_id:{id:114}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:4, upper_trans_version:9223372036854775807, timestamp:1710482160328934}, this:0x7f54629e2c60, timestamp:1710482160328934, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:55, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823796, mt_stat_.ready_for_flush_time:1710483939823836, mt_stat_.create_flush_dag_time:1710486221587975, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595908] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=19] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481d020, key:{tablet_id:{id:101005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705475766103355460}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350654}, this:0x7f544481d020, timestamp:1710482181350654, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:99, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825610, mt_stat_.ready_for_flush_time:1710483939825652, mt_stat_.create_flush_dag_time:1710486221595867, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595935] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=0] task 
finish process(ret=-4184, start_time=1710486221595511, end_time=1710486221595934, runtime=423, *this={this:0x7f544484c1b0, type:1, status:2, dag:{this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:0, dag_status:2, start_time:1710486221591481, running_task_cnt:1, indegree:0, hash:1878867541797627461}}) [2024-03-15 07:03:41.595951] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=34] add dag success(dag=0x7f5444866aa0, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=-285687946438837293, dag_cnt=20, dag_type_cnts=20) [2024-03-15 07:03:41.595971] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=4489, dag_cnt=19, dag_cnts_[dag.get_type()]=19, &dag=0x7f544482c590, dag={this:0x7f544482c590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21C-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221591481, running_task_cnt:0, indegree:0, hash:1878867541797627461}) [2024-03-15 07:03:41.595962] INFO [STORAGE.TRANS] flush (ob_memtable.cpp:2098) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=7] schedule tablet merge dag successfully(ret=0, param={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:103}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, this={ObITable:{this:0x7f544481d7f0, key:{tablet_id:{id:103}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350878}, this:0x7f544481d7f0, timestamp:1710482181350878, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:100, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825690, mt_stat_.ready_for_flush_time:1710483939825766, mt_stat_.create_flush_dag_time:1710486221595943, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.595999] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [845][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21C-0-0] [lt=24] succeed to del sys task(removed_task={start_time:1710486221591415, task_id:YB427F000001-000613ACAD3FC21C-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=114"}) [2024-03-15 07:03:41.595995] INFO [STORAGE] traversal_flush_ (ob_data_checkpoint.cpp:657) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=31] traversal_flush successfully(ls_->get_ls_id()={id:1}, flush_tasks=[{table:0x7f545f256080, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f25be40, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f2623d0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f263370, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f263b40, 
t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f264310, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f264ae0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f2652b0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629d8080, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f545f262ba0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629d97f0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629d9020, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629d8850, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629db730, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e14f0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e1cc0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e2490, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e2c60, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e3430, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e3c00, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e43d0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54629e4ba0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f54448135b0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444813d80, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444814550, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444817430, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444817c00, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444818ba0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444819370, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f5444819b40, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f544481a310, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}, {table:0x7f544481aae0, t3m_:0x7f54a23e2030, allocator_:null, table_type_:0}]) [2024-03-15 07:03:41.596071] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=21] succeed to add sys task(task={start_time:1710486221596064, task_id:YB427F000001-000613ACAD3FC222-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=121"}) [2024-03-15 07:03:41.596101] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:341) [900][T1004_Flush][T1004][Y0-0000000000000000-0-0] [lt=77] succeed to traversal_flush(ret=0, ls_cnt=2) [2024-03-15 07:03:41.596094] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=20] schedule one task(task={this:0x7f5444896080, type:15, status:2, dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.596212] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=23][errcode=-4018] no freeze info in curr 
info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.596233] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=22][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710300091165675250) [2024-03-15 07:03:41.596245] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=8] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.596275] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=16] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.596161] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=22][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.596359] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=196][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710362631914846675) [2024-03-15 07:03:41.596374] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=10] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.596398] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=8] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.596290] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=12] get storage schema to merge(ls_id={id:1}, tablet_id={id:328}, schema_ctx={base_schema_version:0, schema_version:1681902230976520, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479769fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230976520, column_cnt:26, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.596409] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=8] get storage schema to merge(ls_id={id:1}, tablet_id={id:121}, schema_ctx={base_schema_version:0, schema_version:1681902229327912, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479685550, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229327912, column_cnt:8, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:20, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.596503] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=85] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:121}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.596449] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=146] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:328}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.596541] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=13] succeed to build merge ctx(tablet_id={id:121}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:121}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710300091165675250, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229327912, schema_version:1681902229327912, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479685550, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229327912, column_cnt:8, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:20, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", 
coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:121}, table_count:1, [{i:0, table_key:{tablet_id:{id:121}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479684fd0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588500, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=234us|(0.64)|GET_PARALLEL_RANGE=132us|(0.36)|total=366us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.596682] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=128] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}, scanned_row_cnt_arr:0x7f542c0155f0, output_block_cnt_arr:0x7f542c0155f8, concurrent_cnt:1, estimate_row_cnt:2, estimate_occupy_size:4194304, latest_update_ts:1710486221588500, estimated_finish_time:1710486238596683}) [2024-03-15 07:03:41.596701] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=18] succeed to init merge ctx(task={this:0x7f5444896080, type:15, status:2, dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}}) [2024-03-15 07:03:41.596719] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [836][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=17] task finish process(ret=0, start_time=1710486221596149, end_time=1710486221596718, runtime=569, *this={this:0x7f5444896080, type:15, status:2, dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}}) [2024-03-15 07:03:41.596871] 
INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=24] schedule one task(task={this:0x7f54448961b0, type:1, status:2, dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.596733] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=185] succeed to build merge ctx(tablet_id={id:328}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:328}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230976520, schema_version:1681902230976520, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f5479769fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230976520, column_cnt:26, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"DOUBLE", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TINYINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:328}, table_count:1, [{i:0, table_key:{tablet_id:{id:328}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710300091367964368}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710300091367964368}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479769a20, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588391, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=60us|(0.14)|GET_PARALLEL_RANGE=374us|(0.86)|total=434us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.597053] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=293] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}, scanned_row_cnt_arr:0x7f542c0135f0, output_block_cnt_arr:0x7f542c0135f8, concurrent_cnt:1, estimate_row_cnt:40, estimate_occupy_size:0, latest_update_ts:1710486221588391, estimated_finish_time:1710486238597051}) [2024-03-15 07:03:41.597072] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=19] succeed to init merge ctx(task={this:0x7f5444894080, type:15, status:2, dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}}) [2024-03-15 07:03:41.597086] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [822][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=12] task finish process(ret=0, start_time=1710486221596201, end_time=1710486221597085, runtime=884, *this={this:0x7f5444894080, type:15, status:2, dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}}) [2024-03-15 07:03:41.597194] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=61] schedule one task(task={this:0x7f54448941b0, type:1, status:2, dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.597188] ERROR 
alloc_block (ob_local_device.cpp:716) [817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=28][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.597307] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=117][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.597334] ERROR alloc_block (ob_local_device.cpp:716) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=16][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.597324] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=0] partition merge iter row count(i=0, row_count=22, ghost_row_count=0, pkey={tablet_id:{id:121}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444813d80, key:{tablet_id:{id:121}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482174205290}, this:0x7f5444813d80, timestamp:1710482174205290, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:81, clock:56623104}, host:0x7f54b49fc030, arena_handle:{allocated:4194304}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710362654997374824}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824271, mt_stat_.ready_for_flush_time:1710483939824312, mt_stat_.create_flush_dag_time:1710486221588492, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.597391] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221597019, end_time=1710486221597390, runtime=371, *this={this:0x7f54448961b0, type:1, status:2, dag:{this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:0, dag_status:2, start_time:1710486221596091, running_task_cnt:1, indegree:0, hash:3484783245450929688}}) [2024-03-15 07:03:41.597386] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=51][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.597413] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1321, dag_cnt=18, dag_cnts_[dag.get_type()]=18, &dag=0x7f54448b6590, dag={this:0x7f54448b6590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC222-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221596091, running_task_cnt:0, indegree:0, hash:3484783245450929688}) [2024-03-15 07:03:41.597443] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) 
[817][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC222-0-0] [lt=27] succeed to del sys task(removed_task={start_time:1710486221596064, task_id:YB427F000001-000613ACAD3FC222-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=121"}) [2024-03-15 07:03:41.597445] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=0] partition merge iter row count(i=0, row_count=115, ghost_row_count=0, pkey={tablet_id:{id:330}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234139332999384}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629d8850, key:{tablet_id:{id:330}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234139332999384}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482150020716}, this:0x7f54629d8850, timestamp:1710482150020716, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:34, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:4194304}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252037663609291}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823356, mt_stat_.ready_for_flush_time:1710483939823397, mt_stat_.create_flush_dag_time:1710486221587531, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.597551] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221595476, end_time=1710486221597549, runtime=2073, *this={this:0x7f544487a1b0, type:1, status:2, dag:{this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:0, dag_status:2, start_time:1710486221589086, running_task_cnt:1, indegree:0, hash:-6605681042843664973}}) [2024-03-15 07:03:41.597598] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=8509, dag_cnt=17, dag_cnts_[dag.get_type()]=17, &dag=0x7f5444872aa0, dag={this:0x7f5444872aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC217-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589086, running_task_cnt:0, indegree:0, hash:-6605681042843664973}) [2024-03-15 07:03:41.597624] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [834][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC217-0-0] [lt=20] succeed to del sys task(removed_task={start_time:1710486221589063, task_id:YB427F000001-000613ACAD3FC217-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=330"}) [2024-03-15 07:03:41.597963] ERROR alloc_block (ob_local_device.cpp:716) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=18][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.597991] WDIAG 
[STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=26][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.598006] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=0] partition merge iter row count(i=0, row_count=40, ghost_row_count=0, pkey={tablet_id:{id:328}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710300091367964368}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54448135b0, key:{tablet_id:{id:328}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710300091367964368}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482170806942}, this:0x7f54448135b0, timestamp:1710482170806942, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:80, clock:31457280}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710338418128522881}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824192, mt_stat_.ready_for_flush_time:1710483939824233, mt_stat_.create_flush_dag_time:1710486221588384, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.598083] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221597271, end_time=1710486221598082, runtime=811, *this={this:0x7f54448941b0, type:1, status:2, dag:{this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:0, dag_status:2, start_time:1710486221595550, running_task_cnt:1, indegree:0, hash:3340047647417801619}}) [2024-03-15 07:03:41.598152] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=2600, dag_cnt=16, dag_cnts_[dag.get_type()]=16, &dag=0x7f54448b6080, dag={this:0x7f54448b6080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC221-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221595550, running_task_cnt:0, indegree:0, hash:3340047647417801619}) [2024-03-15 07:03:41.598216] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [840][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC221-0-0] [lt=28] succeed to del sys task(removed_task={start_time:1710486221595534, task_id:YB427F000001-000613ACAD3FC221-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=328"}) [2024-03-15 07:03:41.598288] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=49] succeed to add sys task(task={start_time:1710486221598285, task_id:YB427F000001-000613ACAD3FC223-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=1"}) [2024-03-15 07:03:41.598309] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) 
[858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=19] schedule one task(task={this:0x7f5444878080, type:15, status:2, dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.598337] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=17] succeed to add sys task(task={start_time:1710486221598335, task_id:YB427F000001-000613ACAD3FC224-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=319"}) [2024-03-15 07:03:41.598347] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=9] schedule one task(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.598371] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=13] succeed to add sys task(task={start_time:1710486221598365, task_id:YB427F000001-000613ACAD3FC225-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101004"}) [2024-03-15 07:03:41.598384] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=13][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.598383] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=10] schedule one task(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, hash:663552013962694154}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.598401] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=16][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710362631914846675) [2024-03-15 07:03:41.598450] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=12] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.598495] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) 
[823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=26][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.598521] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=25][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.598518] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=25] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.598541] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=14] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.598579] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=14] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.598543] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=20] get storage schema to merge(ls_id={id:1}, tablet_id={id:1}, schema_ctx={base_schema_version:0, schema_version:0, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967a600, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:0, column_cnt:6, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.598599] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=15] get storage schema to merge(ls_id={id:1}, tablet_id={id:101004}, schema_ctx={base_schema_version:0, schema_version:1681902230869456, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae99330, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:2, index_status:1, row_store_type:1, schema_version:1681902230869456, column_cnt:5, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.598679] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=122] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:1}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.598711] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=98] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.598774] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=24] succeed to build merge ctx(tablet_id={id:101004}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, 
sstable_version_range:{multi_version_start:1709708556674153395, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230869456, schema_version:1681902230869456, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae99330, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:2, index_status:1, row_store_type:1, schema_version:1681902230869456, column_cnt:5, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101004}, table_count:1, [{i:0, table_key:{tablet_id:{id:101004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae98db0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221594711, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=81us|(0.32)|GET_PARALLEL_RANGE=176us|(0.68)|total=257us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, 
skip_rest_operation=false) [2024-03-15 07:03:41.598727] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=20] succeed to build merge ctx(tablet_id={id:1}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:1}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710235938150066547, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:0, schema_version:0, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967a600, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:0, column_cnt:6, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:1}, table_count:1, [{i:0, table_key:{tablet_id:{id:1}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967a080, obj_pool:0x7f54a23f3cb0, wash_priority:0}, 
merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588974, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=125us|(0.38)|GET_PARALLEL_RANGE=202us|(0.62)|total=327us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.598972] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=180] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, hash:663552013962694154}, scanned_row_cnt_arr:0x7f542c0315f0, output_block_cnt_arr:0x7f542c0315f8, concurrent_cnt:1, estimate_row_cnt:4, estimate_occupy_size:0, latest_update_ts:1710486221594711, estimated_finish_time:1710486238598970}) [2024-03-15 07:03:41.599005] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=32] succeed to init merge ctx(task={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, hash:663552013962694154}}) [2024-03-15 07:03:41.599001] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=247] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}, scanned_row_cnt_arr:0x7f542c0175f0, output_block_cnt_arr:0x7f542c0175f8, concurrent_cnt:1, estimate_row_cnt:2, estimate_occupy_size:0, latest_update_ts:1710486221588974, estimated_finish_time:1710486238598997}) [2024-03-15 07:03:41.599029] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [823][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=22] task finish process(ret=0, start_time=1710486221598477, end_time=1710486221599027, runtime=550, *this={this:0x7f5444856080, type:15, status:2, dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, hash:663552013962694154}}) [2024-03-15 07:03:41.599041] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=39] succeed to init merge ctx(task={this:0x7f5444878080, type:15, status:2, dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}}) [2024-03-15 07:03:41.599070] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [825][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=27] task finish process(ret=0, start_time=1710486221598363, end_time=1710486221599067, runtime=704, *this={this:0x7f5444878080, type:15, 
status:2, dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}}) [2024-03-15 07:03:41.599090] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=24] schedule one task(task={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, hash:663552013962694154}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.599149] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=12][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.599164] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=15][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710234133899240143) [2024-03-15 07:03:41.599177] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=7] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.599203] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=8] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.599215] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=10] get storage schema to merge(ls_id={id:1}, tablet_id={id:319}, schema_ctx={base_schema_version:0, schema_version:1681902230865928, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54797661c0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230865928, column_cnt:17, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.599357] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=131] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:319}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.599530] ERROR alloc_block (ob_local_device.cpp:716) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=17][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.599402] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=15] succeed to build merge ctx(tablet_id={id:319}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, 
ls_id:{id:1}, tablet_id:{id:319}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710232452548914501, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902230865928, schema_version:1681902230865928, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54797661c0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902230865928, column_cnt:17, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT UNSIGNED", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT UNSIGNED":0}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:319}, table_count:1, [{i:0, table_key:{tablet_id:{id:319}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479765c40, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221594359, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=52us|(0.22)|GET_PARALLEL_RANGE=185us|(0.78)|total=237us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.599584] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=51][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.599607] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=191] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}, scanned_row_cnt_arr:0x7f542c0275f0, output_block_cnt_arr:0x7f542c0275f8, concurrent_cnt:1, estimate_row_cnt:4, estimate_occupy_size:0, latest_update_ts:1710486221594359, estimated_finish_time:1710486238599605}) [2024-03-15 07:03:41.599628] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=20] succeed to init merge ctx(task={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}}) [2024-03-15 07:03:41.599643] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [850][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=14] task 
finish process(ret=0, start_time=1710486221599135, end_time=1710486221599641, runtime=506, *this={this:0x7f5444806080, type:15, status:2, dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}}) [2024-03-15 07:03:41.599617] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=1] partition merge iter row count(i=0, row_count=25, ghost_row_count=0, pkey={tablet_id:{id:329}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629d9020, key:{tablet_id:{id:329}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482150021077}, this:0x7f54629d9020, timestamp:1710482150021077, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:35, clock:0}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710252037157080262}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939823270, mt_stat_.ready_for_flush_time:1710483939823317, mt_stat_.create_flush_dag_time:1710486221585337, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.599722] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=21] schedule one task(task={this:0x7f54448781b0, type:1, status:2, dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.599750] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=21] schedule one task(task={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.599770] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=1] task finish process(ret=-4184, start_time=1710486221599096, end_time=1710486221599768, runtime=672, *this={this:0x7f54448761b0, type:1, status:2, dag:{this:0x7f5444872590, type:0, name:"MINI_MERGE", 
id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:0, dag_status:2, start_time:1710486221589019, running_task_cnt:1, indegree:0, hash:-2810766448934472}}) [2024-03-15 07:03:41.599814] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=10793, dag_cnt=15, dag_cnts_[dag.get_type()]=15, &dag=0x7f5444872590, dag={this:0x7f5444872590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC216-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221589019, running_task_cnt:0, indegree:0, hash:-2810766448934472}) [2024-03-15 07:03:41.599841] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [833][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC216-0-0] [lt=20] succeed to del sys task(removed_task={start_time:1710486221588977, task_id:YB427F000001-000613ACAD3FC216-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=329"}) [2024-03-15 07:03:41.599915] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=14] succeed to add sys task(task={start_time:1710486221599909, task_id:YB427F000001-000613ACAD3FC226-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=4"}) [2024-03-15 07:03:41.599931] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=15] schedule one task(task={this:0x7f5444832080, type:15, status:2, dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.599991] ERROR alloc_block (ob_local_device.cpp:716) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=25][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.599997] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=11][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600013] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=21][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.600022] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=26][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.600035] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=8] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600056] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=8] tx table 
ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.600036] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=0] partition merge iter row count(i=0, row_count=4, ghost_row_count=0, pkey={tablet_id:{id:319}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444817430, key:{tablet_id:{id:319}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482178162140}, this:0x7f5444817430, timestamp:1710482178162140, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:88, clock:62914560}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710385376303090625}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824505, mt_stat_.ready_for_flush_time:1710483939824558, mt_stat_.create_flush_dag_time:1710486221594343, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.600147] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221599816, end_time=1710486221600145, runtime=329, *this={this:0x7f54448061b0, type:1, status:2, dag:{this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:0, dag_status:2, start_time:1710486221598346, running_task_cnt:1, indegree:0, hash:-1599854902035152836}}) [2024-03-15 07:03:41.600187] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1839, dag_cnt=14, dag_cnts_[dag.get_type()]=14, &dag=0x7f54448b6fb0, dag={this:0x7f54448b6fb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC224-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221598346, running_task_cnt:0, indegree:0, hash:-1599854902035152836}) [2024-03-15 07:03:41.600211] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=72][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600068] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=8] get storage schema to merge(ls_id={id:1}, tablet_id={id:4}, schema_ctx={base_schema_version:0, schema_version:1681902227546176, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967baa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902227546176, 
column_cnt:30, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-32}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.600228] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=18][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.600219] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [827][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC224-0-0] [lt=25] succeed to del sys task(removed_task={start_time:1710486221598335, task_id:YB427F000001-000613ACAD3FC224-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=319"}) [2024-03-15 07:03:41.600244] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=10] no freeze info in curr info_list(ret=-4018, cur_idx_=0, 
info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600274] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=11] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.600316] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=16] succeed to add sys task(task={start_time:1710486221600311, task_id:YB427F000001-000613ACAD3FC227-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100004"}) [2024-03-15 07:03:41.600289] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=12] get storage schema to merge(ls_id={id:1}, tablet_id={id:100006}, schema_ctx={base_schema_version:0, schema_version:1681902228471808, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae97e90, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902228471808, column_cnt:2, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.600331] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=14] schedule one task(task={this:0x7f5444834080, type:15, status:2, dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.600389] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=13][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600400] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=11][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710235938150066547) [2024-03-15 07:03:41.600410] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=7] no 
freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.600440] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=8] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.600451] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=9] get storage schema to merge(ls_id={id:1}, tablet_id={id:100004}, schema_ctx={base_schema_version:0, schema_version:1681902227739344, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae969f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227739344, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.600509] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=53] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.600553] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=22] succeed to build merge ctx(tablet_id={id:100004}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100004}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, 
[idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227739344, schema_version:1681902227739344, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae969f0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227739344, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100004}, table_count:1, [{i:0, table_key:{tablet_id:{id:100004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae96470, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595068, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=49us|(0.32)|GET_PARALLEL_RANGE=103us|(0.68)|total=152us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.600682] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=117] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, 
start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}, scanned_row_cnt_arr:0x7f542c0255f0, output_block_cnt_arr:0x7f542c0255f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595068, estimated_finish_time:1710486238600681}) [2024-03-15 07:03:41.600703] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=20] succeed to init merge ctx(task={this:0x7f5444834080, type:15, status:2, dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}}) [2024-03-15 07:03:41.600717] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [842][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=13] task finish process(ret=0, start_time=1710486221600380, end_time=1710486221600715, runtime=335, *this={this:0x7f5444834080, type:15, status:2, dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}}) [2024-03-15 07:03:41.600756] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=163] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:4}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.600758] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=16] schedule one task(task={this:0x7f54448341b0, type:1, status:2, dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.600818] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=52] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.600872] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=21] succeed to build merge ctx(tablet_id={id:100006}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234246707813220, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, 
is_inited:true}, schema_ctx:{base_schema_version:1681902228471808, schema_version:1681902228471808, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae97e90, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902228471808, column_cnt:2, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100006}, table_count:1, [{i:0, table_key:{tablet_id:{id:100006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae97910, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588227, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=582us|(0.90)|total=645us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.600792] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=13] succeed to build merge ctx(tablet_id={id:4}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:4}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227546176, schema_version:1681902227546176, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967baa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, 
schema_version:1681902227546176, column_cnt:30, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, 
{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-32}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:4}, table_count:1, [{i:0, table_key:{tablet_id:{id:4}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967b520, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221594998, estimated_finish_time:0}, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=725us|(0.92)|total=784us, 
rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.601067] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=167] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}, scanned_row_cnt_arr:0x7f542c0355f0, output_block_cnt_arr:0x7f542c0355f8, concurrent_cnt:1, estimate_row_cnt:29, estimate_occupy_size:0, latest_update_ts:1710486221588227, estimated_finish_time:1710486238601065}) [2024-03-15 07:03:41.601077] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=271] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}, scanned_row_cnt_arr:0x7f542c0835f0, output_block_cnt_arr:0x7f542c0835f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221594998, estimated_finish_time:1710486238601075}) [2024-03-15 07:03:41.601092] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=24] succeed to init merge ctx(task={this:0x7f5444890080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}}) [2024-03-15 07:03:41.601095] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=18] succeed to init merge ctx(task={this:0x7f5444832080, type:15, status:2, dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}}) [2024-03-15 07:03:41.601110] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [831][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=16] task finish process(ret=0, start_time=1710486221600193, end_time=1710486221601108, runtime=915, *this={this:0x7f5444890080, type:15, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}}) [2024-03-15 07:03:41.601109] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [839][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=12] task finish process(ret=0, start_time=1710486221599985, end_time=1710486221601108, runtime=1123, *this={this:0x7f5444832080, type:15, status:2, dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}}) [2024-03-15 07:03:41.601177] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) 
[858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=19] schedule one task(task={this:0x7f54448901b0, type:1, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.601363] ERROR alloc_block (ob_local_device.cpp:716) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=11][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.601380] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=17][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.601398] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=1] partition merge iter row count(i=0, row_count=4, ghost_row_count=0, pkey={tablet_id:{id:101004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444817c00, key:{tablet_id:{id:101004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710234134068595581}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482178162389}, this:0x7f5444817c00, timestamp:1710482178162389, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:89, clock:62914560}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710385376303090625}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824699, mt_stat_.ready_for_flush_time:1710483939824751, mt_stat_.create_flush_dag_time:1710486221594494, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.601482] ERROR alloc_block (ob_local_device.cpp:716) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=18][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.601501] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=18][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.601503] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221601229, end_time=1710486221601501, runtime=272, *this={this:0x7f54448561b0, type:1, status:2, dag:{this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:0, dag_status:2, start_time:1710486221598381, running_task_cnt:1, indegree:0, 
hash:663552013962694154}}) [2024-03-15 07:03:41.601518] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=0] partition merge iter row count(i=0, row_count=29, ghost_row_count=0, pkey={tablet_id:{id:100006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e43d0, key:{tablet_id:{id:100006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329659}, this:0x7f54629e43d0, timestamp:1710482160329659, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:58, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824033, mt_stat_.ready_for_flush_time:1710483939824074, mt_stat_.create_flush_dag_time:1710486221588220, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.601625] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221601218, end_time=1710486221601623, runtime=405, *this={this:0x7f54448901b0, type:1, status:2, dag:{this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:0, dag_status:2, start_time:1710486221594610, running_task_cnt:1, indegree:0, hash:-4203726123947673128}}) [2024-03-15 07:03:41.601612] ERROR alloc_block (ob_local_device.cpp:716) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=22][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.601653] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=40][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.601681] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=0] partition merge iter row count(i=0, row_count=158, ghost_row_count=0, pkey={tablet_id:{id:1}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444814550, key:{tablet_id:{id:1}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710362652435471403}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482174228242}, this:0x7f5444814550, timestamp:1710482174228242, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:82, clock:56623104}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, 
logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710362672384988561}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824350, mt_stat_.ready_for_flush_time:1710483939824391, mt_stat_.create_flush_dag_time:1710486221588575, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.601791] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221600343, end_time=1710486221601789, runtime=1446, *this={this:0x7f54448781b0, type:1, status:2, dag:{this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:0, dag_status:2, start_time:1710486221598307, running_task_cnt:1, indegree:0, hash:-1728883537140023654}}) [2024-03-15 07:03:41.601543] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=3160, dag_cnt=13, dag_cnts_[dag.get_type()]=13, &dag=0x7f54448b74c0, dag={this:0x7f54448b74c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC225-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221598381, running_task_cnt:0, indegree:0, hash:663552013962694154}) [2024-03-15 07:03:41.601874] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [852][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC225-0-0] [lt=320] succeed to del sys task(removed_task={start_time:1710486221598365, task_id:YB427F000001-000613ACAD3FC225-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101004"}) [2024-03-15 07:03:41.601986] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=22] succeed to add sys task(task={start_time:1710486221601980, task_id:YB427F000001-000613ACAD3FC228-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100005"}) [2024-03-15 07:03:41.602002] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=14] schedule one task(task={this:0x7f5444836080, type:15, status:2, dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.602046] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=7434, dag_cnt=12, dag_cnts_[dag.get_type()]=12, &dag=0x7f544482d4c0, dag={this:0x7f544482d4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC21F-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221594610, running_task_cnt:0, indegree:0, hash:-4203726123947673128}) [2024-03-15 07:03:41.602062] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) 
[102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=22] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=9728, clean_start_pos=1258280, clean_num=31457) [2024-03-15 07:03:41.602070] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [837][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC21F-0-0] [lt=19] succeed to del sys task(removed_task={start_time:1710486221594573, task_id:YB427F000001-000613ACAD3FC21F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100006"}) [2024-03-15 07:03:41.602118] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=14][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.602140] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=17] schedule one task(task={this:0x7f54448321b0, type:1, status:2, dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.602148] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=0] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.602200] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=25] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.602222] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=19] get storage schema to merge(ls_id={id:1}, tablet_id={id:100005}, schema_ctx={base_schema_version:0, schema_version:1681902227896256, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae97440, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227896256, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, 
is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.602322] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=17] succeed to add sys task(task={start_time:1710486221602319, task_id:YB427F000001-000613ACAD3FC229-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=115"}) [2024-03-15 07:03:41.602334] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=11] schedule one task(task={this:0x7f544486c080, type:15, status:2, dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.602360] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=122] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.602394] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=25][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.602445] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=1] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.602484] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=15] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.602560] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.602585] ERROR try_recycle_blocks 
(palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.602457] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=26] succeed to build merge ctx(tablet_id={id:100005}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227896256, schema_version:1681902227896256, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae97440, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227896256, column_cnt:4, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100005}, table_count:1, [{i:0, table_key:{tablet_id:{id:100005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, 
merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae96ec0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595118, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=79us|(0.27)|GET_PARALLEL_RANGE=209us|(0.73)|total=288us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.602691] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=214] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}, scanned_row_cnt_arr:0x7f542c0855f0, output_block_cnt_arr:0x7f542c0855f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595118, estimated_finish_time:1710486238602688}) [2024-03-15 07:03:41.602727] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=35] succeed to init merge ctx(task={this:0x7f5444836080, type:15, status:2, dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}}) [2024-03-15 07:03:41.602504] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=16] get storage schema to merge(ls_id={id:1}, tablet_id={id:115}, schema_ctx={base_schema_version:0, schema_version:1681902229317776, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54796840b0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229317776, column_cnt:31, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, 
is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-32}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.602755] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [818][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=27] task finish process(ret=0, start_time=1710486221602098, end_time=1710486221602753, runtime=655, *this={this:0x7f5444836080, type:15, status:2, dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}}) [2024-03-15 07:03:41.602785] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=269] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:115}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.602895] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=15] schedule one task(task={this:0x7f54448361b0, type:1, status:2, dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.603131] ERROR alloc_block (ob_local_device.cpp:716) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=11][errcode=-4184] Server out of disk space(msg="Fail to alloc 
block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.602855] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=24] succeed to build merge ctx(tablet_id={id:115}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:115}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234246707813220, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229317776, schema_version:1681902229317776, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f54796840b0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229317776, column_cnt:31, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:21, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"LONGTEXT", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-32}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:115}, table_count:1, [{i:0, table_key:{tablet_id:{id:115}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f5479683b30, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595164, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=88us|(0.20)|GET_PARALLEL_RANGE=346us|(0.80)|total=434us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.603219] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=343] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}, scanned_row_cnt_arr:0x7f542c0195f0, output_block_cnt_arr:0x7f542c0195f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595164, estimated_finish_time:1710486238603216}) [2024-03-15 07:03:41.603174] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=1] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:4}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444818ba0, key:{tablet_id:{id:4}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180521922}, this:0x7f5444818ba0, timestamp:1710482180521922, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:91, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, 
max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824792, mt_stat_.ready_for_flush_time:1710483939824847, mt_stat_.create_flush_dag_time:1710486221594830, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.603252] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=31] succeed to init merge ctx(task={this:0x7f544486c080, type:15, status:2, dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}}) [2024-03-15 07:03:41.603277] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [826][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=23] task finish process(ret=0, start_time=1710486221602375, end_time=1710486221603274, runtime=899, *this={this:0x7f544486c080, type:15, status:2, dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}}) [2024-03-15 07:03:41.603308] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221602946, end_time=1710486221603306, runtime=360, *this={this:0x7f54448321b0, type:1, status:2, dag:{this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:0, dag_status:2, start_time:1710486221599929, running_task_cnt:1, indegree:0, hash:-7283149547720778478}}) [2024-03-15 07:03:41.603354] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=3422, dag_cnt=11, dag_cnts_[dag.get_type()]=11, &dag=0x7f54448b79d0, dag={this:0x7f54448b79d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC226-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221599929, running_task_cnt:0, indegree:0, hash:-7283149547720778478}) [2024-03-15 07:03:41.603391] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [846][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC226-0-0] [lt=29] succeed to del sys task(removed_task={start_time:1710486221599909, task_id:YB427F000001-000613ACAD3FC226-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=4"}) [2024-03-15 07:03:41.603560] ERROR alloc_block (ob_local_device.cpp:716) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=10][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.603578] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=0] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:100005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444819b40, key:{tablet_id:{id:100005}, column_group_idx:0, table_type:"MEMTABLE", 
scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522450}, this:0x7f5444819b40, timestamp:1710482180522450, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:93, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824978, mt_stat_.ready_for_flush_time:1710483939825074, mt_stat_.create_flush_dag_time:1710486221595113, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.603642] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221603460, end_time=1710486221603641, runtime=181, *this={this:0x7f54448361b0, type:1, status:2, dag:{this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:0, dag_status:2, start_time:1710486221602000, running_task_cnt:1, indegree:0, hash:-708937556109252}}) [2024-03-15 07:03:41.603662] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=13][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.603692] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1690, dag_cnt=10, dag_cnts_[dag.get_type()]=10, &dag=0x7f544484a590, dag={this:0x7f544484a590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC228-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221602000, running_task_cnt:0, indegree:0, hash:-708937556109252}) [2024-03-15 07:03:41.603696] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=1] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.603711] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [819][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC228-0-0] [lt=15] succeed to del sys task(removed_task={start_time:1710486221601980, task_id:YB427F000001-000613ACAD3FC228-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100005"}) [2024-03-15 07:03:41.603733] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=15] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.603770] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=22] schedule one task(task={this:0x7f544486c1b0, type:1, status:2, dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, 
dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.603795] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=18] succeed to add sys task(task={start_time:1710486221603793, task_id:YB427F000001-000613ACAD3FC22A-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=104"}) [2024-03-15 07:03:41.603807] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=10] schedule one task(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, hash:5347700507801701344}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.603827] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=14] succeed to add sys task(task={start_time:1710486221603825, task_id:YB427F000001-000613ACAD3FC22B-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101006"}) [2024-03-15 07:03:41.603836] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=9] schedule one task(task={this:0x7f5444870080, type:15, status:2, dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.603779] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=39] get storage schema to merge(ls_id={id:1}, tablet_id={id:100001}, schema_ctx={base_schema_version:0, schema_version:1681902227002552, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae94b00, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227002552, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, 
is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.603883] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=90] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.603921] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=0] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.603937] ERROR alloc_block (ob_local_device.cpp:716) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=12][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.603938] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=24][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.603964] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=23] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.603968] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=0] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.603969] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=0] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:115}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481a310, key:{tablet_id:{id:115}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522638}, this:0x7f544481a310, timestamp:1710482180522638, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:94, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, 
contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825116, mt_stat_.ready_for_flush_time:1710483939825158, mt_stat_.create_flush_dag_time:1710486221595160, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.604005] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=14] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.603982] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=15] get storage schema to merge(ls_id={id:1}, tablet_id={id:101006}, schema_ctx={base_schema_version:0, schema_version:1681902229095512, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550aebb050, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229095512, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.604036] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=1] task finish process(ret=-4184, start_time=1710486221603837, end_time=1710486221604035, runtime=198, *this={this:0x7f544486c1b0, type:1, status:2, dag:{this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:0, dag_status:2, start_time:1710486221602333, running_task_cnt:1, indegree:0, hash:5238607534375264894}}) [2024-03-15 07:03:41.604072] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=1738, dag_cnt=9, dag_cnts_[dag.get_type()]=9, &dag=0x7f544484aaa0, dag={this:0x7f544484aaa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC229-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221602333, running_task_cnt:0, indegree:0, hash:5238607534375264894}) [2024-03-15 07:03:41.604068] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=73] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, 
tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.604091] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [841][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC229-0-0] [lt=15] succeed to del sys task(removed_task={start_time:1710486221602319, task_id:YB427F000001-000613ACAD3FC229-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=115"}) [2024-03-15 07:03:41.603964] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=38] succeed to build merge ctx(tablet_id={id:100001}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:100001}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1710234133899240143, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902227002552, schema_version:1681902227002552, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550ae94b00, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902227002552, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:100001}, table_count:1, [{i:0, table_key:{tablet_id:{id:100001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550ae94580, obj_pool:0x7f54a23f3cb0, wash_priority:0}, 
merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221588309, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=69us|(0.25)|GET_PARALLEL_RANGE=210us|(0.75)|total=279us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.604025] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=16] get storage schema to merge(ls_id={id:1}, tablet_id={id:104}, schema_ctx={base_schema_version:0, schema_version:1681902229091880, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967e3e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229091880, column_cnt:10, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.604167] INFO 
[STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=130] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:104}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.604161] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=171] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}, scanned_row_cnt_arr:0x7f542c0115f0, output_block_cnt_arr:0x7f542c0115f8, concurrent_cnt:1, estimate_row_cnt:3, estimate_occupy_size:0, latest_update_ts:1710486221588309, estimated_finish_time:1710486238604158}) [2024-03-15 07:03:41.604200] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=37] succeed to init merge ctx(task={this:0x7f5444892080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}}) [2024-03-15 07:03:41.604230] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [829][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=28] task finish process(ret=0, start_time=1710486221603642, end_time=1710486221604227, runtime=585, *this={this:0x7f5444892080, type:15, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}}) [2024-03-15 07:03:41.604139] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=35] succeed to build merge ctx(tablet_id={id:101006}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101006}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1704165798110799937, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229095512, schema_version:1681902229095512, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550aebb050, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229095512, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, 
{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101006}, table_count:1, [{i:0, table_key:{tablet_id:{id:101006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550aebaad0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595591, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=51us|(0.24)|GET_PARALLEL_RANGE=159us|(0.76)|total=210us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.604275] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=14] schedule one task(task={this:0x7f54448921b0, type:1, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.604305] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=22] succeed to add sys task(task={start_time:1710486221604302, task_id:YB427F000001-000613ACAD3FC22C-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=105"}) [2024-03-15 07:03:41.604317] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=11] schedule one task(task={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, 
running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.604305] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=149] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}, scanned_row_cnt_arr:0x7f542c0335f0, output_block_cnt_arr:0x7f542c0335f8, concurrent_cnt:1, estimate_row_cnt:2, estimate_occupy_size:0, latest_update_ts:1710486221595591, estimated_finish_time:1710486238604303}) [2024-03-15 07:03:41.604227] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=23] succeed to build merge ctx(tablet_id={id:104}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:104}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1704165798110799937, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229091880, schema_version:1681902229091880, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967e3e0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229091880, column_cnt:10, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:104}, table_count:1, [{i:0, table_key:{tablet_id:{id:104}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967de60, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595299, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=63us|(0.23)|GET_PARALLEL_RANGE=207us|(0.77)|total=270us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.604474] ERROR alloc_block (ob_local_device.cpp:716) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=12][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.604500] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=25][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.604491] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=244] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, hash:5347700507801701344}, scanned_row_cnt_arr:0x7f542c01b5f0, output_block_cnt_arr:0x7f542c01b5f8, concurrent_cnt:1, estimate_row_cnt:2, estimate_occupy_size:0, latest_update_ts:1710486221595299, estimated_finish_time:1710486238604489}) [2024-03-15 07:03:41.604523] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=31] succeed to init merge ctx(task={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, 
hash:5347700507801701344}}) [2024-03-15 07:03:41.604543] ERROR alloc_block (ob_local_device.cpp:716) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=28][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.604335] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=29] succeed to init merge ctx(task={this:0x7f5444870080, type:15, status:2, dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}}) [2024-03-15 07:03:41.604448] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=15][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.604557] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=13][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.604547] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [828][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=22] task finish process(ret=0, start_time=1710486221603914, end_time=1710486221604545, runtime=631, *this={this:0x7f544482e080, type:15, status:2, dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, hash:5347700507801701344}}) [2024-03-15 07:03:41.604576] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=1] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.604564] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [856][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=224] task finish process(ret=0, start_time=1710486221603893, end_time=1710486221604560, runtime=667, *this={this:0x7f5444870080, type:15, status:2, dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}}) [2024-03-15 07:03:41.604523] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=0] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:100004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f5444819370, key:{tablet_id:{id:100004}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482180522242}, this:0x7f5444819370, timestamp:1710482180522242, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:92, clock:67108864}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, 
contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710399127821303489}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824886, mt_stat_.ready_for_flush_time:1710483939824939, mt_stat_.create_flush_dag_time:1710486221595060, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.604571] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=0] partition merge iter row count(i=0, row_count=7, ghost_row_count=0, pkey={tablet_id:{id:100001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f54629e4ba0, key:{tablet_id:{id:100001}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710235938364579048}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482160329884}, this:0x7f54629e4ba0, timestamp:1710482160329884, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:59, clock:14680064}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710300088531356512}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939824112, mt_stat_.ready_for_flush_time:1710483939824153, mt_stat_.create_flush_dag_time:1710486221588302, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.604605] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=12] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.604639] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221604402, end_time=1710486221604638, runtime=236, *this={this:0x7f54448921b0, type:1, status:2, dag:{this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:0, dag_status:2, start_time:1710486221595505, running_task_cnt:1, indegree:0, hash:5462442946523183119}}) [2024-03-15 07:03:41.604629] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=6319, dag_cnt=8, dag_cnts_[dag.get_type()]=8, &dag=0x7f54448b6aa0, dag={this:0x7f54448b6aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC223-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221598307, running_task_cnt:0, indegree:0, hash:-1728883537140023654}) [2024-03-15 07:03:41.604650] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221604301, 
end_time=1710486221604648, runtime=347, *this={this:0x7f54448341b0, type:1, status:2, dag:{this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:0, dag_status:2, start_time:1710486221600330, running_task_cnt:1, indegree:0, hash:-245371073307898354}}) [2024-03-15 07:03:41.604666] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [857][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC223-0-0] [lt=30] succeed to del sys task(removed_task={start_time:1710486221598285, task_id:YB427F000001-000613ACAD3FC223-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=1"}) [2024-03-15 07:03:41.604619] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=11] get storage schema to merge(ls_id={id:1}, tablet_id={id:105}, schema_ctx={base_schema_version:0, schema_version:1681902229103224, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967ee30, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229103224, column_cnt:12, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", 
collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.604789] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=9282, dag_cnt=7, dag_cnts_[dag.get_type()]=7, &dag=0x7f544482d9d0, dag={this:0x7f544482d9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC220-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221595505, running_task_cnt:0, indegree:0, hash:5462442946523183119}) [2024-03-15 07:03:41.604809] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [847][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC220-0-0] [lt=16] succeed to del sys task(removed_task={start_time:1710486221595485, task_id:YB427F000001-000613ACAD3FC220-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100001"}) [2024-03-15 07:03:41.604726] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=98] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:105}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.604942] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=181] succeed to build merge ctx(tablet_id={id:105}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:105}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1705472206856509154, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229103224, schema_version:1681902229103224, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967ee30, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229103224, column_cnt:12, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", 
coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:105}, table_count:1, [{i:0, table_key:{tablet_id:{id:105}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967e8b0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595684, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=154us|(0.32)|GET_PARALLEL_RANGE=321us|(0.68)|total=475us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.605526] INFO 
[STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=536] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}, scanned_row_cnt_arr:0x7f542c02b5f0, output_block_cnt_arr:0x7f542c02b5f8, concurrent_cnt:1, estimate_row_cnt:2, estimate_occupy_size:0, latest_update_ts:1710486221595684, estimated_finish_time:1710486238605522}) [2024-03-15 07:03:41.605573] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=55] schedule one task(task={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, hash:5347700507801701344}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.605566] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=39] succeed to init merge ctx(task={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}}) [2024-03-15 07:03:41.605619] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [849][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=51] task finish process(ret=0, start_time=1710486221604418, end_time=1710486221605617, runtime=1199, *this={this:0x7f5444850080, type:15, status:2, dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}}) [2024-03-15 07:03:41.605637] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=41] schedule one task(task={this:0x7f54448701b0, type:1, status:2, dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.605675] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=28] succeed to add sys task(task={start_time:1710486221605670, task_id:YB427F000001-000613ACAD3FC22D-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=102"}) [2024-03-15 07:03:41.605698] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=20] schedule one task(task={this:0x7f544485a080, type:15, status:2, dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, 
start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=5, running_task_cnts_[priority]=5, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.605738] ERROR alloc_block (ob_local_device.cpp:716) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=20][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.605736] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=25] succeed to add sys task(task={start_time:1710486221605732, task_id:YB427F000001-000613ACAD3FC22E-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101005"}) [2024-03-15 07:03:41.605755] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=18] schedule one task(task={this:0x7f544485c080, type:15, status:2, dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=6, running_task_cnts_[priority]=6, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.605758] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=0] partition merge iter row count(i=0, row_count=2, ghost_row_count=0, pkey={tablet_id:{id:104}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481aae0, key:{tablet_id:{id:104}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181204870}, this:0x7f544481aae0, timestamp:1710482181204870, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:95, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825197, mt_stat_.ready_for_flush_time:1710483939825238, mt_stat_.create_flush_dag_time:1710486221595207, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.605823] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=5489, dag_cnt=6, dag_cnts_[dag.get_type()]=6, &dag=0x7f544484a080, dag={this:0x7f544484a080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC227-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221600330, 
running_task_cnt:0, indegree:0, hash:-245371073307898354}) [2024-03-15 07:03:41.605821] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221605647, end_time=1710486221605820, runtime=173, *this={this:0x7f544482e1b0, type:1, status:2, dag:{this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:0, dag_status:2, start_time:1710486221603805, running_task_cnt:1, indegree:0, hash:5347700507801701344}}) [2024-03-15 07:03:41.605836] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=0] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.605857] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [853][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC227-0-0] [lt=27] succeed to del sys task(removed_task={start_time:1710486221600311, task_id:YB427F000001-000613ACAD3FC227-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=100004"}) [2024-03-15 07:03:41.605878] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=19] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.605931] ERROR alloc_block (ob_local_device.cpp:716) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=53][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.605965] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=32][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.605898] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=17] get storage schema to merge(ls_id={id:1}, tablet_id={id:101005}, schema_ctx={base_schema_version:0, schema_version:1681902229072200, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550aeba600, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229072200, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", 
coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.606005] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=94] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.606038] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=2232, dag_cnt=5, dag_cnts_[dag.get_type()]=5, &dag=0x7f544484afb0, dag={this:0x7f544484afb0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22A-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221603805, running_task_cnt:0, indegree:0, hash:5347700507801701344}) [2024-03-15 07:03:41.606060] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [854][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22A-0-0] [lt=18] succeed to del sys task(removed_task={start_time:1710486221603793, task_id:YB427F000001-000613ACAD3FC22A-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=104"}) [2024-03-15 07:03:41.605995] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=1] partition merge iter row count(i=0, row_count=2, ghost_row_count=0, pkey={tablet_id:{id:101006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481b2b0, key:{tablet_id:{id:101006}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181205417}, this:0x7f544481b2b0, timestamp:1710482181205417, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:96, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825299, mt_stat_.ready_for_flush_time:1710483939825357, mt_stat_.create_flush_dag_time:1710486221595496, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.606114] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221605692, end_time=1710486221606112, runtime=420, *this={this:0x7f54448701b0, type:1, status:2, dag:{this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:0, dag_status:2, start_time:1710486221603835, running_task_cnt:1, indegree:0, hash:2191978846819725395}}) 
[2024-03-15 07:03:41.606149] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=1] dag finished(dag_ret=-4184, runtime=2312, dag_cnt=4, dag_cnts_[dag.get_type()]=4, &dag=0x7f544484b4c0, dag={this:0x7f544484b4c0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22B-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221603835, running_task_cnt:0, indegree:0, hash:2191978846819725395}) [2024-03-15 07:03:41.606063] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=24] succeed to build merge ctx(tablet_id={id:101005}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:101005}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1705474017723060197, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229072200, schema_version:1681902229072200, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f550aeba600, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:5, index_type:1, index_status:1, row_store_type:1, schema_version:1681902229072200, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, order:0}, {column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:17, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:101005}, table_count:1, [{i:0, table_key:{tablet_id:{id:101005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705475766103355460}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705475766103355460}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f550aeba080, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, 
concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595875, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=45us|(0.21)|GET_PARALLEL_RANGE=170us|(0.79)|total=215us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.606252] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=170] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}, scanned_row_cnt_arr:0x7f542c0715f0, output_block_cnt_arr:0x7f542c0715f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595875, estimated_finish_time:1710486238606249}) [2024-03-15 07:03:41.606176] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [824][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22B-0-0] [lt=20] succeed to del sys task(removed_task={start_time:1710486221603825, task_id:YB427F000001-000613ACAD3FC22B-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101006"}) [2024-03-15 07:03:41.606284] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=31] succeed to init merge ctx(task={this:0x7f544485c080, type:15, status:2, dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}}) [2024-03-15 07:03:41.606309] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [821][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=23] task finish process(ret=0, start_time=1710486221605813, end_time=1710486221606306, runtime=493, *this={this:0x7f544485c080, type:15, status:2, dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}}) [2024-03-15 07:03:41.606370] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=18][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.606363] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=25] schedule one task(task={this:0x7f54448501b0, type:1, status:2, dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.606394] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=1] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], 
info_list_[1]=[]) [2024-03-15 07:03:41.606450] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=10] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.606415] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=29] schedule one task(task={this:0x7f544485c1b0, type:1, status:2, dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.606479] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [858][T1004_DagSchedu][T1004][Y0-0000000000000000-0-0] [lt=50] succeed to add sys task(task={start_time:1710486221606473, task_id:YB427F000001-000613ACAD3FC22F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=103"}) [2024-03-15 07:03:41.606501] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=20] schedule one task(task={this:0x7f544485e080, type:15, status:2, dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=4, running_task_cnts_[priority]=4, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.606567] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=21][errcode=-4018] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.606573] ERROR alloc_block (ob_local_device.cpp:716) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=12][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.606586] INFO [STORAGE] get_freeze_info_behind_snapshot_version_ (ob_tenant_freeze_info_mgr.cpp:297) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=1] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.606607] INFO [STORAGE] check_tx_table_ready (ob_tenant_tablet_scheduler.cpp:589) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=8] tx table ready(sstable_end_scn={val:1710506547144172701}, max_decided_scn={val:1710506547144173838}) [2024-03-15 07:03:41.606592] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=0] partition merge iter row count(i=0, row_count=2, ghost_row_count=0, pkey={tablet_id:{id:105}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481c080, key:{tablet_id:{id:105}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705473963136408068}, 
end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181205880}, this:0x7f544481c080, timestamp:1710482181205880, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:97, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434834810234890}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825396, mt_stat_.ready_for_flush_time:1710483939825456, mt_stat_.create_flush_dag_time:1710486221595677, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.606658] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221606445, end_time=1710486221606657, runtime=212, *this={this:0x7f54448501b0, type:1, status:2, dag:{this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:0, dag_status:2, start_time:1710486221604316, running_task_cnt:1, indegree:0, hash:-374125254385398122}}) [2024-03-15 07:03:41.606681] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=2364, dag_cnt=3, dag_cnts_[dag.get_type()]=3, &dag=0x7f544484b9d0, dag={this:0x7f544484b9d0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22C-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221604316, running_task_cnt:0, indegree:0, hash:-374125254385398122}) [2024-03-15 07:03:41.606698] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [843][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22C-0-0] [lt=14] succeed to del sys task(removed_task={start_time:1710486221604302, task_id:YB427F000001-000613ACAD3FC22C-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=105"}) [2024-03-15 07:03:41.606466] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=13] get storage schema to merge(ls_id={id:1}, tablet_id={id:102}, schema_ctx={base_schema_version:0, schema_version:1681902229006752, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967cf40, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229006752, column_cnt:41, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"%", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", 
collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.606786] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=308] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:102}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.606618] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=9] get storage schema to merge(ls_id={id:1}, tablet_id={id:103}, schema_ctx={base_schema_version:0, schema_version:1681902229079216, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967d990, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229079216, column_cnt:43, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"VARCHAR":"%", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=true) [2024-03-15 07:03:41.606845] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=219] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:103}, 
report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.606883] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=13] succeed to build merge ctx(tablet_id={id:103}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:103}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1705475766002111961, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229079216, schema_version:1681902229079216, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967d990, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229079216, column_cnt:43, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:20, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"%", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, 
orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", 
coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:103}, table_count:1, [{i:0, table_key:{tablet_id:{id:103}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967d410, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, 
merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595949, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=40us|(0.13)|GET_PARALLEL_RANGE=262us|(0.87)|total=302us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.606861] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=21] succeed to build merge ctx(tablet_id={id:102}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:102}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:1705475766002111961, base_version:0, snapshot_version:1710506547144172701}, create_snapshot_version:0, is_full_merge:false, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1681902229006752, schema_version:1681902229006752, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f547967cf40, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:0, index_type:0, index_status:1, row_store_type:1, schema_version:1681902229006752, column_cnt:41, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:1, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:18, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}, {column_idx:19, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"%", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"VARCHAR", 
collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, 
is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"VARCHAR", collation:"utf8mb4_general_ci", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"VARCHAR":"", collation:"utf8mb4_general_ci", coercibility:"IMPLICIT"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":-1}}, {meta_type:{type:"TIMESTAMP", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}, {meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"BIGINT":0}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54a23e2030, allocator_:null, tablet_id:{id:102}, table_count:1, [{i:0, table_key:{tablet_id:{id:102}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f547a96e040, ls_:0x7f54639da150, mod_:1}, tablet_handle:{obj:0x7f547967c9c0, obj_pool:0x7f54a23f3cb0, wash_priority:0}, merge_progress:{is_inited:false, merge_dag:NULL, scanned_row_cnt_arr:null, output_block_cnt_arr:null, 
concurrent_cnt:0, estimate_row_cnt:0, estimate_occupy_size:0, latest_update_ts:1710486221595741, estimated_finish_time:0}, compaction_filter:NULL, time_guard:COMPACTION_POLICY=50us|(0.11)|GET_PARALLEL_RANGE=401us|(0.89)|total=451us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.607273] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=394] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}, scanned_row_cnt_arr:0x7f542c02d5f0, output_block_cnt_arr:0x7f542c02d5f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595741, estimated_finish_time:1710486238607271}) [2024-03-15 07:03:41.607298] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=24] succeed to init merge ctx(task={this:0x7f544485a080, type:15, status:2, dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}}) [2024-03-15 07:03:41.607315] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [851][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=15] task finish process(ret=0, start_time=1710486221606356, end_time=1710486221607313, runtime=957, *this={this:0x7f544485a080, type:15, status:2, dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}}) [2024-03-15 07:03:41.607301] INFO [STORAGE.COMPACTION] prepare_merge_progress (ob_tablet_merge_ctx.cpp:1192) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=401] succeed to init merge progress(ret=0, merge_progress_={is_inited:true, merge_dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}, scanned_row_cnt_arr:0x7f542c0735f0, output_block_cnt_arr:0x7f542c0735f8, concurrent_cnt:1, estimate_row_cnt:1, estimate_occupy_size:0, latest_update_ts:1710486221595949, estimated_finish_time:1710486238607299}) [2024-03-15 07:03:41.607357] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=55] succeed to init merge ctx(task={this:0x7f544485e080, type:15, status:2, dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}}) [2024-03-15 07:03:41.607373] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [844][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=15] task finish process(ret=0, start_time=1710486221606554, end_time=1710486221607371, runtime=817, *this={this:0x7f544485e080, type:15, status:2, dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, 
dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}}) [2024-03-15 07:03:41.607389] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.607405] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=16][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.607476] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065857}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486221607377) [2024-03-15 07:03:41.607498] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486221406495, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.607508] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=25] schedule one task(task={this:0x7f544485a1b0, type:1, status:2, dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=2, running_task_cnts_[priority]=2, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.607564] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [858][T1004_DagSchedu][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=36] schedule one task(task={this:0x7f544485e1b0, type:1, status:2, dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=3, running_task_cnts_[priority]=3, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.607731] ERROR alloc_block (ob_local_device.cpp:716) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=19][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.607762] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=0] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:103}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, 
end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481d7f0, key:{tablet_id:{id:103}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350878}, this:0x7f544481d7f0, timestamp:1710482181350878, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:100, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825690, mt_stat_.ready_for_flush_time:1710483939825766, mt_stat_.create_flush_dag_time:1710486221595943, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.607789] ERROR alloc_block (ob_local_device.cpp:716) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=13][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.607821] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=32][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.607834] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221607621, end_time=1710486221607834, runtime=213, *this={this:0x7f544485e1b0, type:1, status:2, dag:{this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:0, dag_status:2, start_time:1710486221606499, running_task_cnt:1, indegree:0, hash:-285687946438837293}}) [2024-03-15 07:03:41.607858] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=1357, dag_cnt=2, dag_cnts_[dag.get_type()]=2, &dag=0x7f5444866aa0, dag={this:0x7f5444866aa0, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22F-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221606499, running_task_cnt:0, indegree:0, hash:-285687946438837293}) [2024-03-15 07:03:41.607879] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [832][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22F-0-0] [lt=18] succeed to del sys task(removed_task={start_time:1710486221606473, task_id:YB427F000001-000613ACAD3FC22F-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=103"}) [2024-03-15 07:03:41.607850] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=0] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:102}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481c850, key:{tablet_id:{id:102}, column_group_idx:0, table_type:"MEMTABLE", 
scn_range:{start_scn:{val:1705544383577535181}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350363}, this:0x7f544481c850, timestamp:1710482181350363, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:98, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825506, mt_stat_.ready_for_flush_time:1710483939825569, mt_stat_.create_flush_dag_time:1710486221595736, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.608062] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=1] task finish process(ret=-4184, start_time=1710486221607594, end_time=1710486221608057, runtime=463, *this={this:0x7f544485a1b0, type:1, status:2, dag:{this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:0, dag_status:2, start_time:1710486221605696, running_task_cnt:1, indegree:0, hash:-7831358327520612074}}) [2024-03-15 07:03:41.608168] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=0] dag finished(dag_ret=-4184, runtime=2469, dag_cnt=1, dag_cnts_[dag.get_type()]=1, &dag=0x7f5444866080, dag={this:0x7f5444866080, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22D-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221605696, running_task_cnt:0, indegree:0, hash:-7831358327520612074}) [2024-03-15 07:03:41.608255] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [835][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22D-0-0] [lt=77] succeed to del sys task(removed_task={start_time:1710486221605670, task_id:YB427F000001-000613ACAD3FC22D-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=102"}) [2024-03-15 07:03:41.608605] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221532205, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, 
commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.608706] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] will sleep(sleep_us=13000, remain_us=996484, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=13, timeout_timestamp=1710486222605189) [2024-03-15 07:03:41.610448] ERROR alloc_block (ob_local_device.cpp:716) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=29][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.610503] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=54][errcode=-4184] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.610534] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=1] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:101005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705475766103355460}, end_scn:{val:1710506547144172701}}}, table={ObITable:{this:0x7f544481d020, key:{tablet_id:{id:101005}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1705475766103355460}, end_scn:{val:1710506547144172701}}}, ref_cnt:3, upper_trans_version:9223372036854775807, timestamp:1710482181350654}, this:0x7f544481d020, timestamp:1710482181350654, state:0, freeze_clock:0, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:99, clock:71303168}, host:0x7f54b49fc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482146675053}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547144172701}, rec_scn:{val:1710434961555661114}, snapshot_version:{val:1710506547144172701}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710483939825610, mt_stat_.ready_for_flush_time:1710483939825652, mt_stat_.create_flush_dag_time:1710486221595867, mt_stat_.release_time:0, mt_stat_.last_print_time:0}) [2024-03-15 07:03:41.610712] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=0] task finish process(ret=-4184, start_time=1710486221610316, end_time=1710486221610710, runtime=394, *this={this:0x7f544485c1b0, type:1, status:2, dag:{this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:0, dag_status:2, start_time:1710486221605753, running_task_cnt:1, indegree:0, hash:724552403687646686}}) [2024-03-15 07:03:41.610759] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=1] 
dag finished(dag_ret=-4184, runtime=5004, dag_cnt=0, dag_cnts_[dag.get_type()]=0, &dag=0x7f5444866590, dag={this:0x7f5444866590, type:0, name:"MINI_MERGE", id:YB427F000001-000613ACAD3FC22E-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221605753, running_task_cnt:0, indegree:0, hash:724552403687646686}) [2024-03-15 07:03:41.610783] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [838][T1004_MINI_MERG][T1004][YB427F000001-000613ACAD3FC22E-0-0] [lt=18] succeed to del sys task(removed_task={start_time:1710486221605732, task_id:YB427F000001-000613ACAD3FC22E-0-0, task_type:3, svr_ip:"127.0.0.1:2882", tenant_id:1004, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=101005"}) [2024-03-15 07:03:41.610966] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-4023] get gts error(ret=-4023) [2024-03-15 07:03:41.611047] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=36] [PALF STAT GROUP LOG INFO](palf_id=1, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=122, avg_group_log_size=122) [2024-03-15 07:03:41.611080] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=29] [PALF STAT APPEND DATA SIZE](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, append size=122) [2024-03-15 07:03:41.611127] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] get wrs ts(ls_id={id:1}, delta=146266759052, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:41.611176] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=44] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:41.611618] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=31][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-4018, dropped:18, tid:884}, {errcode:-4184, dropped:411, tid:779}, {errcode:-5627, dropped:147, tid:913}]) [2024-03-15 07:03:41.612714] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.612741] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, 
used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.617533] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [202][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=28] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.618124] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [204][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=11] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.618129] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [203][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=20] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.619281] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [197][BatchIO][T0][Y0-0000000000000000-0-0] [lt=14] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.619477] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [206][MysqlUnix][T0][Y0-0000000000000000-0-0] [lt=11] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.619782] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [198][BatchIO][T0][Y0-0000000000000000-0-0] [lt=14] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.620317] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [199][BatchIO][T0][Y0-0000000000000000-0-0] [lt=79] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.620642] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [200][BatchIO][T0][Y0-0000000000000000-0-0] [lt=21] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.620812] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [208][RpcUnix][T0][Y0-0000000000000000-0-0] [lt=41] [RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:41.622645] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.622695] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=49][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.622975] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", 
disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.623039] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.625854] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7C-0-0] [lt=1] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:41.625896] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7C-0-0] [lt=40][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.629187] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.633183] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.633224] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.639303] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:41.643359] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.643417] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=58][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.650329] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.650371] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=41][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.650393] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486221650311) [2024-03-15 07:03:41.650415] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486221550254, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.650470] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=44] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1004, serve_leader_epoch=0, cur_leader_epoch=431, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:41.650528] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=31] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1004, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:41.650552] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=23] start TenantWeakReadClusterService(tenant_id=1004) [2024-03-15 07:03:41.652149] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9848-0-0] [lt=26][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.652199] WDIAG 
[SERVER] query (ob_inner_sql_connection.cpp:753) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9848-0-0] [lt=49][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:41.652237] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9848-0-0] [lt=15][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:41.652279] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [898][T1004_TenantWea][T1004][YB427F000001-000613ACAC1F9848-0-0] [lt=40][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:41.652311] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:41.652343] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.652361] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:41.652378] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] query failed(ret=-5627, conn=0x7f547e5f2050, start=1710486221652102, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:41.652398] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:41.652415] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] execute sql read fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, tenant_id=1004, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:41.652555] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=68][errcode=-5627] query cluster version range from WRS table fail(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:41.652596] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=37] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486221652591, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=2091, wlock_time=77, check_leader_time=3, query_version_time=0, 
persist_version_time=0) [2024-03-15 07:03:41.652630] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=33][errcode=-5627] start CLUSTER weak read service fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004) [2024-03-15 07:03:41.652646] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=431, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:41.653668] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.653732] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=64][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.657407] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486221657393577, start_id=0, end_id=0) [2024-03-15 07:03:41.663881] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5B-0-0] [lt=139][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:41.663997] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.664060] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=-4264] Log out of disk 
space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.663936] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5B-0-0] [lt=54][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.664285] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5B-0-0] [lt=345][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.664322] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5B-0-0] [lt=36][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.664340] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5B-0-0] [lt=18][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:41.669205] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:351) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=13] ====== check clog disk timer task ====== [2024-03-15 07:03:41.669282] INFO [PALF] get_disk_usage (palf_env_impl.cpp:777) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=70] get_disk_usage(ret=0, capacity(MB):=921, used(MB):=875) [2024-03-15 07:03:41.669344] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91EF-0-0] [lt=0][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.669381] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91EF-0-0] [lt=35][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:41.669565] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.669585] WDIAG [STORAGE.TRANS] post 
(ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=19][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221669553], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.669604] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221669553], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.669616] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91EF-0-0] [lt=210][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:41.669634] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221669553]}) [2024-03-15 07:03:41.669648] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91EF-0-0] [lt=31][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:41.669660] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=12] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:41.669669] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486221669655], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.669669] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.669690] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221669655]}) [2024-03-15 07:03:41.669689] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.669706] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:41.669723] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] query failed(ret=-5627, conn=0x7f53fb6d8050, start=1710486221669300, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.669745] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:41.669762] WDIAG [SHARE] 
load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.669911] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.669921] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486221669906], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.669939] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221669906]}) [2024-03-15 07:03:41.670471] INFO [LIB] runTimerTask (ob_work_queue.cpp:24) [135][ObTimer][T0][Y0-0000000000000000-0-0] [lt=34] add async task(this=tasktype:N9oceanbase10rootserver13ObRootService19ObRefreshServerTaskE) [2024-03-15 07:03:41.671281] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AA-0-0] [lt=16] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:41.671311] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AA-0-0] [lt=27][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.671359] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [552][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=21] sql audit evict task end(evict_high_mem_level=32212254, evict_high_size_level=90000, evict_batch_count=0, elapse_time=1, size_used=15051, mem_used=31196160) [2024-03-15 07:03:41.672057] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=32] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:41.672122] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=64] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486219668548}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486219668548}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:41.672175] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=44] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 
07:03:41.672211] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=30] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="DATA_CHECKPOINT_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={this:0x7f5484df2290}, min_rec_scn={val:1710278140261191947}, ls_id_={id:1}) [2024-03-15 07:03:41.674120] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=46] get rec log scn(service_type_=0, rec_log_ts={val:1710339949200647547}) [2024-03-15 07:03:41.674156] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=36] get rec log scn(service_type_=1, rec_log_ts={val:1710324545958121911}) [2024-03-15 07:03:41.674172] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=14] get rec log scn(service_type_=2, rec_log_ts={val:4611686018427387903}) [2024-03-15 07:03:41.674190] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=11] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710278140261191947}, checkpoint_scn_in_ls_meta={val:1710278140261191947}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:41.674221] INFO [STORAGE] cannot_recycle_log_over_threshold_ (ob_checkpoint_service.cpp:264) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=22] cannot_recycle_log_size statistics(cannot_recycle_log_size=783782571, threshold=289910292) [2024-03-15 07:03:41.674213] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.674259] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=64][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.674980] INFO [PALF] locate_by_lsn_coarsely (palf_handle_impl.cpp:1547) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=15] locate_by_lsn_coarsely(ret=0, ret="OB_SUCCESS", this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, lsn={lsn:365453102694}, committed_lsn={lsn:365766615723}, result_scn={val:1710311365828975114}) [2024-03-15 07:03:41.675042] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:218) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=61] advance checkpoint by flush to avoid clog disk full(recycle_scn={val:1710311365828975114}, end_lsn={lsn:365766615723}, clog_checkpoint_lsn={lsn:364982833152}, 
calcu_recycle_lsn={lsn:365453102694}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:41.675062] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:236) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=17] start flush(recycle_scn={val:1710311365828975114}, ls_->get_clog_checkpoint_scn()={val:1710278140261191947}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:41.676916] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=11] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:41.676969] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=52] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486219668548}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486219668548}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:41.679047] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=47] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:41.679118] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=71] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486219668548}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486219668548}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:41.679194] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=26] add dag success(dag=0x7f54801da080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=3526853371410145563, dag_cnt=1, dag_type_cnts=1) [2024-03-15 07:03:41.679245] INFO [STORAGE.TRANS] flush (ob_tx_ctx_memtable.cpp:298) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=47] tx ctx memtable flush successfully(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:true}, ls_id_={id:1}) [2024-03-15 07:03:41.679281] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.679289] INFO [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:193) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=34] start freeze tx data memtable(ls_id_={id:1}) [2024-03-15 07:03:41.679309] INFO [STORAGE] freeze_ (ob_tx_data_memtable_mgr.cpp:229) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=17] There is a freezed memetable existed. Try freeze after flushing it.(ret=-4023, ret="OB_EAGAIN", get_memtable_count_()=2) [2024-03-15 07:03:41.679303] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.679308] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [677][T1003_DagSchedu][T1003][Y0-0000000000000000-0-0] [lt=44] succeed to add sys task(task={start_time:1710486221679297, task_id:YB427F000001-000613ACAB2FBA94-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1003, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:41.679326] WDIAG [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:207) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-4023] freeze tx data memtable fail.(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.679340] WDIAG [STORAGE] flush (ob_tx_data_memtable_mgr.cpp:483) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-4023] freeze failed(ret=-4023, ret="OB_EAGAIN", this=0x7f5484de61b0) [2024-03-15 07:03:41.679334] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [677][T1003_DagSchedu][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=23] schedule one task(task={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA94-0-0, dag_ret:0, dag_status:2, start_time:1710486221679330, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.679353] WDIAG [STORAGE.TRANS] flush (ob_ls_tx_service.cpp:455) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=0] obCommonCheckpoint flush failed(tmp_ret=-4023, common_checkpoints_[i]=0x7f5484de6288) [2024-03-15 07:03:41.679372] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=15] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:41.679446] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=12][errcode=-4018] 
no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.679479] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=32][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710506028960191) [2024-03-15 07:03:41.679493] INFO [STORAGE] release_head_memtable_ (ob_tx_ctx_memtable_mgr.cpp:178) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=10] tx ctx memtable mgr release head memtable(*imemtable={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:true}) [2024-03-15 07:03:41.680948] INFO [STORAGE.TRANS] on_tx_ctx_table_flushed (ob_trans_ctx_mgr_v4.cpp:1323) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=33] succ to on tx ctx table flushed(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}) [2024-03-15 07:03:41.680985] INFO [STORAGE] release_memtables (ob_i_memtable_mgr.cpp:164) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=35] succeed to release memtable(ret=0, i=0, scn={val:1710506028960191}) [2024-03-15 07:03:41.680998] WDIAG [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:919) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=11][errcode=-4677] fail to inner init ctx(ret=-4677, tablet_id={id:49401}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:-1, base_version:-1, snapshot_version:-1}, create_snapshot_version:0, is_full_merge:false, merge_level:1, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:4, range_array:[], concurrent_cnt:0, is_inited:false}, schema_ctx:{base_schema_version:0, schema_version:0, storage_schema:NULL}, tables_handle count:0, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:null, allocator_:null, tablet_id:{id:0}, table_count:0, []}, schedule_major:false, scn_range:{start_scn:{val:0}, end_scn:{val:0}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f5497b6e040, ls_:0x7f5484de6150, mod_:1}, tablet_handle:{obj:0x7f547e643910, obj_pool:0x7f54affb9cb0, wash_priority:0}, merge_progress:NULL, compaction_filter:NULL, time_guard:total=0us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}) [2024-03-15 07:03:41.681063] WDIAG [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:854) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=65][errcode=-4677] sstable merge finish(ret=-4677, ctx=0x7f54298c6060, task={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA94-0-0, dag_ret:0, dag_status:2, 
start_time:1710486221679330, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.681083] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=18][errcode=-4677] failed to process task(ret=-4677) [2024-03-15 07:03:41.681091] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=7] task finish process(ret=-4677, start_time=1710486221679409, end_time=1710486221681089, runtime=1680, *this={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA94-0-0, dag_ret:0, dag_status:2, start_time:1710486221679330, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.681117] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=25][errcode=-4677] failed to do work(ret=-4677, *task_={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA94-0-0, dag_ret:0, dag_status:2, start_time:1710486221679330, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, compat_mode=0) [2024-03-15 07:03:41.681140] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=15] dag finished(dag_ret=-4677, runtime=1808, dag_cnt=0, dag_cnts_[dag.get_type()]=0, &dag=0x7f54801da080, dag={this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA94-0-0, dag_ret:-4677, dag_status:5, start_time:1710486221679330, running_task_cnt:0, indegree:0, hash:3526853371410145563}) [2024-03-15 07:03:41.681159] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [676][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA94-0-0] [lt=17] succeed to del sys task(removed_task={start_time:1710486221679297, task_id:YB427F000001-000613ACAB2FBA94-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1003, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:41.684613] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=31][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.684694] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=79][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.687710] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=16][errcode=0] server is initiating(server_id=0, local_seq=27136, max_local_seq=262143, 
max_server_id=4095) [2024-03-15 07:03:41.687747] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=34] new mysql sessid created(conn.sessid_=3221252608, support_ssl=false) [2024-03-15 07:03:41.687881] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=21] sm conn init succ(conn.sessid_=3221252608, sess.client_addr_="172.21.122.86:42704") [2024-03-15 07:03:41.687921] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=50] accept one succ(*s={this:0x7f5450387230, fd:133, err:0, last_decode_time_:0, last_write_time_:1710486221687856, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.693456] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=27] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.693480] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=24][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.693490] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.693502] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221693430, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.693535] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=32][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221693430, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.693604] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=30] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.693629] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=25][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.693646] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=15][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.693662] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=15][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:41.693669] WDIAG [SERVER] process (obmp_connect.cpp:242) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=7][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:41.693693] INFO [SERVER] 
send_error_packet (obmp_packet_sender.cpp:311) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=5] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:41.693720] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=10][errcode=0] server close connection(sessid=3221252608, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:41.693741] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=17][errcode=-4018] get session fail(ret=-4018, sessid=3221252608, proxy_sessid=0) [2024-03-15 07:03:41.693768] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=25][errcode=-4016] session is null [2024-03-15 07:03:41.693778] INFO [SERVER] process (obmp_connect.cpp:369) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB11F-0-0] [lt=6] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252608, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=false, capability=270377487, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:41.693935] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4015] close sql sock by user req(*s={this:0x7f5450387230, fd:133, err:5, last_decode_time_:1710486221693430, last_write_time_:1710486221693931, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.693971] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=31] kill and revert session(conn.sessid_=3221252608, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:41.693981] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=10] can close safely, do destroy(*s={this:0x7f5450387230, fd:133, err:5, last_decode_time_:1710486221693430, last_write_time_:1710486221693931, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.693994] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=10] connection close(sessid=3221252608, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:41.694174] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, 
isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221621965, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.694300] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] will sleep(sleep_us=14000, remain_us=910891, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=14, timeout_timestamp=1710486222605189) [2024-03-15 07:03:41.694868] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.694939] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=68][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.705142] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.705216] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, 
used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.707639] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.707692] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=77][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.707741] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=44][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065857}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486221707597) [2024-03-15 07:03:41.707766] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486221607515, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.708696] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.708741] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=45][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:5}) [2024-03-15 07:03:41.709556] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.709587] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.710292] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) 
[737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=702][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486221709545) [2024-03-15 07:03:41.710306] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486221509026, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.711541] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=11] get wrs ts(ls_id={id:1}, delta=146266859439, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:41.711575] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=27] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:41.715469] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.715525] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=56][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.725679] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.725740] ERROR try_recycle_blocks (palf_env_impl.cpp:688) 
[616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=62][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.735882] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.735951] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=68][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.739389] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=47][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:41.746139] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.746262] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=121][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.746465] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, 
assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221678602, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.746653] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] will sleep(sleep_us=100000, remain_us=8260398, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=160, timeout_timestamp=1710486230007049) [2024-03-15 07:03:41.756522] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=39][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.756595] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.757264] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.757455] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486221757446924, start_id=0, end_id=0) [2024-03-15 07:03:41.763173] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=31] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1591, replace_start_pos=251648, replace_num=15728) [2024-03-15 07:03:41.764143] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62B-0-0] 
[lt=150][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:41.764183] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62B-0-0] [lt=40][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.764230] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62B-0-0] [lt=45][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.764354] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62B-0-0] [lt=120][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.764389] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62B-0-0] [lt=34][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:41.766720] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.766761] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=-4264] Log out of disk space(msg="log disk space is almost 
full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.769571] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABA-0-0] [lt=185][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:41.769616] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABA-0-0] [lt=45][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.769635] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABA-0-0] [lt=18][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.769650] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABA-0-0] [lt=14][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.769670] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABA-0-0] [lt=20][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:41.770150] INFO [COORDINATOR] detect_recover (ob_failure_detector.cpp:138) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=0] doing detect recover operation(events_with_ops=[{event:{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}}]) [2024-03-15 07:03:41.770279] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.770298] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=17][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221770265], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 
07:03:41.770333] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=32][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221770265], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.770354] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221770265]}) [2024-03-15 07:03:41.770377] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=12] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:41.770386] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486221770373], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.770409] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221770373]}) [2024-03-15 07:03:41.770673] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.770683] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486221770666], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.770701] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221770666]}) [2024-03-15 07:03:41.771018] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B97-0-0] [lt=30] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:41.771043] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F0-0-0] [lt=16][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.771065] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F0-0-0] [lt=21][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:41.771094] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F0-0-0] [lt=11][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:41.771119] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91F0-0-0] [lt=24][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 
07:03:41.771134] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.771149] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.771209] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=57][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:41.771226] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] query failed(ret=-5627, conn=0x7f5435182050, start=1710486221771012, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.771246] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:41.771260] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.774037] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221707894, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.774159] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] will sleep(sleep_us=15000, remain_us=831032, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=15, timeout_timestamp=1710486222605189) [2024-03-15 07:03:41.776886] WDIAG [PALF] recycle_blocks_ 
(palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.776939] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.783934] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABB-0-0] [lt=122][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:41.783968] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABB-0-0] [lt=35][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.783989] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABB-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.784015] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABB-0-0] [lt=25][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.784027] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABB-0-0] [lt=12][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, 
scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:41.787082] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.787126] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.789419] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.789461] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=42][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.797279] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.797337] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.802469] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) 
[102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=16] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:41.802575] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=43] Wash time detail, (compute_wash_size_time=146, refresh_score_time=57, wash_time=7) [2024-03-15 07:03:41.806277] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=18] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=3680, clean_start_pos=1289737, clean_num=31457) [2024-03-15 07:03:41.807462] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.807519] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=56][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.810857] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] get wrs ts(ls_id={id:1}, delta=146266958630, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:41.810912] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=34] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:41.817627] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.817671] ERROR try_recycle_blocks (palf_env_impl.cpp:688) 
[616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.823705] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=9] [IO SENDER STATUS](send_index=1, req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807) [2024-03-15 07:03:41.823738] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=35] [IO SENDER STATUS](send_index=2, req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807) [2024-03-15 07:03:41.827245] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4023] get gts error(ret=-4023) [2024-03-15 07:03:41.827312] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=24] [PALF STAT GROUP LOG INFO](palf_id=1, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=122, avg_group_log_size=122) [2024-03-15 07:03:41.827510] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=187] [PALF STAT APPEND DATA SIZE](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, append size=121) [2024-03-15 07:03:41.827809] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.827849] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.832603] INFO [PALF] inner_write_once_ (log_block_handler.cpp:408) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=12] inner_write_once_ success(ret=0, offset=30862300, this={dio_aligned_buf:{buf_write_offset:3158, buf_padding_size:938, align_size:4096, aligned_buf_size:2125824, aligned_used_ts:5728, truncate_used_ts:2072}, log_block_size:67108864, dir_fd:131, io_fd:132}, write_size=0, aligned_buf_len=4096, aligned_buf="", aligned_block_offset=30859264, 
buf_len=122, total_write_size_=608, total_write_size_after_dio_=20480, ob_pwrite_used_ts=18460749, count=5) [2024-03-15 07:03:41.832649] INFO [LIB] stat (utility.h:1140) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=41] [PALF STAT WRITE LOG](cur_stat_count=1, stat_interval=1000000, avg cost=5308, this=0x7f5474395ad8) [2024-03-15 07:03:41.832661] INFO [PALF] inner_append_log (palf_handle_impl.cpp:1660) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=11] [PALF STAT INNER APPEND LOG](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, accum_size=122) [2024-03-15 07:03:41.832731] WDIAG [PALF] try_update_match_lsn_map_ (log_sliding_window.cpp:3790) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=4][errcode=0] [MATCH LSN ADVANCE DELAY]match_lsn advance delay too much time(ret=0, palf_id=1, self="127.0.0.1:2882", server="127.0.0.1:2882", update_func={old_end_lsn:{lsn:87401266140}, new_end_lsn:{lsn:87401266262}, old_advance_time_us:1710486219829126, new_ack_time_us:1710486221832724, advance delay(us):2003598}) [2024-03-15 07:03:41.832763] INFO [PALF] try_advance_committed_lsn_ (log_sliding_window.cpp:1572) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=28] [PALF STAT COMMITTED LOG SIZE](palf_id=1, self="127.0.0.1:2882", committed size=122) [2024-03-15 07:03:41.832799] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=11] [PALF STAT FS CB](cur_stat_count=1, stat_interval=1000000, avg cost=15, this=0x7f547438d3a8) [2024-03-15 07:03:41.832837] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=38] [PALF STAT LOG LIFETIME](cur_stat_count=1, stat_interval=1000000, avg cost=5507, this=0x7f547438d3d8) [2024-03-15 07:03:41.832846] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=8] [PALF STAT LOG SUBMIT WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=19, this=0x7f547438d408) [2024-03-15 07:03:41.832852] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=7] [PALF STAT LOG SLIDE WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=5488, this=0x7f547438d438) [2024-03-15 07:03:41.832863] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6] [PALF STAT FLUSH CB](cur_stat_count=1, stat_interval=1000000, avg cost=164, this=0x7f5474395b08) [2024-03-15 07:03:41.837276] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [559][T1_LSArchiveMgr][T1][YB427F000001-000613ACA7FF7BAA-0-0] [lt=23] gc stale ls task succ [2024-03-15 07:03:41.838235] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.838288] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=51][errcode=-4264] Log out of disk space(msg="log disk space is almost full", 
ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.839632] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=48][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:41.846964] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.847006] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=41][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.848460] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.848519] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.850486] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.850592] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=101][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.850640] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=31][errcode=-4076] post 
cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486221850474) [2024-03-15 07:03:41.850662] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486221650458, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.856276] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221789257, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.856466] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] will sleep(sleep_us=16000, remain_us=748725, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=16, timeout_timestamp=1710486222605189) [2024-03-15 07:03:41.857758] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486221857745800, start_id=0, end_id=0) [2024-03-15 07:03:41.858633] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, 
log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.858706] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=72][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.864524] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=17][errcode=0] server is initiating(server_id=0, local_seq=27137, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:41.864554] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=29] new mysql sessid created(conn.sessid_=3221252609, support_ssl=false) [2024-03-15 07:03:41.864624] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=10] sm conn init succ(conn.sessid_=3221252609, sess.client_addr_="172.21.122.86:42706") [2024-03-15 07:03:41.864660] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=29] accept one succ(*s={this:0x7f544f6052b0, fd:120, err:0, last_decode_time_:0, last_write_time_:1710486221864622, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.865517] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=22] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.865548] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=31][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.865563] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.865581] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221865492, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.865629] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=46][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221865492, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.865726] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=20] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.865775] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) 
[110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=48][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.865788] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=12][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.865801] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=12][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:41.865830] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=12][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:41.865864] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=27] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:41.865929] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=15][errcode=0] server close connection(sessid=3221252609, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:41.865962] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=29][errcode=-4018] get session fail(ret=-4018, sessid=3221252609, proxy_sessid=0) [2024-03-15 07:03:41.865978] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=14][errcode=-4016] session is null [2024-03-15 07:03:41.865992] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB034-0-0] [lt=11] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=uccenter, host_name=xxx.xxx.xxx.xxx, sessid=3221252609, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:41.866125] INFO [CLOG] do_fetch_log_ (ob_remote_fetch_log.cpp:154) [872][T1004_LogRessvr][T1004][YB427F000001-000613ACA9BF7BAA-0-0] [lt=43] print do_fetch_log_(lsn={lsn:18446744073709551615}, max_fetch_lsn={lsn:18446744073709551615}, need_schedule=false, proposal_id=-1, last_fetch_ts=-1, task_count=0, ls={ls_meta:{tenant_id:1004, ls_id:{id:1}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710235938936212294}, clog_base_lsn:{lsn:86430941184}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:1710339948785102625}, tablet_change_checkpoint_scn:{val:1710384841822240797}, all_id_meta:{id_meta:[{limited_id:1710506562631136223, latest_log_ts:{val:1710506546001774991}}, {limited_id:109000001, latest_log_ts:{val:1710506430849800962}}, {limited_id:1, latest_log_ts:{val:18446744073709551615}}]}}, log_handler:{role:1, proposal_id:431, palf_env_:0x7f54765f0030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, 
last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:41.866193] INFO [CLOG] do_fetch_log_ (ob_remote_fetch_log.cpp:154) [872][T1004_LogRessvr][T1004][YB427F000001-000613ACA9BF7BAA-0-0] [lt=66] print do_fetch_log_(lsn={lsn:18446744073709551615}, max_fetch_lsn={lsn:18446744073709551615}, need_schedule=false, proposal_id=-1, last_fetch_ts=-1, task_count=0, ls={ls_meta:{tenant_id:1004, ls_id:{id:1001}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710235941799359711}, clog_base_lsn:{lsn:33753698304}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:1710339948785102625}, tablet_change_checkpoint_scn:{val:1710384842533059633}, all_id_meta:{id_meta:[{limited_id:1710264987596709396, latest_log_ts:{val:1710264970963877439}}, {limited_id:98000001, latest_log_ts:{val:1710035127719876240}}, {limited_id:1, latest_log_ts:{val:18446744073709551615}}]}}, log_handler:{role:1, proposal_id:428, palf_env_:0x7f54765f0030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1001, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:41.866553] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=40][errcode=-4015] close sql sock by user req(*s={this:0x7f544f6052b0, fd:120, err:5, last_decode_time_:1710486221865492, last_write_time_:1710486221866544, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.866645] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=84] kill and revert session(conn.sessid_=3221252609, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:41.866667] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=21] can close safely, do destroy(*s={this:0x7f544f6052b0, fd:120, err:5, last_decode_time_:1710486221865492, last_write_time_:1710486221866544, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.866709] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=37] connection close(sessid=3221252609, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:41.868931] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, 
this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.868996] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=64][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.871090] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.871119] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=26][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221871077], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.871138] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=17][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221871077], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.871172] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=18] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221871077]}) [2024-03-15 07:03:41.871196] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=13] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:41.871206] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486221871192], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.871223] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221871192]}) [2024-03-15 07:03:41.871558] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.872414] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F1-0-0] [lt=16][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.872447] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F1-0-0] [lt=34][errcode=-5627] get schema 
guard failed(ret=-5627) [2024-03-15 07:03:41.872470] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F1-0-0] [lt=8][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:41.872482] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91F1-0-0] [lt=11][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:41.872491] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.872505] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.872511] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:41.872523] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] query failed(ret=-5627, conn=0x7f54b5ff8050, start=1710486221872384, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.872547] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:41.872555] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.871587] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486221871547], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.872711] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1112] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221871547]}) [2024-03-15 07:03:41.872955] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.872981] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=25][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, 
last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.877126] WDIAG [ARCHIVE] do_thread_task_ (ob_archive_sender.cpp:260) [746][T1003_ArcSender][T1003][YB427F000001-000613ACA93F7BAA-0-0] [lt=17][errcode=-4018] try free send task failed(ret=-4018) [2024-03-15 07:03:41.879161] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.879216] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.880485] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [193][RpcIO][T0][Y0-0000000000000000-0-0] [lt=21] [RPC EASY STAT](log_str=conn count=1/1, request done=47755/47755, request doing=0/0) [2024-03-15 07:03:41.889363] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.889409] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.890239] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:351) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=53] ====== check clog disk timer task ====== [2024-03-15 07:03:41.890273] INFO [PALF] get_disk_usage (palf_env_impl.cpp:777) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=30] get_disk_usage(ret=0, capacity(MB):=2048, used(MB):=1636) [2024-03-15 07:03:41.891418] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=10] 
succ to get rec scn(*this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:3}, aggre_rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.891473] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=54] tx ctx memtable get rec scn(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}, rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.891506] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=26] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:41.891524] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=15] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="TX_DATA_MEMTABLE_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={ObIMemtableMgr:{Memtables:this:0x7f549ecc81b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f549eccd290, table_type:1, memtable_head:0, memtable_tail:2, t3m:0x7f54b39e8030, tables:[0x7f549ed92080, 0x7f549ed92b00, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1}, tx_data_table:0x7f549ecce690, ls_tablet_svr:0x7f549ecc8190, slice_allocator:0x7f549ecce6d0}, min_rec_scn={val:1710208801027009356}, ls_id_={id:1}) [2024-03-15 07:03:41.892457] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=29] get rec log scn(service_type_=0, rec_log_ts={val:1710506540610900036}) [2024-03-15 07:03:41.892476] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=19] get rec log scn(service_type_=1, rec_log_ts={val:1710506427288403526}) [2024-03-15 07:03:41.892484] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=7] get rec log scn(service_type_=2, rec_log_ts={val:1710506427288403525}) [2024-03-15 07:03:41.892495] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=6] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710208801027009356}, checkpoint_scn_in_ls_meta={val:1710208801027009356}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:41.892508] INFO [STORAGE] cannot_recycle_log_over_threshold_ (ob_checkpoint_service.cpp:264) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=10] cannot_recycle_log_size statistics(cannot_recycle_log_size=1111753774, threshold=644245094) [2024-03-15 07:03:41.892826] INFO [PALF] locate_by_lsn_coarsely (palf_handle_impl.cpp:1547) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=6] locate_by_lsn_coarsely(ret=0, ret="OB_SUCCESS", 
this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, lsn={lsn:115349100776}, committed_lsn={lsn:115793802286}, result_scn={val:1710346876268445819}) [2024-03-15 07:03:41.892855] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:218) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=27] advance checkpoint by flush to avoid clog disk full(recycle_scn={val:1710346876268445819}, end_lsn={lsn:115793802286}, clog_checkpoint_lsn={lsn:114682048512}, calcu_recycle_lsn={lsn:115349100776}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:41.892869] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:236) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=13] start flush(recycle_scn={val:1710346876268445819}, ls_->get_clog_checkpoint_scn()={val:1710208801027009356}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:41.893482] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [568][T1_HAService][T1][Y0-0000000000000000-0-0] [lt=30] start do ls ha handler(ls_id_array_=[{id:1}]) [2024-03-15 07:03:41.894038] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=8] succ to get rec scn(*this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:3}, aggre_rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.894065] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=27] tx ctx memtable get rec scn(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}, rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.895143] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=18] succ to get rec scn(*this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:3}, aggre_rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.895170] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=26] tx ctx memtable get rec scn(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}, rec_scn={val:1710230200691799540}) [2024-03-15 07:03:41.895224] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=16] add dag success(dag=0x7f5509d9c080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=3526853371410145563, dag_cnt=1, dag_type_cnts=1) [2024-03-15 07:03:41.895240] INFO 
[STORAGE.TRANS] flush (ob_tx_ctx_memtable.cpp:298) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=17] tx ctx memtable flush successfully(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}, ls_id_={id:1}) [2024-03-15 07:03:41.895267] INFO [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:193) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=23] start freeze tx data memtable(ls_id_={id:1}) [2024-03-15 07:03:41.895277] INFO [STORAGE] freeze_ (ob_tx_data_memtable_mgr.cpp:229) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=8] There is a freezed memetable existed. Try freeze after flushing it.(ret=-4023, ret="OB_EAGAIN", get_memtable_count_()=2) [2024-03-15 07:03:41.895287] WDIAG [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:207) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=7][errcode=-4023] freeze tx data memtable fail.(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.895294] WDIAG [STORAGE] flush (ob_tx_data_memtable_mgr.cpp:483) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=7][errcode=-4023] freeze failed(ret=-4023, ret="OB_EAGAIN", this=0x7f549ecc81b0) [2024-03-15 07:03:41.895301] WDIAG [STORAGE.TRANS] flush (ob_ls_tx_service.cpp:455) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=7][errcode=0] obCommonCheckpoint flush failed(tmp_ret=-4023, common_checkpoints_[i]=0x7f549ecc8288) [2024-03-15 07:03:41.895295] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [525][T1_DagScheduler][T1][Y0-0000000000000000-0-0] [lt=29] succeed to add sys task(task={start_time:1710486221895285, task_id:YB427F000001-000613ACAC0FFCAA-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:41.895309] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [556][T1_CKClogDisk][T1][Y0-0000000000000000-0-0] [lt=7] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:41.895315] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [525][T1_DagScheduler][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=19] schedule one task(task={this:0x7f549edf8080, type:15, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.895400] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=30][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:41.895436] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=36][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, 
snapshot_version=1710295204694917) [2024-03-15 07:03:41.895453] INFO [STORAGE.COMPACTION] get_storage_schema_to_merge (ob_tablet_merge_ctx.cpp:1131) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=9] get storage schema to merge(ls_id={id:1}, tablet_id={id:49401}, schema_ctx={base_schema_version:1, schema_version:1, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f549d277fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:3, index_type:0, index_status:1, row_store_type:1, schema_version:1, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:0, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"CHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"CHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, get_storage_schema_flag=true, get_schema_on_memtable=false) [2024-03-15 07:03:41.895528] INFO [STORAGE] init (ob_partition_parallel_merge_ctx.cpp:107) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=66] Succ to init parallel merge ctx(enable_parallel_minor_merge=true, tablet_size=134217728, merge_ctx.param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}) [2024-03-15 07:03:41.895600] INFO [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:929) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=22] succeed to build merge ctx(tablet_id={id:49401}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:9223372036854775807, base_version:0, snapshot_version:1710482167841511}, create_snapshot_version:0, is_full_merge:true, merge_level:0, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:3, range_array:[{start_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MIN_OBJ,]store_rowkey:MIN}, end_key:{datum_cnt:1, group_idx:0, hash:0, [idx=0:MAX_OBJ,]store_rowkey:MAX}, group_idx:0, border_flag:{flag:0}}], concurrent_cnt:1, is_inited:true}, schema_ctx:{base_schema_version:1, schema_version:1, storage_schema:{ObIMultiSourceDataUnit:{is_tx_end:false, unsynced_cnt_for_multi_data:0, sync_finish:true}, this:0x7f549d277fa0, version:0, is_use_bloomfilter:0, column_info_simplified:0, compat_mode:0, table_type:3, index_type:0, index_status:1, row_store_type:1, schema_version:1, column_cnt:3, tablet_size:134217728, pctfree:10, block_size:16384, progressive_merge_round:0, master_key_id:18446744073709551615, compressor_type:1, encryption:"", encrypt_key:"", rowkey_array:[{column_idx:16, meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, order:0}], column_array:[{meta_type:{type:"BIGINT", collation:"binary", coercibility:"NUMERIC"}, is_column_stored_in_sstable:1, 
is_rowkey_column:1, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"CHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}, {meta_type:{type:"CHAR", collation:"binary", coercibility:"INVALID"}, is_column_stored_in_sstable:1, is_rowkey_column:0, is_generated_column:0, orig_default_value:{"NULL":"NULL"}}]}}, tables_handle count:1, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:0x7f54b39e8030, allocator_:null, tablet_id:{id:49401}, table_count:1, [{i:0, table_key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref:3}]}, schedule_major:false, scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f54b1b14040, ls_:0x7f549ecc8150, mod_:1}, tablet_handle:{obj:0x7f549d277a20, obj_pool:0x7f54b39f9cb0, wash_priority:0}, merge_progress:NULL, compaction_filter:NULL, time_guard:GET_PARALLEL_RANGE=167us|(1.00)|total=167us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}, skip_rest_operation=false) [2024-03-15 07:03:41.895769] INFO [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:851) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=159] succeed to init merge ctx(task={this:0x7f549edf8080, type:15, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.895792] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [499][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=22] task finish process(ret=0, start_time=1710486221895384, end_time=1710486221895790, runtime=406, *this={this:0x7f549edf8080, type:15, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.895861] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [525][T1_DagScheduler][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=23] schedule one task(task={this:0x7f549edf81b0, type:1, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:41.896028] WDIAG [STORAGE.TRANS] refresh_aggre_rec_scn (ob_trans_ctx_mgr_v4.cpp:1383) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=51][errcode=0] Concurrent merge may be because of previous failure(*this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:3}) [2024-03-15 07:03:41.896057] 
INFO [STORAGE] init (ob_tx_table_iterator.cpp:678) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=28] ObTxCtxMemtableScanIterator init succ(this={type:0, is_sstable_iter:false, block_row_store:null}) [2024-03-15 07:03:41.896066] INFO [STORAGE.TRANS] scan (ob_tx_ctx_memtable.cpp:103) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=9] ob tx ctx memtable scan successfully(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:3, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}) [2024-03-15 07:03:41.897746] INFO [STORAGE.TRANS] tx_calc_checksum_before_scn (ob_tx_callback_list.cpp:298) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=23] calc checksum before log ts(functor={target_scn:{val:1710295204634496822}, checksum_scn:{val:4611686018427387903}, checksum_last_scn:{val:0}}, *this={get_trans_ctx():{this:0x7f54b1a209d0, trans_id:{txid:198924641}, tenant_id:1, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482131475757}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710295204634496822}, max_applying_log_ts:{val:1710295204634496822}, max_applying_part_log_no:3, max_submitted_seq_no:1710295204563520, checksum:128717978, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:115068115490}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f54b1a221e0, is_inited_:true, trans_id:{txid:198924641}, ls_id:{id:1}, ctx:0x7f54b1a209d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f549d404030, tx_data_guard_:{tx_data:{tx_id:{txid:198924641}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:198924641}, state:1, commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710295204634496822}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710295204634496822} lock_wait_start_ts=0 replay_compact_version={val:1710294555750258313}} end_code=-6233 tx_status=0 is_readonly=false ref=0 
trans_id={txid:198924641} ls_id=1 callback_alloc_count=2 callback_free_count=2 checksum=128717978 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=5, slave=0, merge=0, tx_end=5, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710295204634496822}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482324447953}, length:0, checksum_scn:{val:4611686018427387903}, checksum:128717978, tmp_checksum:0}) [2024-03-15 07:03:41.897884] INFO [STORAGE.TRANS] get_checksum_and_scn (ob_tx_callback_list.cpp:427) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=137] get checksum and checksum_scn(this={get_trans_ctx():{this:0x7f54b1a209d0, trans_id:{txid:198924641}, tenant_id:1, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482131475757}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710295204634496822}, max_applying_log_ts:{val:1710295204634496822}, max_applying_part_log_no:3, max_submitted_seq_no:1710295204563520, checksum:128717978, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:115068115490}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f54b1a221e0, is_inited_:true, trans_id:{txid:198924641}, ls_id:{id:1}, ctx:0x7f54b1a209d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f549d404030, tx_data_guard_:{tx_data:{tx_id:{txid:198924641}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:198924641}, state:1, commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710295204634496822}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710295204634496822} lock_wait_start_ts=0 replay_compact_version={val:1710294555750258313}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:198924641} ls_id=1 callback_alloc_count=2 callback_free_count=2 checksum=128717978 tmp_checksum=0 
checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=5, slave=0, merge=0, tx_end=5, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710295204634496822}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482324447953}, length:0, checksum_scn:{val:4611686018427387903}, checksum:128717978, tmp_checksum:0}, checksum=128717978, checksum_scn={val:4611686018427387903}) [2024-03-15 07:03:41.897994] INFO [STORAGE.TRANS] get_tx_ctx_table_info_ (ob_trans_part_ctx.cpp:5377) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=105] store ctx_info: (ret=0, info={tx_id:{txid:198924641}, ls_id:{id:1}, cluster_id:1, state_info:{tx_id:{txid:198924641}, ref_cnt:0, state:"COMMIT", commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}, undo_status_list:{head:null, undo_node_cnt:0}}, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710295204634496822}, max_applying_log_ts:{val:1710295204634496822}, max_applying_part_log_no:3, max_submitted_seq_no:1710295204563520, checksum:128717978, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:115068115490}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}}, this={this:0x7f54b1a209d0, trans_id:{txid:198924641}, tenant_id:1, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482131475757}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710295204634496822}, max_applying_log_ts:{val:1710295204634496822}, max_applying_part_log_no:3, max_submitted_seq_no:1710295204563520, checksum:128717978, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:115068115490}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f54b1a221e0, is_inited_:true, trans_id:{txid:198924641}, ls_id:{id:1}, ctx:0x7f54b1a209d0, tx_data_guard:{tx_data:NULL}, 
is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f549d404030, tx_data_guard_:{tx_data:{tx_id:{txid:198924641}, ref_cnt:2, state:"COMMIT", commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:198924641}, state:1, commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710295204634496822}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710295204634496822} lock_wait_start_ts=0 replay_compact_version={val:1710294555750258313}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:198924641} ls_id=1 callback_alloc_count=2 callback_free_count=2 checksum=128717978 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=5, slave=0, merge=0, tx_end=5, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710295204634496822}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482324447953}) [2024-03-15 07:03:41.898104] WDIAG [STORAGE.TRANS] refresh_rec_log_ts_ (ob_trans_part_ctx.cpp:5337) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=109][errcode=0] we should not allow concurrent merge of tx ctx table(*this={this:0x7f54b1a209d0, trans_id:{txid:198924641}, tenant_id:1, is_exiting:true, trans_expired_time:9223372036854775807, cluster_version:17179934720, trans_need_wait_wrap:{receive_gts_ts_:[mts=0], need_wait_interval_us:0}, stc:[mts=0], ctx_create_time:1710482131475757}{ls_id:{id:1}, session_id:0, part_trans_action:1, pending_write:0, exec_info:{state:50, upstream:{id:-1}, participants:[{id:1}], incremental_participants:[], prev_record_lsn:{lsn:18446744073709551615}, redo_lsns:[], redo_log_no:0, multi_data_source:[{has_submitted_:true, has_synced_:true, type:6, data_.length():17}], scheduler:"127.0.0.1:2882", prepare_version:{val:18446744073709551615}, trans_type:0, next_log_entry_no:2, max_applied_log_ts:{val:1710295204634496822}, max_applying_log_ts:{val:1710295204634496822}, max_applying_part_log_no:3, max_submitted_seq_no:1710295204563520, checksum:128717978, checksum_scn:{val:4611686018427387903}, max_durable_lsn:{lsn:115068115490}, data_complete:false, is_dup_tx:false, prepare_log_info_arr:[], xid:{gtrid_str:"", bqual_str:"", format_id:1, gtrid_str_.ptr():"data_size:0, data:", bqual_str_.ptr():"data_size:0, data:", g_hv:0, b_hv:0}, need_checksum:true, is_sub2pc:false}, sub_state:{flag:18}, is_leaf():false, is_root():false, busy_cbs_.get_size():0, final_log_cb_:{ObTxBaseLogCb:{log_ts:{val:18446744073709551615}, lsn:{lsn:18446744073709551615}, submit_ts:0}, this:0x7f54b1a221e0, is_inited_:true, trans_id:{txid:198924641}, ls_id:{id:1}, ctx:0x7f54b1a209d0, tx_data_guard:{tx_data:NULL}, is_callbacked_:false, mds_range_:{count_:0}, cb_arg_array_:[], first_part_scn_:{val:18446744073709551615}}, ctx_tx_data_:{ctx_mgr_:0x7f549d404030, tx_data_guard_:{tx_data:{tx_id:{txid:198924641}, 
ref_cnt:2, state:"COMMIT", commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}, undo_status_list:{head:null, undo_node_cnt:0}}}, tx_commit_data_:{tx_id:{txid:198924641}, state:1, commit_version:{val:1710295204634496822}, start_scn:{val:1710295204594435588}, end_scn:{val:1710295204634496822}}, read_only_:true}, role_state_:1, start_replay_ts_:{val:1710295204634496822}, is_incomplete_replay_ctx_:false, mt_ctx_:{ObIMvccCtx={alloc_type=0 ctx_descriptor=0 min_table_version=0 max_table_version=0 trans_version={val:4611686018427387903} commit_version={val:1710295204634496822} lock_wait_start_ts=0 replay_compact_version={val:1710294555750258313}} end_code=-6233 tx_status=0 is_readonly=false ref=0 trans_id={txid:198924641} ls_id=1 callback_alloc_count=2 callback_free_count=2 checksum=128717978 tmp_checksum=0 checksum_scn={val:4611686018427387903} redo_filled_count=0 redo_sync_succ_count=0 redo_sync_fail_count=0 main_list_length=0 unsynced_cnt=0 unsubmitted_cnt_=0 cb_statistics:[main=5, slave=0, merge=0, tx_end=5, rollback_to=0, fast_commit=0, remove_memtable=0]}, coord_prepare_info_arr_:[], upstream_state:50, retain_cause:0, 2pc_role:-1, collected:[], ref:2, rec_log_ts:{val:18446744073709551615}, prev_rec_log_ts:{val:1710295204634496822}, lastest_snapshot:{val:18446744073709551615}, state_info_array:[], last_request_ts:1710482324447953}) [2024-03-15 07:03:41.898213] INFO [STORAGE.TRANS] serialize_ (ob_tx_table_define.cpp:215) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=91] ObTxCtxTableMeta encode succ(buf_len=26, pos=26) [2024-03-15 07:03:41.898567] ERROR alloc_block (ob_local_device.cpp:716) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=8][errcode=-4184] Server out of disk space(msg="Fail to alloc block", ret=-4184, free_block_cnt_=0, total_block_cnt_=2560) [2024-03-15 07:03:41.898589] WDIAG [STORAGE.BLKMGR] alloc_block (ob_block_manager.cpp:304) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=21][errcode=-4184] Failed to alloc block from io device(ret=-4184) [2024-03-15 07:03:41.898597] WDIAG [STORAGE] alloc_block (ob_macro_block_writer.cpp:1338) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block for new macro block(ret=-4184, current_index=0, current_macro_seq=0) [2024-03-15 07:03:41.898604] WDIAG [STORAGE] write_micro_block (ob_macro_block_writer.cpp:1116) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=7][errcode=-4184] Fail to pre-alloc block(ret=-4184) [2024-03-15 07:03:41.898611] WDIAG [STORAGE] build_micro_block (ob_macro_block_writer.cpp:938) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=6][errcode=-4184] fail to write micro block (ret=-4184, micro_block_desc={last_rowkey:{datum_cnt:3, group_idx:0, hash:0, [idx=0:{len: 8, flag: 0, null: 0, ptr: 0x7f5411262070, hex: 0000000000000000, int: 0},idx=1:{len: 8, flag: 0, null: 0, ptr: 0x7f54112620a8, hex: 00F0FFFFFFFFFFFF, int: -4096},idx=2:{len: 8, flag: 0, null: 0, ptr: 0x7f54112620e0, hex: 0000000000000000, int: 0},]store_rowkey:}, header:{magic:1005, version:2, header_size:64, header_checksum:-7062, column_count:5, rowkey_column_count:3, has_column_checksum:0, row_count:1, row_store_type:0, opt:5, var_column_count:0, row_offset:364, original_length:308, max_merged_trans_version:4096, data_length:308, data_zlength:308, data_checksum:3959364704, column_checksums:null, single_version_rows:1, contain_uncommitted_rows:0, 
is_last_row_last_flag:1, is_valid():true}, buf:0x7f53ec604090, buf_size:308, data_size:308, row_count:1, column_count:5, max_merged_trans_version:4096, macro_id:[9223372036854775807](ver=0,mode=0,seq=0), block_offset:0, block_checksum:2849402702, row_count_delta:1, contain_uncommitted_row:false, can_mark_deletion:false, has_string_out_row:false, has_lob_out_row:false, is_last_row_last_flag:true, original_size:308}) [2024-03-15 07:03:41.898657] WDIAG [STORAGE] build_micro_block (ob_data_macro_block_merge_writer.cpp:137) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=46][errcode=-4184] ObMacroBlockWriter fail to build_micro_block(ret=-4184) [2024-03-15 07:03:41.898665] WDIAG [STORAGE] close (ob_macro_block_writer.cpp:707) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=6][errcode=-4184] macro block writer fail to build current micro block.(ret=-4184) [2024-03-15 07:03:41.898671] WDIAG [STORAGE] close (ob_partition_merger.cpp:170) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=6][errcode=-4184] Failed to close macro block writer(ret=-4184) [2024-03-15 07:03:41.898678] WDIAG [STORAGE] close (ob_partition_merger.cpp:988) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=6][errcode=-4184] Failed to finish merge for partition merger(ret=-4184) [2024-03-15 07:03:41.898684] WDIAG [STORAGE] merge_partition (ob_partition_merger.cpp:1156) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=6][errcode=-4184] failed to close partition merger(ret=-4184) [2024-03-15 07:03:41.898691] INFO [STORAGE.COMPACTION] reset (ob_partition_rows_merger.cpp:908) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=5] partition merge iter row count(i=0, row_count=1, ghost_row_count=0, pkey={tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, table={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:3, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}) [2024-03-15 07:03:41.898724] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1434) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=23][errcode=-4184] failed to merge partition(ret=-4184) [2024-03-15 07:03:41.898756] WDIAG [STORAGE] process (ob_tablet_merge_task.cpp:1446) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=10][errcode=-4184] failed to merge(ret=-4184, ctx_->param_={merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, idx_=0) [2024-03-15 07:03:41.898777] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=20][errcode=-4184] failed to process task(ret=-4184) [2024-03-15 07:03:41.898784] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=7] task finish process(ret=-4184, start_time=1710486221895936, end_time=1710486221898783, runtime=2847, *this={this:0x7f549edf81b0, type:1, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:41.898803] WDIAG 
[COMMON] run1 (ob_dag_scheduler.cpp:1424) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=19][errcode=-4184] failed to do work(ret=-4184, *task_={this:0x7f549edf81b0, type:1, status:2, dag:{this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:0, dag_status:2, start_time:1710486221895312, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, compat_mode=0) [2024-03-15 07:03:41.898825] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=13] dag finished(dag_ret=-4184, runtime=3512, dag_cnt=0, dag_cnts_[dag.get_type()]=0, &dag=0x7f5509d9c080, dag={this:0x7f5509d9c080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAC0FFCAA-0-0, dag_ret:-4184, dag_status:5, start_time:1710486221895312, running_task_cnt:0, indegree:0, hash:3526853371410145563}) [2024-03-15 07:03:41.898852] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [509][T1_TX_TABLE_MER][T1][YB427F000001-000613ACAC0FFCAA-0-0] [lt=22] succeed to del sys task(removed_task={start_time:1710486221895285, task_id:YB427F000001-000613ACAC0FFCAA-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:41.899547] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.899584] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.907719] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=39][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.907747] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=28][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.907762] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, 
generate_timestamp=1710486221907709) [2024-03-15 07:03:41.907772] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=10][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486221707804, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.909416] INFO [STORAGE] operator() (ob_tenant_freezer.cpp:125) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=11] ====== tenant freeze timer task ====== [2024-03-15 07:03:41.909687] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.909714] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:41.909752] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486221909675) [2024-03-15 07:03:41.909770] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486221710317, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:41.909800] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.909846] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=45][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.910215] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) 
[808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=43][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:41.910229] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=14][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:41.910238] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=8][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:41.910245] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=7][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:41.910253] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=6][errcode=-5157] get schema guard failed(ret=-5157) [2024-03-15 07:03:41.910274] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=7][errcode=-5157] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_freeze_info ORDER BY frozen_scn DESC LIMIT 1"}, record_ret=-5157, ret=-5157) [2024-03-15 07:03:41.910285] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [808][T1004_Occam][T1004][YB427F000001-000613ACABBF839F-0-0] [lt=10][errcode=-5157] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_freeze_info ORDER BY frozen_scn DESC LIMIT 1"}, aret=-5157, ret=-5157) [2024-03-15 07:03:41.910294] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] execute sql failed(ret=-5157, tenant_id=1004, sql=SELECT * FROM __all_freeze_info ORDER BY frozen_scn DESC LIMIT 1) [2024-03-15 07:03:41.910303] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] retry_while_no_tenant_resource failed(ret=-5157, tenant_id=1004) [2024-03-15 07:03:41.910310] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] execute_read failed(ret=-5157, cluster_id=1, tenant_id=1004) [2024-03-15 07:03:41.910319] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] query failed(ret=-5157, conn=0x7f54a2df2050, start=1710486221910185, sql=SELECT * FROM __all_freeze_info ORDER BY frozen_scn DESC LIMIT 1) [2024-03-15 07:03:41.910329] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5157] read failed(ret=-5157) [2024-03-15 07:03:41.910336] WDIAG [SHARE] get_freeze_info (ob_freeze_info_proxy.cpp:69) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] fail to execute sql(ret=-5157, ret="OB_TENANT_NOT_EXIST", sql=SELECT * FROM __all_freeze_info ORDER BY frozen_scn DESC LIMIT 1, tenant_id=1004) [2024-03-15 07:03:41.910407] WDIAG [STORAGE] get_global_frozen_scn_ (ob_tenant_freezer.cpp:1115) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] get_frozen_scn failed(ret=-5157, ret="OB_TENANT_NOT_EXIST", 
tenant_id=1004) [2024-03-15 07:03:41.910415] WDIAG [STORAGE] do_major_if_need_ (ob_tenant_freezer.cpp:1217) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] fail to get global frozen version(ret=-5157) [2024-03-15 07:03:41.910436] WDIAG [STORAGE] check_and_freeze_normal_data_ (ob_tenant_freezer.cpp:408) [808][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=0] [TenantFreezer] fail to do major freeze(tmp_ret=-5157) [2024-03-15 07:03:41.911372] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15] get wrs ts(ls_id={id:1}, delta=146267058760, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:41.911399] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=21] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:41.914297] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5C-0-0] [lt=220][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:41.914320] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5C-0-0] [lt=23][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.914342] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5C-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.914357] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5C-0-0] [lt=14][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:41.914373] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5C-0-0] [lt=15][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, 
scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:41.917636] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [743][T1003_LSArchive][T1003][YB427F000001-000613ACA90F7BAA-0-0] [lt=34] gc stale ls task succ [2024-03-15 07:03:41.920016] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.920086] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=68][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.922959] INFO [STORAGE] gc_tables_in_queue (ob_tenant_meta_mem_mgr.cpp:360) [602][T1003_T3mGC][T1003][Y0-0000000000000000-0-0] [lt=49] Recycle 0 table(ret=0, allocator_={used:1489502, total:1846656}, tablet_pool_={typeid(T).name():"N9oceanbase7storage8ObTabletE", sizeof(T):2560, used_obj_cnt:728, free_obj_hold_cnt:0, allocator used:1910272, allocator total:2027648}, sstable_pool_={typeid(T).name():"N9oceanbase12blocksstable9ObSSTableE", sizeof(T):1088, used_obj_cnt:1537, free_obj_hold_cnt:0, allocator used:1770624, allocator total:1831424}, ddl_kv_pool_={typeid(T).name():"N9oceanbase7storage7ObDDLKVE", sizeof(T):3008, used_obj_cnt:0, free_obj_hold_cnt:0, allocator used:0, allocator total:0}, memtable_pool_={typeid(T).name():"N9oceanbase8memtable10ObMemtableE", sizeof(T):1920, used_obj_cnt:70, free_obj_hold_cnt:0, allocator used:138880, allocator total:196224}, tablet count=728, min_minor_cnt=0, pinned_tablet_cnt=0) [2024-03-15 07:03:41.922932] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221845623, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, 
commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.923065] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] will sleep(sleep_us=100000, remain_us=8083986, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=161, timeout_timestamp=1710486230007049) [2024-03-15 07:03:41.931367] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.931470] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=102][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.934976] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221872736, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, 
plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:41.935084] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] will sleep(sleep_us=17000, remain_us=670107, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=17, timeout_timestamp=1710486222605189) [2024-03-15 07:03:41.939740] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=51][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:41.941662] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=56][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.941718] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.951929] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.951995] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=66][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.952715] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:41.952778] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=62][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:41.957855] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486221957840648, start_id=0, end_id=0) [2024-03-15 07:03:41.962114] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.962150] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.965945] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=33] Cache replace map node details(ret=0, replace_node_count=0, replace_time=2626, replace_start_pos=267376, replace_num=15728) [2024-03-15 07:03:41.967484] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8C-0-0] [lt=7] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:41.972289] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.972360] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, 
limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.973207] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.973273] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=62][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221973182], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.973321] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=44][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486221973182], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.973374] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=34] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221973182]}) [2024-03-15 07:03:41.973418] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=27] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:41.973480] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=59][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486221973411], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.973537] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=36] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221973411]}) [2024-03-15 07:03:41.973615] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F2-0-0] [lt=30][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.973662] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F2-0-0] [lt=47][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:41.973687] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F2-0-0] [lt=11][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:41.973716] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91F2-0-0] [lt=27][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:41.973735] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.973747] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) 
[801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:41.973767] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:41.973779] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] query failed(ret=-5627, conn=0x7f54609f8050, start=1710486221973581, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.973792] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:41.973802] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:41.973857] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:41.973886] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=26][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486221973842], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:41.973911] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=15] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486221973842]}) [2024-03-15 07:03:41.982530] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:41.982594] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=62][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.984996] INFO do_work (ob_rl_mgr.cpp:704) [124][rl_mgr0][T0][Y0-0000000000000000-0-0] [lt=21] swc wakeup.(stat_period_=1000000, ready=false) [2024-03-15 07:03:41.992781] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=0] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.992853] ERROR 
try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=70][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:41.997644] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=13][errcode=0] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.997713] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=53] new mysql sessid created(conn.sessid_=3221252610, support_ssl=false) [2024-03-15 07:03:41.997816] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=19] sm conn init succ(conn.sessid_=3221252610, sess.client_addr_="172.21.122.86:42708") [2024-03-15 07:03:41.997858] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=26] accept one succ(*s={this:0x7f5494dbd270, fd:136, err:0, last_decode_time_:0, last_write_time_:1710486221997813, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.999049] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=41] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.999089] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=40][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.999100] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=10][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.999113] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221999029, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.999152] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=38][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486221999029, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.999257] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=163] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:41.999279] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=29][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:41.999288] WDIAG [SERVER] 
get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=8][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:41.999296] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=8][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:41.999303] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=6][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:41.999329] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=6] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:41.999380] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=11][errcode=0] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:41.999415] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=31][errcode=-4018] get session fail(ret=-4018, sessid=3221252610, proxy_sessid=0) [2024-03-15 07:03:41.999451] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=35][errcode=-4016] session is null [2024-03-15 07:03:41.999462] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB035-0-0] [lt=7] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252610, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:41.999596] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=21][errcode=-4015] close sql sock by user req(*s={this:0x7f5494dbd270, fd:136, err:5, last_decode_time_:1710486221999029, last_write_time_:1710486221999592, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.999634] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=33] kill and revert session(conn.sessid_=3221252610, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:41.999645] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=10] can close safely, do destroy(*s={this:0x7f5494dbd270, fd:136, err:5, last_decode_time_:1710486221999029, last_write_time_:1710486221999592, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:41.999671] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=23] connection close(sessid=3221252610, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.003038] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=1][errcode=-4264] Log out of disk space(msg="log disk space 
is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.006994] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=42] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:42.007115] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=40] Wash time detail, (compute_wash_size_time=172, refresh_score_time=71, wash_time=9) [2024-03-15 07:03:42.008120] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=7][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.008170] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=52][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.008201] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=28][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222008099) [2024-03-15 07:03:42.008218] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=18][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486221907779, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.008241] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1, serve_leader_epoch=0, cur_leader_epoch=420, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.008279] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=25] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.008291] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12] start TenantWeakReadClusterService(tenant_id=1) [2024-03-15 07:03:42.009845] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ 
(ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.009894] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.009895] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FE-0-0] [lt=12] table not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) [2024-03-15 07:03:42.009921] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222009830) [2024-03-15 07:03:42.009930] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FE-0-0] [lt=34][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.009943] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486221909784, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.009972] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1003, serve_leader_epoch=0, cur_leader_epoch=1984, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.010024] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1003, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.010040] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15] start TenantWeakReadClusterService(tenant_id=1003) [2024-03-15 07:03:42.010147] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=1] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222010142, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1877, wlock_time=38, 
check_leader_time=2, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.010197] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=1] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=420, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.011154] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EB-0-0] [lt=11][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.011181] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EB-0-0] [lt=26][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.011212] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EB-0-0] [lt=14][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.011233] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EB-0-0] [lt=20][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.011248] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.011263] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.011276] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.011291] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] query failed(ret=-5627, conn=0x7f5457d38050, start=1710486222011120, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.011309] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.011324] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] execute sql read fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, tenant_id=1003, sql=select min_version, max_version from 
__all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.011391] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] query cluster version range from WRS table fail(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.011408] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222011405, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1411, wlock_time=57, check_leader_time=2, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.011472] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-5627] start CLUSTER weak read service fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003) [2024-03-15 07:03:42.011488] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=1984, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.012215] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=54][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:0, dropped:4, tid:79}, {errcode:-4002, dropped:6649, tid:915}]) [2024-03-15 07:03:42.012249] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.012832] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486221952044, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, 
query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.012928] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] will sleep(sleep_us=18000, remain_us=592263, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=18, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.013196] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.013226] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.013755] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] get wrs ts(ls_id={id:1}, delta=146267160918, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.013791] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=30] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.014242] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.014277] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=29] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=7127, clean_start_pos=1321194, clean_num=31457) [2024-03-15 07:03:42.014319] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62C-0-0] [lt=496][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) 
[2024-03-15 07:03:42.014337] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62C-0-0] [lt=18][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.014358] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62C-0-0] [lt=20][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.014374] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62C-0-0] [lt=16][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.014395] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62C-0-0] [lt=21][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.014469] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.014692] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.014911] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.015128] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.015320] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.015533] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.015748] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.015972] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.016181] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.016415] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.016655] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=63][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.016930] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.017129] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=36][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.017333] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.017564] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=37][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.017801] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.018047] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=33][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.018310] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=51][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.018566] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=48][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.018860] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=57][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.019097] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.019313] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.019528] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.019742] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.019871] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABC-0-0] [lt=127][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.019893] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABC-0-0] [lt=22][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.019912] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABC-0-0] [lt=18][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.019925] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABC-0-0] [lt=13][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.019938] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABC-0-0] [lt=12][errcode=-4018] refresh priority failed(ret=-4018, 
ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.019968] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.020181] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.020411] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.020695] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=50][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.020912] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.021131] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.021339] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.021546] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.021773] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.021999] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.022242] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.022465] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.022683] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid 
argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.022856] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.023043] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.023276] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.023353] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.023350] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.023377] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.023387] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=30] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.023401] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=15][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.023441] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=36][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.023481] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.023672] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.023884] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.024077] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.024239] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.024284] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=8][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.024473] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.024720] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=48][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.024896] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.024991] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.025232] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.025400] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.025508] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.025598] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.025994] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.026102] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.026208] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.026418] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.026629] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=33][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.026706] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.026850] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.027063] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.027279] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.027383] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.027501] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.027709] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=40][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.027737] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=28][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.027742] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=56][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.027762] WDIAG [SERVER] query 
(ob_inner_sql_connection.cpp:815) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=12][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.027778] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=15][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.027790] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=10][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.027801] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=10][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.027811] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=9][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.027821] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=9][errcode=-5627] query failed(ret=-5627, conn=0x7f53faa86050, start=1710486222027662, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.027839] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=17][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.027849] WDIAG [SHARE] get_valid_dest_pairs (ob_archive_persist_helper.cpp:459) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=8][errcode=-5627] failed to exec sql(ret=-5627, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.027907] WDIAG [ARCHIVE] load_archive_round_attr (ob_archive_persist_mgr.cpp:308) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=10][errcode=-5627] get valid dest pair failed(ret=-5627, tenant_id_=1004) [2024-03-15 07:03:42.027926] WDIAG [ARCHIVE] do_check_switch_archive_ (ob_archive_service.cpp:261) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=18][errcode=-5627] load archive round attr failed(ret=-5627, tenant_id=1004) [2024-03-15 07:03:42.028005] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.028124] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.028327] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.028599] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=42][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.028600] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.028623] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=10][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.028640] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=16][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.028657] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=9][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.028670] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=12][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.028681] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=10][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.028691] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=9][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.028699] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=8][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.028709] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f5455992050, start=1710486222028606, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.028720] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=11][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.028728] WDIAG [SHARE] get_valid_dest_pairs (ob_archive_persist_helper.cpp:459) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=8][errcode=-5627] failed to exec sql(ret=-5627, sql=select dest_no, value 
from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:42.028770] WDIAG [ARCHIVE] load_archive_round_attr (ob_archive_persist_mgr.cpp:308) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=8][errcode=-5627] get valid dest pair failed(ret=-5627, tenant_id_=1004) [2024-03-15 07:03:42.028780] WDIAG [ARCHIVE] persist_archive_progress_ (ob_archive_persist_mgr.cpp:355) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=9][errcode=-5627] load archive round attr failed(ret=-5627, attr={key:{tenant_id:0, dest_no:-1}, incarnation:1, dest_id:0, round_id:0, state:{status:"INVALID"}, start_scn:{val:0}, checkpoint_scn:{val:0}, max_scn:{val:0}, compatible:{version:1}, base_piece_id:0, used_piece_id:0, piece_switch_interval:0, frozen_input_bytes:0, frozen_output_bytes:0, active_input_bytes:0, active_output_bytes:0, deleted_input_bytes:0, deleted_output_bytes:0, path:"", comment:""}) [2024-03-15 07:03:42.028995] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.029199] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.029199] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.029406] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.029629] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=38][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.029835] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.029931] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=136][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.030010] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.030203] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.030442] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.030534] WDIAG [PALF] 
submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.030642] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.030843] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.031039] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.031151] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.031240] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.031447] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.031410] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.031471] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=56] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.031494] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=23][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.031526] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.031647] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.031799] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.031804] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032002] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032147] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.032231] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032382] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032396] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.032532] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032737] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.032767] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.032962] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033186] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033251] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=120][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.033385] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033443] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.033458] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.033485] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.033532] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033737] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033925] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=64][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.033940] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.033980] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABD-0-0] [lt=103][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.034008] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABD-0-0] [lt=28][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.034031] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABD-0-0] [lt=22][errcode=-4018] refresh priority failed(ret=-4018, 
ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.034073] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABD-0-0] [lt=40][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.034090] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABD-0-0] [lt=17][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.034124] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.034347] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.034441] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=37][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.034533] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.034581] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.034795] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.035010] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.035055] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.035142] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] 
[lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.035234] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.035446] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.035667] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.035743] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.035795] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.035901] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036129] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036315] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.036367] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036388] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.036542] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036753] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036938] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.036960] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.036979] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.037142] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.037317] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.037546] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.037597] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.037782] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.037950] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.038176] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.038201] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.038220] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.038400] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.038667] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=42][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.038796] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.038837] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=32][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.038891] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=36][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.039125] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=54][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.039285] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.039410] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.039590] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.039794] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.039834] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.040019] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.040096] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.040382] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.040587] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.040684] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.040824] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.040841] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.041033] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:42.041581] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=43][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.042247] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=74][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.042875] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.043503] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=42][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.043621] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.043696] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=74][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.044125] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=32][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.044816] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.045473] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.046097] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.046786] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.047996] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.050242] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.050700] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.050725] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.050747] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222050688) [2024-03-15 07:03:42.050763] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486221850673, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.053065] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=33][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.053812] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.053821] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.053863] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.054406] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.055027] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.055793] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.056418] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.057065] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=53][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.057683] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.057974] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222057958374, start_id=0, end_id=0) [2024-03-15 07:03:42.058336] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.058951] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.059566] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.060199] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) 
[2024-03-15 07:03:42.060845] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=32][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.061457] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.062073] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.062687] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.063307] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.063967] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.063982] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.064013] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.064545] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.065149] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.065755] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 
07:03:42.066369] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.066913] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.067528] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:42.074180] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.074232] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.074486] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222074467], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.074539] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=50][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222074467], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.074586] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222074467]}) [2024-03-15 07:03:42.074627] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=24] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.074647] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=18][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222074620], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.074698] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=21] gts 
nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222074620]}) [2024-03-15 07:03:42.074782] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F3-0-0] [lt=12][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.074798] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F3-0-0] [lt=16][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.074819] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F3-0-0] [lt=8][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.074832] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91F3-0-0] [lt=11][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.074850] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.074860] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.074867] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.074876] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] query failed(ret=-5627, conn=0x7f54b4bf0050, start=1710486222074741, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.074887] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.074902] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.075077] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222075037], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.075167] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=59] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222075037]}) [2024-03-15 07:03:42.084411] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need 
verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.084460] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.085176] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222022161, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.085289] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] will sleep(sleep_us=100000, remain_us=7921763, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=162, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.087796] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B58-0-0] [lt=8][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.087814] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B58-0-0] [lt=19][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.087834] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) 
[859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B58-0-0] [lt=9][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1004'"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.087846] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B58-0-0] [lt=10][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1004'"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.087856] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:42.087865] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.087872] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.087881] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] query failed(ret=-5627, conn=0x7f547e5f2050, start=1710486222087757, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:42.087891] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.087900] WDIAG [SHARE] load_global_merge_info (ob_global_merge_table_operator.cpp:49) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] fail to execute sql(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, meta_tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:42.087965] WDIAG [STORAGE] refresh_merge_info (ob_tenant_freeze_info_mgr.cpp:856) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] failed to load global merge info(ret=-5627, ret="OB_SCHEMA_EAGAIN", global_merge_info={tenant_id:1004, cluster:{name:"cluster", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, frozen_scn:{name:"frozen_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, global_broadcast_scn:{name:"global_broadcast_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, last_merged_scn:{name:"last_merged_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, is_merge_error:{name:"is_merge_error", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_status:{name:"merge_status", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, error_type:{name:"error_type", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, suspend_merging:{name:"suspend_merging", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_start_time:{name:"merge_start_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, last_merged_time:{name:"last_merged_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}}) [2024-03-15 07:03:42.088009] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:967) 
[859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=43][errcode=-5627] fail to refresh merge info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.088577] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=7][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.088590] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=13][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.088599] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=8][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.088607] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=8][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.088614] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=7][errcode=-5157] get schema guard failed(ret=-5157) [2024-03-15 07:03:42.088627] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=7][errcode=-5157] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-5157, ret=-5157) [2024-03-15 07:03:42.088637] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B59-0-0] [lt=9][errcode=-5157] failed to process final(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, aret=-5157, ret=-5157) [2024-03-15 07:03:42.088645] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] execute sql failed(ret=-5157, tenant_id=1004, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.088653] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] retry_while_no_tenant_resource failed(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.088659] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] execute_read failed(ret=-5157, cluster_id=1, tenant_id=1004) [2024-03-15 07:03:42.088667] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] query failed(ret=-5157, conn=0x7f53fb6d8050, start=1710486222088550, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.088675] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] read 
failed(ret=-5157) [2024-03-15 07:03:42.088682] WDIAG [SHARE] load (ob_core_table_proxy.cpp:436) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=5][errcode=-5157] execute sql failed(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.088711] WDIAG [SHARE] load (ob_core_table_proxy.cpp:368) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] load failed(ret=-5157, for_update=false) [2024-03-15 07:03:42.088721] WDIAG [SHARE] get (ob_global_stat_proxy.cpp:422) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5157] core_table load failed(ret=-5157, ret="OB_TENANT_NOT_EXIST") [2024-03-15 07:03:42.088729] WDIAG [SHARE] get_snapshot_gc_scn (ob_global_stat_proxy.cpp:164) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] get failed(ret=-5157) [2024-03-15 07:03:42.088736] WDIAG [STORAGE] try_update_info (ob_tenant_freeze_info_mgr.cpp:923) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] tenant not exists, maybe has been removed(ret=-5157, MTL_ID()=1004) [2024-03-15 07:03:42.088744] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:970) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] fail to try update info(tmp_ret=-5157, tmp_ret="OB_TENANT_NOT_EXIST") [2024-03-15 07:03:42.088771] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.088781] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:42.088789] WDIAG [SERVER] get_mem_limit (ob_mysql_request_manager.cpp:270) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=7][errcode=-4029] failed to get global sys variable(ret=-4029, tenant_id=1003, OB_SV_SQL_AUDIT_PERCENTAGE="ob_sql_audit_percentage", obj_val={"NULL":"NULL"}) [2024-03-15 07:03:42.088801] WDIAG [SERVER] check_config_mem_limit (ob_eliminate_task.cpp:65) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-4029] failed to get mem limit(ret=-4029) [2024-03-15 07:03:42.088810] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=6] sql audit evict task end(evict_high_mem_level=858993459, evict_high_size_level=90000, evict_batch_count=0, elapse_time=0, size_used=0, mem_used=0) [2024-03-15 07:03:42.094656] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.094721] ERROR 
try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=65][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.101027] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222031083, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.101154] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] will sleep(sleep_us=19000, remain_us=504037, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=19, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.104831] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.104867] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, 
oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.111974] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=30] get wrs ts(ls_id={id:1}, delta=146267259371, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.112061] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=81] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.112387] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=20][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-4283, dropped:195, tid:138}]) [2024-03-15 07:03:42.114976] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.115025] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=49][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.117716] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [904][T1004_LSArchive][T1004][YB427F000001-000613ACAA6F7BAA-0-0] [lt=32] gc stale ls task succ [2024-03-15 07:03:42.120278] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:19, local_retry_times:19, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.120351] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=35][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.120365] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.120374] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.120401] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.120601] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=2] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.120634] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=32][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.125138] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.125202] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.125707] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7D-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.135345] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=49][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.135382] ERROR try_recycle_blocks (palf_env_impl.cpp:688) 
[616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.139822] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.145530] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.145615] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=85][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.147934] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6A-0-0] [lt=0][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.147984] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6A-0-0] [lt=50][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.148013] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6A-0-0] [lt=12][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1003'"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.148068] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6A-0-0] [lt=53][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1003'"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.148083] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:42.148106] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.148115] WDIAG [SERVER] execute_read 
(ob_inner_sql_connection.cpp:1961) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.148125] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f5492b2e050, start=1710486222147895, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:42.148138] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.148151] WDIAG [SHARE] load_global_merge_info (ob_global_merge_table_operator.cpp:49) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] fail to execute sql(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, meta_tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:42.148227] WDIAG [STORAGE] refresh_merge_info (ob_tenant_freeze_info_mgr.cpp:856) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-5627] failed to load global merge info(ret=-5627, ret="OB_SCHEMA_EAGAIN", global_merge_info={tenant_id:1003, cluster:{name:"cluster", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, frozen_scn:{name:"frozen_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, global_broadcast_scn:{name:"global_broadcast_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, last_merged_scn:{name:"last_merged_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, is_merge_error:{name:"is_merge_error", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_status:{name:"merge_status", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, error_type:{name:"error_type", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, suspend_merging:{name:"suspend_merging", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_start_time:{name:"merge_start_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, last_merged_time:{name:"last_merged_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}}) [2024-03-15 07:03:42.148289] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:967) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=62][errcode=-5627] fail to refresh merge info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.149302] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6B-0-0] [lt=9][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.149352] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6B-0-0] [lt=49][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.149378] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6B-0-0] [lt=11][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.149395] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) 
[678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6B-0-0] [lt=16][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.149406] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.149416] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.149450] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.149460] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f5435182050, start=1710486222149184, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.149477] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.149487] WDIAG [SHARE] load (ob_core_table_proxy.cpp:436) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute sql failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.149567] WDIAG [SHARE] load (ob_core_table_proxy.cpp:368) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=-5627] load failed(ret=-5627, for_update=false) [2024-03-15 07:03:42.149580] WDIAG [SHARE] get (ob_global_stat_proxy.cpp:422) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] core_table load failed(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.149590] WDIAG [SHARE] get_snapshot_gc_scn (ob_global_stat_proxy.cpp:164) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] get failed(ret=-5627) [2024-03-15 07:03:42.149599] WDIAG [STORAGE] get_global_info (ob_tenant_freeze_info_mgr.cpp:777) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] fail to get global info(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.149608] WDIAG [STORAGE] try_update_info (ob_tenant_freeze_info_mgr.cpp:921) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] failed to get global info(ret=-5627) [2024-03-15 07:03:42.149616] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:970) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] fail to try update info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.151168] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=36][errcode=-4076] get cluster service master 
fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.151206] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=38][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.151257] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=47][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222151149) [2024-03-15 07:03:42.151282] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222050799, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.152621] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.152679] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=59][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.152700] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.152718] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.152736] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:42.152753] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=17] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1004) [2024-03-15 07:03:42.152767] WDIAG [SQL.PC] run_plan_cache_task (ob_plan_cache.cpp:2039) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4029] fail to update plan cache memory sys val(ret=-4029) [2024-03-15 07:03:42.152798] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=20] start lib cache evict(tenant_id=1004, mem_hold=0, mem_limit=107374180, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:42.152823] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1038) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=19] end lib cache evict(tenant_id=1004, cache_evict_num=0, 
mem_hold=0, mem_limit=107374180, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:42.152840] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=17] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.155776] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.155869] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=91][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.156260] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B88-0-0] [lt=0][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.156299] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B88-0-0] [lt=37][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.156323] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B88-0-0] [lt=12][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.156351] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=27][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.156363] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=9][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.156375] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=10][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.156391] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=15][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.156468] WDIAG 
[COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=72][errcode=-5627] query failed(ret=-5627, conn=0x7f54b5ff8050, start=1710486222156232, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.156501] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=31][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.156518] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=15][errcode=-5627] execute sql failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.156569] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=12][errcode=-5627] get all ls info by persistent_ls_ failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004) [2024-03-15 07:03:42.156607] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=36][errcode=-5627] fail to get ls infos by tenant(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, inner_table_only=false) [2024-03-15 07:03:42.156637] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=29][errcode=-5627] fail to open iterator(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.156663] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=24][errcode=-5627] ls table iterator next failed(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.156675] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B88-0-0] [lt=9][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.158544] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222158531977, start_id=0, end_id=0) [2024-03-15 07:03:42.159415] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=10] Dumping All Cache Objs(alloc_obj_list.count()=0, alloc_obj_list=[]) [2024-03-15 07:03:42.159484] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=65] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.164585] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5D-0-0] [lt=144][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.164617] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5D-0-0] [lt=32][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, 
is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.164639] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5D-0-0] [lt=20][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.164654] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5D-0-0] [lt=15][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.164668] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5D-0-0] [lt=14][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.166032] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.166077] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.167802] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=42] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1718, replace_start_pos=283104, replace_num=15728) [2024-03-15 07:03:42.172800] WDIAG [SERVER] batch_process_tasks (ob_ls_table_updater.cpp:333) [133][LSMetaTblUp0][T0][YB427F000001-000613ACB09F8D35-0-0] [lt=80][errcode=-4076] tenant schema is not ready, need wait(ret=-4076, ret="OB_NEED_WAIT", superior_tenant_id=1, task={tenant_id:1, ls_id:{id:1}, add_timestamp:1710482324446323}) [2024-03-15 07:03:42.175956] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] 
[lt=2][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222175928], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.176023] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=63][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222175928], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.176066] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222175928]}) [2024-03-15 07:03:42.176106] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=24] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.176124] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222176098], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.176168] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=22] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222176098]}) [2024-03-15 07:03:42.176236] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.176323] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=85][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.176561] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222176533], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.176608] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=30] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222176533]}) [2024-03-15 07:03:42.184181] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] 
[lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.184219] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=38][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222120712], now0=[mts=1710486222120712], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.184251] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=30][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222119787, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.184344] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=78][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.184384] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=40][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222119787, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.184493] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=88] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, 
tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222119787, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.184606] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=126][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.184637] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=24][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.184654] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=15][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.184668] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.184678] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.184696] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=15][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=20, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.184720] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=21] will sleep(sleep_us=20000, remain_us=420471, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=20, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.185380] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:162, local_retry_times:162, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.185442] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=33][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.185451] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=9][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.185458] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=5][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.185475] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=6][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.185611] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.185631] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=20][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.186509] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.186581] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.196705] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.196748] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.199459] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=34][errcode=0] server is initiating(server_id=0, local_seq=27139, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:42.199491] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=33] new mysql sessid created(conn.sessid_=3221252611, support_ssl=false) [2024-03-15 07:03:42.199561] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=11] sm conn init succ(conn.sessid_=3221252611, sess.client_addr_="172.21.122.86:42710") [2024-03-15 07:03:42.199588] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=20] accept one succ(*s={this:0x7f544f60d230, fd:137, err:0, last_decode_time_:0, last_write_time_:1710486222199559, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.201397] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=19] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.201447] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=67][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.201472] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=23][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.201485] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=12][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222201359, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.201534] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=48][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222201359, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.201627] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) 
[109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=37] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.201651] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=25][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.201662] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=9][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.201673] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=10][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:42.201683] WDIAG [SERVER] process (obmp_connect.cpp:242) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=10][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:42.201713] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=8] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.201747] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=12][errcode=0] server close connection(sessid=3221252611, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.201780] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=29][errcode=-4018] get session fail(ret=-4018, sessid=3221252611, proxy_sessid=0) [2024-03-15 07:03:42.201832] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=50][errcode=-4016] session is null [2024-03-15 07:03:42.201846] INFO [SERVER] process (obmp_connect.cpp:369) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB120-0-0] [lt=9] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252611, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=false, capability=270377487, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:42.202005] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=29][errcode=-4015] close sql sock by user req(*s={this:0x7f544f60d230, fd:137, err:5, last_decode_time_:1710486222201360, last_write_time_:1710486222202001, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.202035] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=23] kill and revert session(conn.sessid_=3221252611, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:42.202048] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=12] can close safely, do destroy(*s={this:0x7f544f60d230, fd:137, err:5, last_decode_time_:1710486222201360, last_write_time_:1710486222202001, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) 
[2024-03-15 07:03:42.202064] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=13] connection close(sessid=3221252611, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.204856] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=14][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:20, local_retry_times:20, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.204910] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=36][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.204920] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.204927] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.204950] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.205131] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.205174] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=28][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.206953] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.206991] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk 
space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.208474] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=26][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.208518] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=44][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.208588] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=33][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222208457) [2024-03-15 07:03:42.208638] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=49][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222008230, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.209211] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.209246] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:42.209261] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=12] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1003) [2024-03-15 07:03:42.209284] WDIAG [SQL.PC] run_plan_cache_task (ob_plan_cache.cpp:2039) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-4029] fail to update plan cache memory sys val(ret=-4029) [2024-03-15 07:03:42.209301] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=10] start lib cache evict(tenant_id=1003, mem_hold=0, mem_limit=53687090, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:42.209315] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1038) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=12] end lib cache evict(tenant_id=1003, cache_evict_num=0, mem_hold=0, mem_limit=53687090, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:42.209337] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=21] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.210065] WDIAG [STORAGE.TRANS] 
post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.210137] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=50][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.210181] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=60][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222210025) [2024-03-15 07:03:42.210208] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486222009961, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.212482] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=24] get wrs ts(ls_id={id:1}, delta=146267360042, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.212546] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=54] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.214768] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=8] Dumping All Cache Objs(alloc_obj_list.count()=0, alloc_obj_list=[]) [2024-03-15 07:03:42.214798] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=29] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.214979] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=45] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:42.215079] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=36] Wash time detail, (compute_wash_size_time=121, refresh_score_time=57, wash_time=7) [2024-03-15 07:03:42.217124] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, 
this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.217211] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=84][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.218690] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=18] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=3590, clean_start_pos=1352651, clean_num=31457) [2024-03-15 07:03:42.218778] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.227412] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.227530] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=116][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.237648] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.237686] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is 
almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.239872] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.247834] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.247876] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.250328] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.250355] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=26][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486222185653], now0=[mts=1710486222185653], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.250374] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=18][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222184375, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.250444] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=45][errcode=-4283] fail to get snapshot(ret=-4283, 
local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 07:03:42.250470] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=25][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222184375, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.250504] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=31] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222184375, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.250569] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=63][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.250584] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.250592] WDIAG [SQL] open (ob_result_set.cpp:157) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.250602] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=7][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.250610] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=6][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.250622] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=163, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.250644] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17] will sleep(sleep_us=100000, remain_us=7756408, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=163, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.251234] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=24] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1004, serve_leader_epoch=0, cur_leader_epoch=431, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.251277] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=32] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1004, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.251286] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=9] start TenantWeakReadClusterService(tenant_id=1004) [2024-03-15 07:03:42.252087] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9849-0-0] [lt=6][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.252116] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9849-0-0] [lt=29][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.252138] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F9849-0-0] [lt=8][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.252152] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [898][T1004_TenantWea][T1004][YB427F000001-000613ACAC1F9849-0-0] [lt=13][errcode=-5627] failed 
to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.252166] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.252175] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.252184] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.252196] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] query failed(ret=-5627, conn=0x7f54bd1f4050, start=1710486222252057, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.252208] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.252220] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute sql read fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, tenant_id=1004, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.252289] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] query cluster version range from WRS table fail(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.252304] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222252301, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1036, wlock_time=27, check_leader_time=1, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.252324] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] start CLUSTER weak read service fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004) [2024-03-15 07:03:42.252334] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=8] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=431, 
start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.257982] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.258017] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.258653] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222258645809, start_id=0, end_id=0) [2024-03-15 07:03:42.263453] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62D-0-0] [lt=125][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.263487] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62D-0-0] [lt=35][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.263521] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62D-0-0] [lt=31][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.263545] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62D-0-0] [lt=23][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], 
is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.263478] INFO [STORAGE.TRANS] print_retain_ctx_info (ob_tx_retain_ctx_mgr.cpp:263) [753][T1003_TxLoopWor][T1003][Y0-0000000000000000-0-0] [lt=178] [RetainCtxMgr] print retain ctx(ls_id={id:1}, this={retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, retain_ctx_list_.get_first()={cause_:0, tx_id_:{txid:595683393}, ls_id_:{id:1}, tx_ctx_:0x7f5497a86350}, retain_ctx_list_.get_last()={cause_:0, tx_id_:{txid:595683393}, ls_id_:{id:1}, tx_ctx_:0x7f5497a86350}) [2024-03-15 07:03:42.263563] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62D-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.267235] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.267270] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=35][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222205206], now0=[mts=1710486222205206], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.267290] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=19][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222204876, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.267380] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=78][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.267413] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=32][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222204876, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.267494] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=78] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222204876, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.267586] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=89][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.267609] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=21][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.267622] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.267631] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=6][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.267639] WDIAG [SERVER] do_query 
(ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.267668] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=26][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=21, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.267683] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12] will sleep(sleep_us=21000, remain_us=337508, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=21, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.268151] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.268195] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.270172] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABE-0-0] [lt=160][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.270200] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABE-0-0] [lt=28][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.270218] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABE-0-0] [lt=18][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], 
is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.270233] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABE-0-0] [lt=13][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.270245] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABE-0-0] [lt=12][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.271906] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B98-0-0] [lt=1] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.271932] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B98-0-0] [lt=24][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.277026] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91F5-0-0] [lt=0][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.277238] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222277220], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.277266] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222277220], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.277295] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222277220]}) [2024-03-15 07:03:42.277317] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=12] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.277326] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222277313], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.277346] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, 
latest_srr:[mts=1710486222277313]}) [2024-03-15 07:03:42.277580] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222277573], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.277615] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=22] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222277573]}) [2024-03-15 07:03:42.278310] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.278347] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.284215] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABF-0-0] [lt=103][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.284249] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABF-0-0] [lt=34][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.284270] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABF-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.284286] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABF-0-0] [lt=14][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, 
is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.284299] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFABF-0-0] [lt=13][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.285035] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=10] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1) [2024-03-15 07:03:42.285120] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=79] start lib cache evict(tenant_id=1, mem_hold=2097152, mem_limit=107374180, cache_obj_num=1, cache_node_num=1) [2024-03-15 07:03:42.285153] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1038) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=19] end lib cache evict(tenant_id=1, cache_evict_num=0, mem_hold=2097152, mem_limit=107374180, cache_obj_num=1, cache_node_num=1) [2024-03-15 07:03:42.285180] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=36] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.288468] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.288514] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.288798] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:21, local_retry_times:21, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.288863] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=39][errcode=-4283] result set close failed(ret=-4283) 
[2024-03-15 07:03:42.288880] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=16][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.288907] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=26][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.288932] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.289116] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.289144] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=27][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.291721] INFO [CLOG] run1 (ob_garbage_collector.cpp:999) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=7] Garbage Collector is running(seq_=410, gc_interval=10000000) [2024-03-15 07:03:42.291793] INFO [CLOG] construct_server_ls_map_for_member_list_ (ob_garbage_collector.cpp:1114) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=35] self is leader, skip it(ls->get_ls_id()={id:1}) [2024-03-15 07:03:42.291852] INFO [CLOG] gc_check_member_list_ (ob_garbage_collector.cpp:1074) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=39] gc_check_member_list_ cost time(ret=0, time_us=94) [2024-03-15 07:03:42.291889] INFO [CLOG] execute_gc_ (ob_garbage_collector.cpp:1312) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=28] execute_gc cost time(ret=0, time_us=2) [2024-03-15 07:03:42.291908] INFO [CLOG] gc_check_ls_status_ (ob_garbage_collector.cpp:1257) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=13] gc_candidates push_back success(ret=0, candidate={ls_id_:{id:1}, ls_status_:1, gc_reason_:0}) [2024-03-15 07:03:42.291931] INFO [CLOG] execute_gc_ (ob_garbage_collector.cpp:1279) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=17] ls status is normal, skip(id={id:1}, gc_candidates=[{ls_id_:{id:1}, ls_status_:1, gc_reason_:0}]) [2024-03-15 07:03:42.291966] INFO [CLOG] execute_gc_ (ob_garbage_collector.cpp:1312) [542][T1_GCCollector][T1][Y0-0000000000000000-0-0] [lt=32] execute_gc cost time(ret=0, time_us=36) [2024-03-15 07:03:42.293023] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=21] Dumping All Cache Objs(alloc_obj_list.count()=1, alloc_obj_list=[{obj_id:307, tenant_id:1, log_del_time:9223372036854775807, real_del_time:9223372036854775807, ref_count:4, added_to_lc:true, 
mem_used:108080}]) [2024-03-15 07:03:42.293110] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=81] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:42.298662] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.298699] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.308814] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.308868] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.311250] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=33] get wrs ts(ls_id={id:1}, delta=146267459243, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.311285] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=30] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=1, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.319008] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there 
is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.319063] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.329232] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.329302] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=69][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.334807] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8688-0-0] [lt=16] table not exist(tenant_id=1, database_id=201001, table_name=__all_merge_info, ret=-5019) [2024-03-15 07:03:42.334857] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8688-0-0] [lt=48][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.336376] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=2] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.336418] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=40][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, 
skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.339550] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=31][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.339621] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.339955] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=41][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.349739] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=25][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.349786] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=47][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.350393] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.350417] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=23][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222289170], now0=[mts=1710486222289170], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.350449] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] 
[lt=31][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222284068, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.350523] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=63][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.350551] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=28][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222284068, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.350585] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=31] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222284068, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, 
stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.350653] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=66][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.350667] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.350675] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=6][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.350684] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=6][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.350690] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=5][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.350701] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=22, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.350715] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9] will sleep(sleep_us=22000, remain_us=254476, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=22, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.350741] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:163, local_retry_times:163, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.350791] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=33][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.350800] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.350807] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.350830] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.350955] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.350981] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=24][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.351311] WDIAG [PALF] convert_to_ts (scn.cpp:265) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4016] invalid scn should not convert to ts (val_=18446744073709551615) [2024-03-15 07:03:42.351324] INFO [STORAGE.TRANS] print_stat_ (ob_tenant_weak_read_service.cpp:527) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=12] [WRS] [TENANT_WEAK_READ_SERVICE] [STAT](tenant_id=1004, server_version={version:{val:1710506547039047538}, total_part_count:2, valid_inner_part_count:1, valid_user_part_count:1}, server_version_delta=-20324687741, in_cluster_service=false, cluster_version={val:18446744073709551615}, min_cluster_version={val:18446744073709551615}, max_cluster_version={val:18446744073709551615}, get_cluster_version_err=0, cluster_version_delta=1710486222351306, cluster_service_master="0.0.0.0:0", cluster_service_tablet_id={id:226}, post_cluster_heartbeat_count=0, succ_cluster_heartbeat_count=0, cluster_heartbeat_interval=1000000, local_cluster_version={val:0}, local_cluster_delta=1710486222351306, force_self_check=false, weak_read_refresh_interval=100000) [2024-03-15 07:03:42.351360] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.351383] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.351431] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222351353) [2024-03-15 07:03:42.351442] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ 
(ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222151306, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.358740] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222358727787, start_id=0, end_id=0) [2024-03-15 07:03:42.359906] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.359956] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.364222] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.364323] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=3][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.364583] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.364636] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.364852] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365231] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365269] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365450] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365498] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365777] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.365838] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.366070] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.366073] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=3][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.366454] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.366657] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.366893] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367097] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367105] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367100] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367295] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367507] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367700] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367744] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367761] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.367911] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.368118] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.368338] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.368394] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.368746] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.368965] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369175] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369178] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369398] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369683] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369833] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.369931] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.370099] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.370136] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.370212] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.370646] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service 
get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.370643] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.370961] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371184] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371322] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371410] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371656] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371693] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=27] Cache replace map node details(ret=0, replace_node_count=0, replace_time=3751, replace_start_pos=298832, replace_num=15728) [2024-03-15 07:03:42.371930] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.371935] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372107] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372204] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372448] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, 
ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372681] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372826] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=10][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:22, local_retry_times:22, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.372836] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.372875] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=33][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.372885] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=9][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.372891] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.372860] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=15][errcode=0] server is initiating(server_id=0, local_seq=27140, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:42.372914] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.372918] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=54] new mysql sessid created(conn.sessid_=3221252612, support_ssl=false) [2024-03-15 07:03:42.373038] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373066] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=26] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.373081] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=14][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, 
skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.373055] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373105] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373095] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=19] sm conn init succ(conn.sessid_=3221252612, sess.client_addr_="172.21.122.86:42712") [2024-03-15 07:03:42.373146] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=31] accept one succ(*s={this:0x7f544f6a32b0, fd:120, err:0, last_decode_time_:0, last_write_time_:1710486222373091, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.373299] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373528] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373588] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373705] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373784] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.373923] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=54] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.373956] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=32][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.373987] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=29][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.374006] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-5160] extract tenant_id fail(ret=-5160, 
tenant_id=18446744073709551615, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222373902, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.374036] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374064] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=56][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222373902, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.374185] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=29] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.374214] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=27][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.374216] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374231] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=15][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.374245] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=13][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:42.374257] WDIAG [SERVER] process (obmp_connect.cpp:242) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=12][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:42.374293] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=10] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.374349] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=16][errcode=0] server close connection(sessid=3221252612, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.374374] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=20][errcode=-4018] get session fail(ret=-4018, sessid=3221252612, proxy_sessid=0) [2024-03-15 07:03:42.374392] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] 
[lt=15][errcode=-4016] session is null [2024-03-15 07:03:42.374409] INFO [SERVER] process (obmp_connect.cpp:369) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB121-0-0] [lt=13] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=uccenter, host_name=xxx.xxx.xxx.xxx, sessid=3221252612, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:42.374477] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374660] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=45][errcode=-4015] close sql sock by user req(*s={this:0x7f544f6a32b0, fd:120, err:5, last_decode_time_:1710486222373902, last_write_time_:1710486222374654, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.374698] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=31] kill and revert session(conn.sessid_=3221252612, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:42.374715] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=17] can close safely, do destroy(*s={this:0x7f544f6a32b0, fd:120, err:5, last_decode_time_:1710486222373902, last_write_time_:1710486222374654, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.374752] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374771] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=52] connection close(sessid=3221252612, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.374820] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374835] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.374996] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375125] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375261] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375445] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=21][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375478] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375479] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375672] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375755] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.375877] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376101] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376111] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376337] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376477] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376561] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376640] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376758] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376838] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.376971] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377140] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377225] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377295] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377439] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377455] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377651] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.377860] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378042] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) 
[526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378065] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378083] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378192] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=8][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.378240] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=42][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222378180], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.378271] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222378180], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.378283] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378317] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=22] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222378180]}) [2024-03-15 07:03:42.378356] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=26] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.378373] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222378350], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.378396] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222378350]}) [2024-03-15 07:03:42.378464] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378542] WDIAG load_file_to_string (utility.h:630) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=9][errcode=0] read /sys/class/net/lo/speed failed, errno 22 [2024-03-15 07:03:42.378584] WDIAG get_ethernet_speed (utility.cpp:625) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=36][errcode=-4000] load 
file /sys/class/net/lo/speed failed, ret -4000 [2024-03-15 07:03:42.378608] WDIAG [SERVER] get_network_speed_from_sysfs (ob_server.cpp:2260) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=19][errcode=-4000] cannot get Ethernet speed, use default(tmp_ret=0, devname="lo") [2024-03-15 07:03:42.378614] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378623] WDIAG [SERVER] runTimerTask (ob_server.cpp:2782) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-4000] ObRefreshNetworkSpeedTask reload bandwidth throttle limit failed(ret=-4000, ret="OB_ERROR") [2024-03-15 07:03:42.378691] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378687] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378778] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.378824] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222378807], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.378866] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=25] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222378807]}) [2024-03-15 07:03:42.378956] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.380257] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.380293] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, 
total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.383763] INFO [LIB] stat (utility.h:1140) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] [PALF STAT APPEND COST](cur_stat_count=1921, stat_interval=1000000, avg cost=4, this=0x7f549eccc950) [2024-03-15 07:03:42.390527] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.390613] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=85][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.395170] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=8] table not exist(tenant_id=1, database_id=201001, table_name=__all_sys_parameter, ret=-5019) [2024-03-15 07:03:42.395218] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=45][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_sys_parameter, ret=-5019) [2024-03-15 07:03:42.395552] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=14][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_sys_parameter, db_name=oceanbase) [2024-03-15 07:03:42.395602] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=61][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_sys_parameter) [2024-03-15 07:03:42.395625] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=15][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.395652] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=26][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.395672] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=12][errcode=-5019] Table 'oceanbase.__all_sys_parameter' doesn't exist [2024-03-15 07:03:42.395684] WDIAG [SQL.RESV] 
resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=11][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.395696] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=11][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.395707] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=10][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.395718] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=9][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.395729] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=10][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.395926] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=193][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.395934] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [195][RpcIO][T0][Y0-0000000000000000-0-0] [lt=18] [RPC EASY STAT](log_str=conn count=1/1, request done=47205/47205, request doing=0/0) [2024-03-15 07:03:42.395940] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=13][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.395966] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=13][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.395978] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=12][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.395995] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B87-0-0] [lt=11][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.400782] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.400815] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, 
oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.401761] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.401796] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=36][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:42.401812] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.401825] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:42.401839] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:42.401854] WDIAG [SERVER] get_mem_limit (ob_mysql_request_manager.cpp:270) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4029] failed to get global sys variable(ret=-4029, tenant_id=1004, OB_SV_SQL_AUDIT_PERCENTAGE="ob_sql_audit_percentage", obj_val={"NULL":"NULL"}) [2024-03-15 07:03:42.401875] WDIAG [SERVER] check_config_mem_limit (ob_eliminate_task.cpp:65) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4029] failed to get mem limit(ret=-4029) [2024-03-15 07:03:42.401889] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=11] sql audit evict task end(evict_high_mem_level=858993459, evict_high_size_level=90000, evict_batch_count=0, elapse_time=1, size_used=0, mem_used=0) [2024-03-15 07:03:42.405469] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=8] start do ls ha handler(ls_id_array_=[{id:1}]) [2024-03-15 07:03:42.405531] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=0][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1}) [2024-03-15 07:03:42.408583] WDIAG [STORAGE.TRANS] get_cluster_service_master_ (ob_tenant_weak_read_service.cpp:286) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=28][errcode=-4076] tenant schema is not ready, need wait(ret=-4076, ret="OB_NEED_WAIT", superior_tenant_id=1) [2024-03-15 07:03:42.408604] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.408616] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.408633] WDIAG 
[STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222408569) [2024-03-15 07:03:42.408645] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222208664, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.410219] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.410246] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.410288] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222410207) [2024-03-15 07:03:42.410305] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486222210232, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.410899] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.410929] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, 
oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.411753] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.411769] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=17][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486222336893], now0=[mts=1710486222336893], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.411786] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:0, tx_consistency_type:0, isolation:1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222335713, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:false, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:0, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.411837] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=42][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:42.411858] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=20][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:0, tx_consistency_type:0, isolation:1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222335713, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:false, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:0, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.411889] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=28] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, 
savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:0, tx_consistency_type:0, isolation:1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222335713, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:false, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:0, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.411947] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=57][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.411961] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=12][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.411970] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.411978] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.411970] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14] get wrs ts(ls_id={id:1}, delta=146267559345, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.411986] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.411997] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.412012] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:99) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10] already timeout, do not need sleep(sleep_us=0, remain_us=1923137, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=0, 
timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.411999] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.412023] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.412048] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=12][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.412056] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.412062] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.412097] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.412233] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.412246] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=13][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.413381] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=25][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-5019, dropped:343, tid:75}]) [2024-03-15 07:03:42.413519] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5E-0-0] [lt=119][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.413577] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5E-0-0] 
[lt=55][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.413777] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5E-0-0] [lt=95][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.413875] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5E-0-0] [lt=193][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.413900] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5E-0-0] [lt=25][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.416060] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.416094] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=34][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486222351003], now0=[mts=1710486222351003], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.416113] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=18][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222350071, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) 
[2024-03-15 07:03:42.416166] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=45][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 07:03:42.416188] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=22][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222350071, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.416252] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=60] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222350071, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.416330] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=76][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.416347] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=14][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.416371] WDIAG [SQL] open (ob_result_set.cpp:157) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=22][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.416381] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=8][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.416390] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.416403] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=164, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.416440] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=13] will sleep(sleep_us=100000, remain_us=7590631, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=164, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.419317] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=21] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:42.419412] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=32] Wash time detail, (compute_wash_size_time=129, refresh_score_time=56, wash_time=8) [2024-03-15 07:03:42.421059] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.421121] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.423291] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] 
[lt=32] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=3843, clean_start_pos=1384108, clean_num=31457) [2024-03-15 07:03:42.431250] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.431286] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.435902] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.435935] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=25][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222373102], now0=[mts=1710486222373102], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.435955] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=19][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222371915, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.436010] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=45][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.436033] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] 
[lt=23][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222371915, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.436066] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=30] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222371915, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.436130] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=63][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.436146] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.436155] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.436165] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.436174] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.436186] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=10][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=23, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.436201] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11] will sleep(sleep_us=23000, remain_us=168990, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=23, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.436477] INFO [PALF] runTimerTask (block_gc_timer_task.cpp:101) [793][T1004_PalfGC][T1004][Y0-0000000000000000-0-0] [lt=75] BlockGCTimerTask success(ret=0, cost_time_us=13, palf_env_impl_={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1004", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):8294, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):8294, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.440022] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.441658] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.441749] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=91][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.442836] INFO [LIB] stat (utility.h:1140) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] [PALF STAT APPEND COST](cur_stat_count=4541, stat_interval=1000000, avg cost=3, this=0x7f54639de950) [2024-03-15 07:03:42.451898] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=0] there is not any 
block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.451943] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.454322] INFO process_request (ob_th_worker.cpp:313) [965][T1_L0_G10][T1][Y0-0000000000000000-0-0] [lt=21] thd_flag=1 total=0 used=0 pm_hold=0 [2024-03-15 07:03:42.454502] INFO [SHARE.LOCATION] renew_all_ls_locations_by_rpc (ob_ls_location_service.cpp:795) [166][AutoLSLocRpc][T0][YB427F000001-000613ACAB5F8B85-0-0] [lt=64] [LS_LOCATION] Get ls leaders by RPC(ret=0, ret="OB_SUCCESS", dests=["127.0.0.1:2882"], leaders=[{key:{tenant_id:1, ls_id:{id:1}, cluster_id:1}, location:{server:"127.0.0.1:2882", role:1, sql_port:2881, replica_type:0, property:{memstore_percent_:100}, restore_status:{status:0}, proposal_id:420}}, {key:{tenant_id:1003, ls_id:{id:1}, cluster_id:1}, location:{server:"127.0.0.1:2882", role:1, sql_port:2881, replica_type:0, property:{memstore_percent_:100}, restore_status:{status:0}, proposal_id:1984}}, {key:{tenant_id:1004, ls_id:{id:1}, cluster_id:1}, location:{server:"127.0.0.1:2882", role:1, sql_port:2881, replica_type:0, property:{memstore_percent_:100}, restore_status:{status:0}, proposal_id:431}}, {key:{tenant_id:1004, ls_id:{id:1001}, cluster_id:1}, location:{server:"127.0.0.1:2882", role:1, sql_port:2881, replica_type:0, property:{memstore_percent_:100}, restore_status:{status:0}, proposal_id:428}}]) [2024-03-15 07:03:42.458933] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222458921860, start_id=0, end_id=0) [2024-03-15 07:03:42.459314] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:23, local_retry_times:23, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.459360] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=29][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.459373] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.459380] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 
07:03:42.459403] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.459562] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.459601] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=39][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.460249] INFO [PALF] check_and_switch_state (palf_handle_impl.cpp:2125) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=28] [PALF_DUMP](palf_id=1, self="127.0.0.1:2882", [SlidingWindow]={palf_id:1, self:"127.0.0.1:2882", lsn_allocator:{max_log_id:267489707, max_lsn:87401266262, max_scn:{val:1710506547144173839}}, group_buffer:{log_group_buffer: start_lsn:{lsn:87401021210}, reuse_lsn:{lsn:87401266262}, reserved_buffer_size:41943040, available_buffer_size:33554432}, last_submit_lsn:{lsn:87401266140}, last_submit_end_lsn:{lsn:87401266262}, last_submit_log_id:267489707, last_submit_log_pid:431, max_flushed_lsn:{lsn:87401266140}, max_flushed_end_lsn:{lsn:87401266262}, max_flushed_log_pid:431, committed_end_lsn:{lsn:87401266262}, last_slide_log_id:267489707, last_slide_scn:{val:1710506547144173839}, last_slide_lsn:{lsn:87401266140}, last_slide_end_lsn:{lsn:87401266262}, last_slide_log_pid:431, last_slide_log_accum_checksum:4266425706, last_fetch_end_lsn:{lsn:18446744073709551615}, last_fetch_max_log_id:-1, last_fetch_committed_end_lsn:{lsn:18446744073709551615}, last_truncate_lsn:{lsn:18446744073709551615}, this:0x7f547438c530}, [StateMgr]={this:0x7f547438f3f0, self:"127.0.0.1:2882", palf_id:1, role:"LEADER", replica_type:"NORMAL_REPLICA", state:"ACTIVE", prepare_meta:{version:1, voted_for:"0.0.0.0:0", log_proposal_id:431}, leader:"127.0.0.1:2882", leader_epoch:1, is_sync_enabled:true, allow_vote:true, pending_end_lsn:{lsn:18446744073709551615}, scan_disk_log_finished:true, last_check_start_id:267489708, is_changing_config_with_arb:false, reconfirm_start_time_us:1710482152815816, palf_role_change_cb:0x7f5474395670, allow_vote:true}, [ConfigMgr]={palf_id:1, self:"127.0.0.1:2882", alive_paxos_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, alive_paxos_replica_num:1, log_ms_meta:{version:2, proposal_id:431, prev:{config_version:{proposal_id:430, config_seq:431}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, curr:{config_version:{proposal_id:431, config_seq:432}, 
log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, prev_log_proposal_id:430, prev_lsn:{lsn:87401021210}, prev_mode_pid:431}, prev_log_proposal_id:430, prev_lsn:{lsn:87401021210}, prev_mode_pid:431, state:0, persistent_config_version:{proposal_id:431, config_seq:432}, ms_ack_list:0, resend_config_version:{proposal_id:431, config_seq:432}, resend_log_list:{learner_num:0, learner_array:[]}, last_submit_config_log_time_us:-1, region:"default_region", paxos_member_region_map:{}, register_time_us:-1, parent:"0.0.0.0:0", parent_keepalive_time_us:-1, last_submit_register_req_time_us:-1, children:{learner_num:0, learner_array:[]}, last_submit_keepalive_time_us:-1, this:0x7f547438d5b0}, [ModeMgr]={palf_id:1, self:"127.0.0.1:2882", applied_mode_meta:{version:1, proposal_id:431, mode_version:0, access_mode:1, ref_scn:{val:1}}, accepted_mode_meta:{version:1, proposal_id:431, mode_version:0, access_mode:1, ref_scn:{val:1}}, last_submit_mode_meta:{version:1, proposal_id:431, mode_version:0, access_mode:1, ref_scn:{val:1}}, state:"MODE_INIT", new_proposal_id:9223372036854775807, local_max_lsn:{lsn:18446744073709551615}, local_max_log_pid:9223372036854775807, max_majority_accepted_pid:9223372036854775807, max_majority_lsn:{lsn:18446744073709551615}, max_majority_accepted_mode_meta:{version:-1, proposal_id:9223372036854775807, mode_version:9223372036854775807, access_mode:0, ref_scn:{val:18446744073709551615}}, follower_list:0, ack_list:0, majority_cnt:9223372036854775807}, [LogEngine]={palf_id:1, is_inited:true, min_block_max_scn:{val:18446744073709551615}, min_block_id:274894685184, base_lsn_for_block_gc:{lsn:86430941184}, log_meta:{version:1, log_prepare_meta:{version:1, voted_for:"0.0.0.0:0", log_proposal_id:431}, log_config_meta:{version:2, proposal_id:431, prev:{config_version:{proposal_id:430, config_seq:431}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, curr:{config_version:{proposal_id:431, config_seq:432}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, prev_log_proposal_id:430, prev_lsn:{lsn:87401021210}, prev_mode_pid:431}, log_snapshot_meta:{version:1, base_lsn:{lsn:86430941184}, prev_log_info:{log_id:-1, lsn:{lsn:18446744073709551615}, scn:{val:18446744073709551615}, log_proposal_id:9223372036854775807, accum_checksum:-1}}, log_replica_property_meta:{allow_vote:true, replica_type:"NORMAL_REPLICA"}, log_mode_meta:{version:1, proposal_id:431, mode_version:0, access_mode:1, ref_scn:{val:1}}}, log_meta_storage:{log_tail:{lsn:6807552}, readable_log_tail:{lsn:6807552}, log_block_header:{magic:18754, version:1, min_lsn:{lsn:87437512704}, min_scn:{val:0}, curr_block_id:0, palf_id:1}, block_mgr:{log_dir:"/root/ob/store/clog/tenant_1004/1/meta", dir_fd:127, min_block_id:0, max_block_id:1, curr_writable_block_id:0}, logical_block_size_:67104768, curr_block_writable_size_:60297216, block_header_serialize_buf_:0x7f5474391030}, log_storage:{log_tail:{lsn:87401266262}, 
readable_log_tail:{lsn:87401266262}, log_block_header:{magic:18754, version:1, min_lsn:{lsn:18446744073709551615}, min_scn:{val:18446744073709551615}, curr_block_id:1302, palf_id:1}, block_mgr:{log_dir:"/root/ob/store/clog/tenant_1004/1/log", dir_fd:131, min_block_id:1223, max_block_id:1303, curr_writable_block_id:1302}, logical_block_size_:67104768, curr_block_writable_size_:36246442, block_header_serialize_buf_:0x7f5474392cb0}, palf_epoch:1, this:0x7f547438faf0}, [Reconfirm]={palf_id:1, self:"127.0.0.1:2882", state:"FINISHED", new_proposal_id:431, prepare_log_ack_list:0, curr_paxos_follower_list:0, majority_cnt:1, majority_max_log_server:"127.0.0.1:2882", majority_max_accept_pid:430, majority_max_lsn:{lsn:87401021331}, saved_end_lsn:{lsn:87401021331}, last_submit_prepare_req_time_us:1710482152816141, last_fetch_log_time_us:-1, last_record_sw_start_id:-1, has_notify_fetch:false, this:0x7f547438f530}) [2024-03-15 07:03:42.460499] INFO [PALF] check_and_switch_state (palf_handle_impl.cpp:2129) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=244] [PALF_DUMP](palf_id=1, self="127.0.0.1:2882", ack_info_list=[{member:{server:"127.0.0.1:2882", timestamp:1, flag:0}, last_ack_time_us:1710486221832724, last_flushed_end_lsn:{lsn:87401266262}}]) [2024-03-15 07:03:42.460517] INFO [PALF] report_log_task_trace (log_sliding_window.cpp:3978) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=15] current log_task status(palf_id=1, self="127.0.0.1:2882", log_id=267489708, log_task={header:{begin_lsn:{lsn:18446744073709551615}, end_lsn:{lsn:18446744073709551615}, log_id:-1, min_scn:{val:18446744073709551615}, max_scn:{val:18446744073709551615}, data_len:0, proposal_id:9223372036854775807, prev_lsn:{lsn:18446744073709551615}, prev_proposal_id:9223372036854775807, committed_end_lsn:{lsn:18446744073709551615}, data_checksum:-1, accum_checksum:-1, is_padding_log:false, is_raw_write:false}, state_map:{val:0}, ref_cnt:0, gen_ts:0, submit_wait_time:0, submit_ts:0, flush_cost:0, flushed_ts:0}, ack_info_list=[{member:{server:"127.0.0.1:2882", timestamp:1, flag:0}, last_ack_time_us:1710486221832724, last_flushed_end_lsn:{lsn:87401266262}}]) [2024-03-15 07:03:42.462078] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.462110] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.466694] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=0] table not 
exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.466818] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=120][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.466853] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=34][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:42.466868] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=14][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:42.466882] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=9][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.466896] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=13][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.466911] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=7][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:42.466938] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=25][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.466954] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=16][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.466974] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=19][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.466987] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=12][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.466998] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=11][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.467014] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=14][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.467039] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=22][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.467066] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=15][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.467102] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] 
[lt=32][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.467122] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=16][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.467137] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=11][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:42.467170] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=29][errcode=-5019] fail to handle text query(stmt=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, ret=-5019) [2024-03-15 07:03:42.467185] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=13][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:42.467195] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=9][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.467217] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=16][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:42.467261] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=16][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.467280] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=19][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.467288] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=7][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:42.467311] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8D-0-0] [lt=8][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:42.467325] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [783][T1004_Occam][T1004][YB427F000001-000613ACA9FF9B8D-0-0] [lt=12][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, aret=-5019, ret=-5019) [2024-03-15 07:03:42.467335] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.467345] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) 
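The -5019 cascade above is the sys-tenant inner SQL path failing end to end: tenant 1004's Occam thread asks the sys tenant (tenant_id=1) for its own zone, the resolver reports OB_TABLE_NOT_EXIST for oceanbase.__all_server, and retry_while_no_tenant_resource gives up. Read together with the neighbouring -4283 OB_GTS_NOT_READY retries on __all_core_table, this looks like the sys tenant not yet serving reads rather than a genuinely missing table. A hedged way to confirm once a sys-tenant session is reachable is to re-issue the same statements by hand; both queries below are copied verbatim from the log, and only the idea of running them interactively is an assumption.

  -- Inner query issued on behalf of tenant 1004 (verbatim from the log):
  SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882;

  -- Bootstrap-level query the retry loops are stuck on (verbatim from the log):
  SELECT row_id, column_name, column_value FROM __all_core_table
  WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name;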
[2024-03-15 07:03:42.467353] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.467362] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5019] query failed(ret=-5019, conn=0x7f547e5f2050, start=1710486222466481, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.467374] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:42.467383] WDIAG get_my_sql_result_ (ob_table_access_helper.h:431) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5019] GCTX.sql_proxy_ read failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1004, tenant_id=1, columns=0x7f547525c1c8, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, columns_str="zone") [2024-03-15 07:03:42.467404] WDIAG read_and_convert_to_values_ (ob_table_access_helper.h:332) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-5019] fail to get ObMySQLResult(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1004, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.467563] WDIAG [COORDINATOR] get_self_zone_name (table_accessor.cpp:530) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5019] get zone from __all_server failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", columns=0x7f547525c1c8, where_condition="where svr_ip='127.0.0.1' and svr_port=2882", zone_name_holder=) [2024-03-15 07:03:42.467598] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:463) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=35][errcode=-5019] get self zone name failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:42.467611] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:472) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5019] zone name is empty(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:42.467636] WDIAG [COORDINATOR] refresh (ob_leader_coordinator.cpp:143) [783][T1004_Occam][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5019] get all ls election reference info failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.472246] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.472281] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-4264] Log out of disk space(msg="log disk space is 
almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.473643] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.473690] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=35][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486222412264], now0=[mts=1710486222412264], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.473724] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=43][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222411034, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.473823] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=87][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:42.473861] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=38][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222411034, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.473912] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] 
[lt=47] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222411034, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.474000] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=86][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.474020] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=17][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.474031] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.474042] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.474052] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.474065] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=12][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=1, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.474087] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18] will sleep(sleep_us=1000, remain_us=1861062, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=1, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.475184] WDIAG [SERVER] 
after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:1, local_retry_times:1, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.475235] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=34][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.475256] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=20][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.475264] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.475288] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.475483] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.475509] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=26][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.479536] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222479505], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.479579] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=40][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222479505], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.479610] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=17] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222479505]}) [2024-03-15 07:03:42.479650] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=26] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.479663] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12][errcode=-4038] post gts request failed(ret=-4038, 
ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222479645], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.479693] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=14] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222479645]}) [2024-03-15 07:03:42.480055] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222480043], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.480091] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=26] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222480043]}) [2024-03-15 07:03:42.482052] INFO [PALF] check_and_switch_state (palf_handle_impl.cpp:2125) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=27] [PALF_DUMP](palf_id=1001, self="127.0.0.1:2882", [SlidingWindow]={palf_id:1001, self:"127.0.0.1:2882", lsn_allocator:{max_log_id:233684893, max_lsn:34077856158, max_scn:{val:1710506547039047538}}, group_buffer:{log_group_buffer: start_lsn:{lsn:34077616604}, reuse_lsn:{lsn:34077856158}, reserved_buffer_size:41943040, available_buffer_size:33554432}, last_submit_lsn:{lsn:34077856045}, last_submit_end_lsn:{lsn:34077856158}, last_submit_log_id:233684893, last_submit_log_pid:428, max_flushed_lsn:{lsn:34077856045}, max_flushed_end_lsn:{lsn:34077856158}, max_flushed_log_pid:428, committed_end_lsn:{lsn:34077856158}, last_slide_log_id:233684893, last_slide_scn:{val:1710506547039047538}, last_slide_lsn:{lsn:34077856045}, last_slide_end_lsn:{lsn:34077856158}, last_slide_log_pid:428, last_slide_log_accum_checksum:2496911720, last_fetch_end_lsn:{lsn:18446744073709551615}, last_fetch_max_log_id:-1, last_fetch_committed_end_lsn:{lsn:18446744073709551615}, last_truncate_lsn:{lsn:18446744073709551615}, this:0x7f54743f6530}, [StateMgr]={this:0x7f54743f93f0, self:"127.0.0.1:2882", palf_id:1001, role:"LEADER", replica_type:"NORMAL_REPLICA", state:"ACTIVE", prepare_meta:{version:1, voted_for:"0.0.0.0:0", log_proposal_id:428}, leader:"127.0.0.1:2882", leader_epoch:1, is_sync_enabled:true, allow_vote:true, pending_end_lsn:{lsn:18446744073709551615}, scan_disk_log_finished:true, last_check_start_id:233684894, is_changing_config_with_arb:false, reconfirm_start_time_us:1710482154325002, palf_role_change_cb:0x7f54743ff670, allow_vote:true}, [ConfigMgr]={palf_id:1001, self:"127.0.0.1:2882", alive_paxos_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, alive_paxos_replica_num:1, log_ms_meta:{version:2, proposal_id:428, prev:{config_version:{proposal_id:427, config_seq:428}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, curr:{config_version:{proposal_id:428, config_seq:429}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, prev_log_proposal_id:427, 
prev_lsn:{lsn:34077616604}, prev_mode_pid:428}, prev_log_proposal_id:427, prev_lsn:{lsn:34077616604}, prev_mode_pid:428, state:0, persistent_config_version:{proposal_id:428, config_seq:429}, ms_ack_list:0, resend_config_version:{proposal_id:428, config_seq:429}, resend_log_list:{learner_num:0, learner_array:[]}, last_submit_config_log_time_us:-1, region:"default_region", paxos_member_region_map:{}, register_time_us:-1, parent:"0.0.0.0:0", parent_keepalive_time_us:-1, last_submit_register_req_time_us:-1, children:{learner_num:0, learner_array:[]}, last_submit_keepalive_time_us:-1, this:0x7f54743f75b0}, [ModeMgr]={palf_id:1001, self:"127.0.0.1:2882", applied_mode_meta:{version:1, proposal_id:428, mode_version:0, access_mode:1, ref_scn:{val:1681902235296312446}}, accepted_mode_meta:{version:1, proposal_id:428, mode_version:0, access_mode:1, ref_scn:{val:1681902235296312446}}, last_submit_mode_meta:{version:1, proposal_id:428, mode_version:0, access_mode:1, ref_scn:{val:1681902235296312446}}, state:"MODE_INIT", new_proposal_id:9223372036854775807, local_max_lsn:{lsn:18446744073709551615}, local_max_log_pid:9223372036854775807, max_majority_accepted_pid:9223372036854775807, max_majority_lsn:{lsn:18446744073709551615}, max_majority_accepted_mode_meta:{version:-1, proposal_id:9223372036854775807, mode_version:9223372036854775807, access_mode:0, ref_scn:{val:18446744073709551615}}, follower_list:0, ack_list:0, majority_cnt:9223372036854775807}, [LogEngine]={palf_id:1001, is_inited:true, min_block_max_scn:{val:18446744073709551615}, min_block_id:274894685184, base_lsn_for_block_gc:{lsn:33753698304}, log_meta:{version:1, log_prepare_meta:{version:1, voted_for:"0.0.0.0:0", log_proposal_id:428}, log_config_meta:{version:2, proposal_id:428, prev:{config_version:{proposal_id:427, config_seq:428}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, curr:{config_version:{proposal_id:428, config_seq:429}, log_sync_memberlist:1{server:"127.0.0.1:2882", timestamp:1, flag:0}, log_sync_replica_num:1, arbitration_member:{server:"0.0.0.0:0", timestamp:-1, flag:0}, learnerlist:{learner_num:0, learner_array:[]}, degraded_learnerlist:{learner_num:0, learner_array:[]}}, prev_log_proposal_id:427, prev_lsn:{lsn:34077616604}, prev_mode_pid:428}, log_snapshot_meta:{version:1, base_lsn:{lsn:33753698304}, prev_log_info:{log_id:-1, lsn:{lsn:18446744073709551615}, scn:{val:18446744073709551615}, log_proposal_id:9223372036854775807, accum_checksum:-1}}, log_replica_property_meta:{allow_vote:true, replica_type:"NORMAL_REPLICA"}, log_mode_meta:{version:1, proposal_id:428, mode_version:0, access_mode:1, ref_scn:{val:1681902235296312446}}}, log_meta_storage:{log_tail:{lsn:6512640}, readable_log_tail:{lsn:6512640}, log_block_header:{magic:18754, version:1, min_lsn:{lsn:34089222144}, min_scn:{val:0}, curr_block_id:0, palf_id:1001}, block_mgr:{log_dir:"/root/ob/store/clog/tenant_1004/1001/meta", dir_fd:114, min_block_id:0, max_block_id:1, curr_writable_block_id:0}, logical_block_size_:67104768, curr_block_writable_size_:60592128, block_header_serialize_buf_:0x7f54743fb030}, log_storage:{log_tail:{lsn:34077856158}, readable_log_tail:{lsn:34077856158}, log_block_header:{magic:18754, version:1, min_lsn:{lsn:18446744073709551615}, min_scn:{val:18446744073709551615}, curr_block_id:507, palf_id:1001}, 
block_mgr:{log_dir:"/root/ob/store/clog/tenant_1004/1001/log", dir_fd:135, min_block_id:486, max_block_id:508, curr_writable_block_id:507}, logical_block_size_:67104768, curr_block_writable_size_:11365986, block_header_serialize_buf_:0x7f54743fccb0}, palf_epoch:2, this:0x7f54743f9af0}, [Reconfirm]={palf_id:1001, self:"127.0.0.1:2882", state:"FINISHED", new_proposal_id:428, prepare_log_ack_list:0, curr_paxos_follower_list:0, majority_cnt:1, majority_max_log_server:"127.0.0.1:2882", majority_max_accept_pid:427, majority_max_lsn:{lsn:34077616717}, saved_end_lsn:{lsn:34077616717}, last_submit_prepare_req_time_us:1710482154325192, last_fetch_log_time_us:-1, last_record_sw_start_id:-1, has_notify_fetch:false, this:0x7f54743f9530}) [2024-03-15 07:03:42.482273] INFO [PALF] check_and_switch_state (palf_handle_impl.cpp:2129) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=215] [PALF_DUMP](palf_id=1001, self="127.0.0.1:2882", ack_info_list=[{member:{server:"127.0.0.1:2882", timestamp:1, flag:0}, last_ack_time_us:1710486220830382, last_flushed_end_lsn:{lsn:34077856158}}]) [2024-03-15 07:03:42.482291] INFO [PALF] report_log_task_trace (log_sliding_window.cpp:3978) [795][T1004_LogLoop][T1004][Y0-0000000000000000-0-0] [lt=15] current log_task status(palf_id=1001, self="127.0.0.1:2882", log_id=233684894, log_task={header:{begin_lsn:{lsn:18446744073709551615}, end_lsn:{lsn:18446744073709551615}, log_id:-1, min_scn:{val:18446744073709551615}, max_scn:{val:18446744073709551615}, data_len:0, proposal_id:9223372036854775807, prev_lsn:{lsn:18446744073709551615}, prev_proposal_id:9223372036854775807, committed_end_lsn:{lsn:18446744073709551615}, data_checksum:-1, accum_checksum:-1, is_padding_log:false, is_raw_write:false}, state_map:{val:0}, ref_cnt:0, gen_ts:0, submit_wait_time:0, submit_ts:0, flush_cost:0, flushed_ts:0}, ack_info_list=[{member:{server:"127.0.0.1:2882", timestamp:1, flag:0}, last_ack_time_us:1710486220830382, last_flushed_end_lsn:{lsn:34077856158}}]) [2024-03-15 07:03:42.482401] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.482507] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=104][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.492694] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", 
log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.492728] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.493496] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [194][RpcIO][T0][Y0-0000000000000000-0-0] [lt=23] [RPC EASY STAT](log_str=conn count=1/1, request done=47206/47206, request doing=0/0) [2024-03-15 07:03:42.502052] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=13][errcode=0] server is initiating(server_id=0, local_seq=27141, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:42.502101] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=46] new mysql sessid created(conn.sessid_=3221252613, support_ssl=false) [2024-03-15 07:03:42.502222] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=17] sm conn init succ(conn.sessid_=3221252613, sess.client_addr_="172.21.122.86:42714") [2024-03-15 07:03:42.502253] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=22] accept one succ(*s={this:0x7f545a1fd240, fd:137, err:0, last_decode_time_:0, last_write_time_:1710486222502218, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.502821] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=32] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.502849] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=28][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.502861] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=11][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.502859] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.502874] WDIAG [SERVER] dispatch_req 
(ob_srv_deliver.cpp:115) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=11][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222502794, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.502897] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.502908] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=33][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222502794, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.503005] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=36] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.503161] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=155][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.503176] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=13][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.503188] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=11][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:42.503202] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=14][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:42.503235] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=8] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.503297] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=23][errcode=0] server close connection(sessid=3221252613, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.503324] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=23][errcode=-4018] get session fail(ret=-4018, sessid=3221252613, proxy_sessid=0) [2024-03-15 07:03:42.503337] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) 
[110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=11][errcode=-4016] session is null [2024-03-15 07:03:42.503363] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB036-0-0] [lt=8] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252613, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:42.503553] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=19][errcode=-4015] close sql sock by user req(*s={this:0x7f545a1fd240, fd:137, err:5, last_decode_time_:1710486222502795, last_write_time_:1710486222503548, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.503612] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=50] kill and revert session(conn.sessid_=3221252613, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:42.503635] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=22] can close safely, do destroy(*s={this:0x7f545a1fd240, fd:137, err:5, last_decode_time_:1710486222502795, last_write_time_:1710486222503548, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.503659] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=22] connection close(sessid=3221252613, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.508716] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=10][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.508756] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=40][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.508778] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222508701) [2024-03-15 07:03:42.508795] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=17][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222408654, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 
07:03:42.511311] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=27] get wrs ts(ls_id={id:1}, delta=146267658572, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.511341] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=26] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.513027] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.513103] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=75][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.513996] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62E-0-0] [lt=146][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.514036] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62E-0-0] [lt=40][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.514101] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62E-0-0] [lt=62][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) 
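The repeated -4264 ERROR records put numbers on the clog pressure for tenant 1003: used_size 875 MB of total_size 921 MB is 95%, exactly limit_percent (log_disk_utilization_limit_threshold), while warn_percent 80% corresponds to warn_size 737 MB; because recycle_blocks_ finds no block whose base LSN has advanced, writes stay blocked and the election priority records report the "clog disk full event" fatal failure. A minimal diagnostic/remediation sketch follows; it assumes an OceanBase 4.x deployment, and the view and DDL names used here (GV$OB_UNITS, GV$OB_LOG_STAT, ALTER RESOURCE UNIT ... LOG_DISK_SIZE) as well as the unit name are assumptions to verify against the installed version, not values taken from this log.

  -- Assumed view: compare each unit's log disk budget with what PALF currently holds.
  SELECT tenant_id, svr_ip, log_disk_size, log_disk_in_use FROM oceanbase.GV$OB_UNITS;

  -- Assumed view: see which log streams still pin old blocks (base/begin LSN not advanced).
  SELECT tenant_id, ls_id, role, base_lsn, begin_lsn, end_lsn FROM oceanbase.GV$OB_LOG_STAT;

  -- Assumed remediation: enlarge the log disk of the unit serving tenant 1003
  -- ('unit_1003' is a hypothetical unit name, not present in the log).
  ALTER RESOURCE UNIT unit_1003 LOG_DISK_SIZE = '4G';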
[2024-03-15 07:03:42.514131] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62E-0-0] [lt=29][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.514169] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62E-0-0] [lt=37][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.516141] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=8] start do ls ha handler(ls_id_array_=[{id:1}, {id:1001}]) [2024-03-15 07:03:42.516187] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=31][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.516208] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1}) [2024-03-15 07:03:42.516226] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1001}) [2024-03-15 07:03:42.516693] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=36][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:164, local_retry_times:164, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.516771] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=52][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.516809] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=36][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.516837] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=26][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.516884] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=25][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.517132] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ 
(ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.517195] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=62][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.520219] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC0-0-0] [lt=118][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.520262] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC0-0-0] [lt=43][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.520287] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC0-0-0] [lt=24][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.520307] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC0-0-0] [lt=19][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.520323] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC0-0-0] [lt=15][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.522399] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 
07:03:42.522448] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=49][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222459625], now0=[mts=1710486222459625], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.522495] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=44][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222458551, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.522571] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=66][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.522608] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=35][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222458551, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.522648] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=36] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, 
snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222458551, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.522755] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=104][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.522773] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=16][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.522783] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.522792] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.522801] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.522814] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=24, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.522831] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=13] will sleep(sleep_us=24000, remain_us=82360, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=24, timeout_timestamp=1710486222605189) [2024-03-15 07:03:42.523206] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, 
disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.523235] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.533386] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC1-0-0] [lt=307][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.533440] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC1-0-0] [lt=54][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.533462] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC1-0-0] [lt=21][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.533478] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC1-0-0] [lt=15][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.533494] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC1-0-0] [lt=16][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.533517] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", 
disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.533584] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=66][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.539453] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.539496] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=41][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486222475557], now0=[mts=1710486222475557], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.539523] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=25][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222474779, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.539633] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=96][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:42.539671] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=38][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222474779, active_ts:-1, commit_ts:-1, 
finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.539737] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=61] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222474779, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.539865] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=126][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.539891] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.539917] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=24][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.539960] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=40][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.539985] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.540018] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=30][errcode=-4283] execute failed(ret=-4283, tenant_id=1, 
executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=2, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.540041] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18] will sleep(sleep_us=2000, remain_us=1795108, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=2, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.540117] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.542147] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=17][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:2, local_retry_times:2, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.542215] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=44][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.542231] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=16][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.542245] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=13][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.542279] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=13][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.542508] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.542553] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=44][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.543752] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, 
log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.543814] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.547055] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=22][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:24, local_retry_times:24, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.547115] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=45][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.547127] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.547135] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.547157] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=8][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.547304] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.547353] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=48][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.547416] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=38][errcode=-4012] acquire global snapshot fail(ret=-4012, gts_ahead=0, expire_ts=1710486222535189, now=[mts=1710486222547411], now0=[mts=1710486222547411], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.547798] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=378][errcode=-4012] acquire global snapshot fail(ret=-4012, tx={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222546921, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.547906] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=85][errcode=-4012] fail to get snapshot(ret=-4012, local_ls_id={id:1}, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}) [2024-03-15 07:03:42.547942] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=37][errcode=-4012] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4012, session_id=1, *tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222546921, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.548020] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=74] start stmt(ret=-4012, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539ce850, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222546921, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, 
commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486220606063, use_das=false, nested_level=0, session={this:0x7f5509bf80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539ce850}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.548157] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=134][errcode=-4012] fail to start stmt(ret=-4012, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.548182] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=21][errcode=-4012] fail start stmt(ret=-4012) [2024-03-15 07:03:42.548195] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12][errcode=-4012] execute plan failed(ret=-4012) [2024-03-15 07:03:42.548209] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12][errcode=-4012] open result set failed(ret=-4012) [2024-03-15 07:03:42.548221] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=10][errcode=-4012] result set open failed(ret=-4012, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.548239] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=15][errcode=-4012] execute failed(ret=-4012, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=25, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.548272] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=29][errcode=-4012] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:25, local_retry_times:25, err_:-4012, err_:"OB_TIMEOUT", retry_type:0, client_ret:-4012}, need_retry=false) [2024-03-15 07:03:42.548307] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4012] result set close failed(ret=-4012) [2024-03-15 07:03:42.548329] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=21][errcode=-4012] result set close failed(ret=-4012) [2024-03-15 07:03:42.548341] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=11][errcode=-4012] failed to close result(close_ret=-4012, ret=-4012) [2024-03-15 07:03:42.548367] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8027-0-0] [lt=12][errcode=-4012] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4012, 
ret=-4012) [2024-03-15 07:03:42.548387] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:42.548389] INFO [SERVER] process_final (ob_inner_sql_connection.cpp:615) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=17] slow inner sql(last_ret=-4012, sql={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, process_time=1942323) [2024-03-15 07:03:42.548410] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=21][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:42.548434] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=11][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_ls_meta_table, db_name=oceanbase) [2024-03-15 07:03:42.548445] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=10][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_ls_meta_table) [2024-03-15 07:03:42.548405] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=16][errcode=-4012] failed to process final(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, aret=-4012, ret=-4012) [2024-03-15 07:03:42.548456] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=7][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.548462] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.548460] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=50][errcode=-4012] execute sql failed(ret=-4012, tenant_id=1, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.548474] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] Table 'oceanbase.__all_ls_meta_table' doesn't exist [2024-03-15 07:03:42.548482] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.548479] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=17][errcode=-4012] retry_while_no_tenant_resource failed(ret=-4012, tenant_id=1) [2024-03-15 07:03:42.548488] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) 
[727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.548494] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.548500] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.548495] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4012] execute_read failed(ret=-4012, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.548506] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.548513] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.548520] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.548534] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=7][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.548533] WDIAG [COMMON.MYSQLP] read_without_check_sys_variable (ob_sql_client_decorator.cpp:119) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4012] failed to read without check sys variable(ret=-4012, sql="SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name", tenant_id=1, check_sys_variable=false, snapshot_timestamp=-1) [2024-03-15 07:03:42.548542] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=8][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.548551] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.548551] WDIAG [SHARE] load (ob_core_table_proxy.cpp:436) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=36][errcode=-4012] execute sql failed(ret=-4012, ret="OB_TIMEOUT", tenant_id=1, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:42.548559] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=5][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:42.548566] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to handle text query(stmt=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port, ret=-5019) [2024-03-15 07:03:42.548573] WDIAG 
[SERVER] do_query (ob_inner_sql_connection.cpp:636) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=7][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:42.548580] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.548594] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=9][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:42.548605] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=11][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.548611] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=5][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.548616] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=4][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:42.548629] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:42.548638] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=8][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5019, ret=-5019) [2024-03-15 07:03:42.548646] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=8][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.548653] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:42.548660] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.548667] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] query failed(ret=-5019, conn=0x7f54845f4050, start=1710486222548209, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.548676] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) 
[727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=8][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:42.548720] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=42][errcode=-5019] execute sql failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.548742] WDIAG [SHARE] load (ob_core_table_proxy.cpp:368) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=10][errcode=-4012] load failed(ret=-4012, for_update=false) [2024-03-15 07:03:42.548769] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=9][errcode=-5019] get all ls info by persistent_ls_ failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003) [2024-03-15 07:03:42.548779] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=10][errcode=-5019] fail to get ls infos by tenant(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003, inner_table_only=false) [2024-03-15 07:03:42.548788] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=8][errcode=-5019] fail to open iterator(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.548764] WDIAG [SHARE] get (ob_global_stat_proxy.cpp:422) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=20][errcode=-4012] core_table load failed(ret=-4012, ret="OB_TIMEOUT") [2024-03-15 07:03:42.548795] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] ls table iterator next failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.548805] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=7][errcode=-5019] build replica map from ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=0) [2024-03-15 07:03:42.548802] WDIAG [SHARE] get_baseline_schema_version (ob_global_stat_proxy.cpp:386) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=37][errcode=-4012] get failed(ret=-4012) [2024-03-15 07:03:42.548813] WDIAG [SERVER] check_ls_table (ob_tenant_meta_checker.cpp:188) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=7][errcode=-5019] check ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=0) [2024-03-15 07:03:42.548821] WDIAG [SERVER] runTimerTask (ob_tenant_meta_checker.cpp:44) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B90-0-0] [lt=6][errcode=-5019] fail to check ls meta table(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.548817] WDIAG [SHARE.SCHEMA] get_baseline_schema_version (ob_schema_service_sql_impl.cpp:808) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=13][errcode=-4012] get_baseline_schema_version failed(ret=-4012, ret="OB_TIMEOUT", schema_status={tenant_id:1, snapshot_timestamp:-1, readable_schema_version:-1}) [2024-03-15 07:03:42.548854] WDIAG [SHARE.SCHEMA] get_baseline_schema_version (ob_multi_version_schema_service.cpp:4005) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=18][errcode=-4012] get baseline schema version failed(ret=-4012, ret="OB_TIMEOUT", schema_status={tenant_id:1, snapshot_timestamp:-1, readable_schema_version:-1}) 
[2024-03-15 07:03:42.548872] WDIAG [SERVER] try_load_baseline_schema_version_ (ob_server_schema_updater.cpp:511) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=16][errcode=0] fail to update baseline schema version(tmp_ret=-4012, tmp_ret="OB_TIMEOUT", *tenant_id=1) [2024-03-15 07:03:42.549234] WDIAG [SERVER] batch_process_tasks (ob_server_schema_updater.cpp:229) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8027-0-0] [lt=14][errcode=-4023] fail to process refresh task(ret=-4023, ret="OB_EAGAIN", tasks.at(0)={type:1, did_retry:true, schema_info:{schema_version:-1, tenant_id:0, sequence_id:18446744073709551615}}) [2024-03-15 07:03:42.549282] WDIAG [SERVER] batch_process_tasks (ob_uniq_task_queue.h:498) [127][SerScheQueue1][T0][Y0-0000000000000000-0-0] [lt=44][errcode=-4023] fail to batch process task(ret=-4023) [2024-03-15 07:03:42.549297] WDIAG [SERVER] run1 (ob_uniq_task_queue.h:449) [127][SerScheQueue1][T0][Y0-0000000000000000-0-0] [lt=13][errcode=-4023] fail to batch execute task(ret=-4023, tasks.count()=1) [2024-03-15 07:03:42.550953] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:42.551000] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=43][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:42.551021] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=19][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_ls_meta_table, db_name=oceanbase) [2024-03-15 07:03:42.551080] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=56][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_ls_meta_table) [2024-03-15 07:03:42.551100] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=16][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.551112] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.551130] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] Table 'oceanbase.__all_ls_meta_table' doesn't exist [2024-03-15 07:03:42.551162] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=30][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.551176] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.551188] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.551201] 
WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.551214] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.551227] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.551240] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.551263] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.551277] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.551296] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.551309] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:42.551321] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] fail to handle text query(stmt=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port, ret=-5019) [2024-03-15 07:03:42.551336] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:42.551349] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.551375] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=21][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:42.551411] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=32][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.551454] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=41][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.551467] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) 
[543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=12][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:42.551495] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=15][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:42.551525] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=27][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5019, ret=-5019) [2024-03-15 07:03:42.551495] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.551539] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=13][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.551548] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=9][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:42.551541] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=45][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.551556] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=7][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.551565] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=7][errcode=-5019] query failed(ret=-5019, conn=0x7f5492b2e050, start=1710486222550743, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.551577] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:42.551566] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222551480) [2024-03-15 07:03:42.551585] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=6][errcode=-5019] execute sql failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE 
tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:42.551586] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222351452, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.551700] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=9][errcode=-5019] get all ls info by persistent_ls_ failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1) [2024-03-15 07:03:42.551722] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=21][errcode=-5019] fail to get ls infos by tenant(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, inner_table_only=true) [2024-03-15 07:03:42.551747] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=23][errcode=-5019] fail to open iterator(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.551763] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=14][errcode=-5019] ls table iterator next failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.551778] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=11][errcode=-5019] build replica map from ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=1) [2024-03-15 07:03:42.551795] WDIAG [SERVER] check_ls_table (ob_tenant_meta_checker.cpp:194) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=15][errcode=-5019] check ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=1) [2024-03-15 07:03:42.551814] WDIAG [SERVER] runTimerTask (ob_tenant_meta_checker.cpp:44) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B94-0-0] [lt=18][errcode=-5019] fail to check ls meta table(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.554234] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.554300] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=64][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.559108] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222559096607, start_id=0, end_id=0) [2024-03-15 07:03:42.564479] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.564536] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.573662] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=66] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1806, replace_start_pos=314560, replace_num=15728) [2024-03-15 07:03:42.574670] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.574737] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=66][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.580602] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.580635] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=29][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222580586], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.580655] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=17][errcode=-4023] post gts request 
failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222580586], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.580685] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222580586]}) [2024-03-15 07:03:42.580698] WDIAG [STORAGE.TRANS] operator() (ob_ts_mgr.h:167) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13][errcode=-4023] refresh gts failed(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:42.580708] INFO [STORAGE.TRANS] operator() (ob_ts_mgr.h:171) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] refresh gts functor(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:42.580727] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=8] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.580713] WDIAG [SHARE.LOCATION] batch_process_tasks (ob_ls_location_service.cpp:524) [159][SysLocAsyncUp0][T0][YB427F000001-000613ACAFBFB1B5-0-0] [lt=22][errcode=0] tenant schema is not ready, need wait(ret=0, ret="OB_SUCCESS", superior_tenant_id=1, tasks=[{cluster_id:1, tenant_id:1, ls_id:{id:1}, add_timestamp:1710486222580667}]) [2024-03-15 07:03:42.580740] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222580723], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.580759] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222580723]}) [2024-03-15 07:03:42.581017] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.581032] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222581010], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.581057] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222581010]}) [2024-03-15 07:03:42.584937] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 
07:03:42.584992] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.585661] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.585714] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=51][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486222517267], now0=[mts=1710486222517267], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.585741] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=25][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222516347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.585849] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=93][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 07:03:42.585902] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=53][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222516347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, 
commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.585982] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=76] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222516347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.586075] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=91][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.586097] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=18][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.586108] WDIAG [SQL] open (ob_result_set.cpp:157) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=9][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.586120] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.586130] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=7][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.586144] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=13][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=165, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.586160] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12] will 
sleep(sleep_us=100000, remain_us=7420891, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=165, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.586378] INFO [STORAGE] gc_tables_in_queue (ob_tenant_meta_mem_mgr.cpp:360) [451][T1_T3mGC][T1][Y0-0000000000000000-0-0] [lt=62] Recycle 0 table(ret=0, allocator_={used:1634801, total:2011107}, tablet_pool_={typeid(T).name():"N9oceanbase7storage8ObTabletE", sizeof(T):2560, used_obj_cnt:797, free_obj_hold_cnt:0, allocator used:2091328, allocator total:2223872}, sstable_pool_={typeid(T).name():"N9oceanbase12blocksstable9ObSSTableE", sizeof(T):1088, used_obj_cnt:1681, free_obj_hold_cnt:0, allocator used:1936512, allocator total:2027648}, ddl_kv_pool_={typeid(T).name():"N9oceanbase7storage7ObDDLKVE", sizeof(T):3008, used_obj_cnt:0, free_obj_hold_cnt:0, allocator used:0, allocator total:0}, memtable_pool_={typeid(T).name():"N9oceanbase8memtable10ObMemtableE", sizeof(T):1920, used_obj_cnt:66, free_obj_hold_cnt:0, allocator used:130944, allocator total:196224}, tablet count=797, min_minor_cnt=0, pinned_tablet_cnt=0) [2024-03-15 07:03:42.595152] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=47][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.595269] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=88][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.605471] INFO [STORAGE.TRANS] print_statistics (ob_xa_define.cpp:429) [733][T1003_ObXAHbWor][T1003][Y0-0000000000000000-0-0] [lt=8] xa statistics(*this={total_active_xa_ctx_count:0}) [2024-03-15 07:03:42.605478] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.605533] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, 
warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.605802] WDIAG [ARCHIVE] do_consume_send_task_ (ob_archive_sender.cpp:288) [562][T1_ArcSender][T1][YB427F000001-000613ACA81F7BAA-0-0] [lt=12][errcode=-4018] get send task failed(ret=-4018) [2024-03-15 07:03:42.606134] INFO [SERVER] try_reload_schema (ob_server_schema_updater.cpp:435) [949][LeaseHB][T0][Y0-0000000000000000-0-0] [lt=12] schedule fetch new schema task(ret=0, ret="OB_SUCCESS", schema_info={schema_version:-1, tenant_id:0, sequence_id:18446744073709551615}) [2024-03-15 07:03:42.606156] INFO [SERVER] process_refresh_task (ob_server_schema_updater.cpp:254) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8028-0-0] [lt=13] [REFRESH_SCHEMA] start to process schema refresh task(ret=0, ret="OB_SUCCESS", schema_info={schema_version:-1, tenant_id:0, sequence_id:18446744073709551615}) [2024-03-15 07:03:42.606180] WDIAG [SERVER] process_refresh_task (ob_server_schema_updater.cpp:267) [127][SerScheQueue1][T0][YB427F000001-000613ACABEF8028-0-0] [lt=21][errcode=-4023] rootservice is not in full service, try again(ret=-4023, ret="OB_EAGAIN", GCTX.root_service_->in_service()=true, GCTX.root_service_->is_full_service()=false) [2024-03-15 07:03:42.606187] INFO [SERVER] do_heartbeat_event (ob_heartbeat.cpp:196) [949][LeaseHB][T0][Y0-0000000000000000-0-0] [lt=54] try reload schema success(schema_version=1, refresh_schema_info={schema_version:-1, tenant_id:0, sequence_id:18446744073709551615}, schema_ret=0) [2024-03-15 07:03:42.607100] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.607132] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=32][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.608773] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=15] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1, serve_leader_epoch=0, cur_leader_epoch=420, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.608820] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=35] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.608834] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14] start 
TenantWeakReadClusterService(tenant_id=1) [2024-03-15 07:03:42.608970] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.608996] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=25][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486222542590], now0=[mts=1710486222542590], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.609013] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222541489, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.609087] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=64][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:42.609111] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=24][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222541489, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.609158] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=37] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, 
parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222541489, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.609219] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=60][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:42.609233] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:42.609242] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:42.609250] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:42.609257] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:42.609269] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=3, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.609287] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15] will sleep(sleep_us=3000, remain_us=1725861, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=3, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.609989] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=11] table not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) 
[2024-03-15 07:03:42.610026] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=34][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) [2024-03-15 07:03:42.610043] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=15][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_weak_read_service, db_name=oceanbase) [2024-03-15 07:03:42.610059] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=15][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_weak_read_service) [2024-03-15 07:03:42.610078] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=13][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.610089] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=11][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.610107] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] Table 'oceanbase.__all_weak_read_service' doesn't exist [2024-03-15 07:03:42.610124] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=16][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.610135] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.610145] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=9][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.610158] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=12][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.610168] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=9][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.610184] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=14][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.610196] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.610224] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=16][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.610236] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) 
[553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=11][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.610256] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=16][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.610268] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=9][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:42.610281] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=11][errcode=-5019] fail to handle text query(stmt=select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = '', ret=-5019) [2024-03-15 07:03:42.610293] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=11][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:42.610308] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=13][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = ''"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.610337] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=23][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:42.610359] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=19][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.610350] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.610370] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.610381] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:42.610372] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.610395] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", 
tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222610338) [2024-03-15 07:03:42.610406] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=10][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = ''"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:42.610413] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486222410319, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.610442] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F97FF-0-0] [lt=34][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = ''"}, aret=-5019, ret=-5019) [2024-03-15 07:03:42.610457] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=13][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.610460] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=39] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1003, serve_leader_epoch=0, cur_leader_epoch=1984, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.610471] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:42.610483] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.610490] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1003, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.610503] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12] start TenantWeakReadClusterService(tenant_id=1003) [2024-03-15 07:03:42.610496] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-5019] query failed(ret=-5019, conn=0x7f54a2df2050, start=1710486222609743, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and 
level_value = '') [2024-03-15 07:03:42.610519] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=22][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:42.610532] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=10][errcode=-5019] execute sql read fail(ret=-5019, ret="OB_TABLE_NOT_EXIST", exec_tenant_id=1, tenant_id=1, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.610616] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] query cluster version range from WRS table fail(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.610633] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222610629, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1822, wlock_time=38, check_leader_time=2, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.610662] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=28][errcode=-5019] start CLUSTER weak read service fail(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1) [2024-03-15 07:03:42.610676] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=420, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.611392] INFO [STORAGE.TRANS] run1 (ob_tx_loop_worker.cpp:100) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=15] tx gc loop thread is running(MTL_ID()=1) [2024-03-15 07:03:42.611443] INFO [STORAGE.TRANS] run1 (ob_tx_loop_worker.cpp:107) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=28] try gc retain ctx [2024-03-15 07:03:42.611464] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EC-0-0] [lt=9][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.611546] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222611544, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1064, wlock_time=32, check_leader_time=1, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.611580] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) 
[737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=1984, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.612392] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:3, local_retry_times:3, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:42.612441] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=21][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.612450] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:42.612458] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:42.612476] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:42.612570] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.612586] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=16][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:5}) [2024-03-15 07:03:42.613440] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=39] get wrs ts(ls_id={id:1}, delta=146267760637, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.613462] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.614125] INFO [STORAGE.TRANS] do_tx_gc_ 
(ob_tx_loop_worker.cpp:226) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=38] [Tx Loop Worker] check tx scheduler success(MTL_ID()=1, *ls_ptr={ls_meta:{tenant_id:1, ls_id:{id:1}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710208801027009356}, clog_base_lsn:{lsn:114682048512}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:18446744073709551615}, tablet_change_checkpoint_scn:{val:1708138173521664934}, all_id_meta:{id_meta:[{limited_id:1710506557268654309, latest_log_ts:{val:1710506540610900036}}, {limited_id:216000001, latest_log_ts:{val:1710506427288403526}}, {limited_id:2390000001, latest_log_ts:{val:1710506427288403525}}]}}, log_handler:{role:1, proposal_id:420, palf_env_:0x7f54b7ff0030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:42.614232] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=101][errcode=-4023] get gts error(ret=-4023) [2024-03-15 07:03:42.614301] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=15] [PALF STAT GROUP LOG INFO](palf_id=1, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=121, avg_group_log_size=121) [2024-03-15 07:03:42.614369] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=42] [PALF STAT APPEND DATA SIZE](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, append size=122) [2024-03-15 07:03:42.615820] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.615904] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=82][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.617656] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [203][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=19] [MYSQL EASY STAT](log_str=conn 
count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.617894] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [202][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=33] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.618839] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [204][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=34] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.619073] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [206][MysqlUnix][T0][Y0-0000000000000000-0-0] [lt=38] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.619765] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [198][BatchIO][T0][Y0-0000000000000000-0-0] [lt=32] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.619914] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [197][BatchIO][T0][Y0-0000000000000000-0-0] [lt=16] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.620273] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [200][BatchIO][T0][Y0-0000000000000000-0-0] [lt=34] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.620399] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [199][BatchIO][T0][Y0-0000000000000000-0-0] [lt=34] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.620606] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [208][RpcUnix][T0][Y0-0000000000000000-0-0] [lt=19] [RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:42.620874] INFO [LIB] stat (utility.h:1140) [461][T1_IOWorker][T1][Y0-0000000000000000-0-0] [lt=14] [PALF STAT WRITE LOG](cur_stat_count=2, stat_interval=1000000, avg cost=6248, this=0x7f54b87fbad8) [2024-03-15 07:03:42.620910] INFO [PALF] inner_append_log (palf_handle_impl.cpp:1660) [461][T1_IOWorker][T1][Y0-0000000000000000-0-0] [lt=35] [PALF STAT INNER APPEND LOG](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, accum_size=243) [2024-03-15 07:03:42.621004] WDIAG [PALF] try_update_match_lsn_map_ (log_sliding_window.cpp:3790) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=7][errcode=0] [MATCH LSN ADVANCE DELAY]match_lsn advance delay too much time(ret=0, palf_id=1, self="127.0.0.1:2882", server="127.0.0.1:2882", update_func={old_end_lsn:{lsn:115793802286}, new_end_lsn:{lsn:115793802407}, old_advance_time_us:1710486221617324, new_ack_time_us:1710486222620996, advance delay(us):1003672}) [2024-03-15 07:03:42.621057] INFO [PALF] try_advance_committed_lsn_ (log_sliding_window.cpp:1572) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=48] [PALF STAT COMMITTED LOG SIZE](palf_id=1, self="127.0.0.1:2882", committed size=243) [2024-03-15 07:03:42.621105] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=16] [PALF STAT FS CB](cur_stat_count=2, stat_interval=1000000, avg cost=18, this=0x7f54b87f33a8) [2024-03-15 07:03:42.621132] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=26] [PALF STAT LOG LIFETIME](cur_stat_count=2, stat_interval=1000000, avg cost=6577, this=0x7f54b87f33d8) [2024-03-15 07:03:42.621146] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] 
[lt=13] [PALF STAT LOG SUBMIT WAIT](cur_stat_count=2, stat_interval=1000000, avg cost=21, this=0x7f54b87f3408) [2024-03-15 07:03:42.621182] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=36] [PALF STAT LOG SLIDE WAIT](cur_stat_count=2, stat_interval=1000000, avg cost=6556, this=0x7f54b87f3438) [2024-03-15 07:03:42.621201] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=11] [PALF STAT FLUSH CB](cur_stat_count=2, stat_interval=1000000, avg cost=144, this=0x7f54b87fbb08) [2024-03-15 07:03:42.623776] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=25] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:42.623887] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=45] Wash time detail, (compute_wash_size_time=155, refresh_score_time=58, wash_time=8) [2024-03-15 07:03:42.625811] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.625835] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=22][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.625842] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=7][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:42.625849] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=6][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:42.625857] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.625861] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.625868] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=3][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:42.625874] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.625878] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.625882] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=3][errcode=-5019] resolve basic table failed(ret=-5019) 
[2024-03-15 07:03:42.625887] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.625891] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.625896] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.625900] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:42.625911] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:42.625916] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.625922] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:42.625927] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:42.625931] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] fail to handle text query(stmt=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, ret=-5019) [2024-03-15 07:03:42.625937] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=4][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:42.625942] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:42.625956] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=9][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:42.625972] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=12][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.625979] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=7][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:42.625985] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] failed to close 
result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:42.626000] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=5][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:42.626014] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7E-0-0] [lt=13][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, aret=-5019, ret=-5019) [2024-03-15 07:03:42.626020] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.626025] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:42.626029] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=4][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:42.626034] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=4][errcode=-5019] query failed(ret=-5019, conn=0x7f54609f8050, start=1710486222625604, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.626041] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=6][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:42.626047] WDIAG get_my_sql_result_ (ob_table_access_helper.h:431) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=4][errcode=-5019] GCTX.sql_proxy_ read failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1, tenant_id=1, columns=0x7f54b0e5c1c8, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, columns_str="zone") [2024-03-15 07:03:42.626064] WDIAG read_and_convert_to_values_ (ob_table_access_helper.h:332) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=15][errcode=-5019] fail to get ObMySQLResult(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:42.626116] WDIAG [COORDINATOR] get_self_zone_name (table_accessor.cpp:530) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=13][errcode=-5019] get zone from __all_server failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", columns=0x7f54b0e5c1c8, where_condition="where svr_ip='127.0.0.1' and svr_port=2882", zone_name_holder=) [2024-03-15 07:03:42.626126] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:463) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=9][errcode=-5019] get self zone name failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:42.626136] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:472) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=10][errcode=-5019] zone name is empty(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:42.626142] WDIAG [COORDINATOR] 
refresh (ob_leader_coordinator.cpp:143) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=4][errcode=-5019] get all ls election reference info failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:42.626209] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.626273] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=65][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.629324] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=13] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=5419, clean_start_pos=1415565, clean_num=31457) [2024-03-15 07:03:42.629616] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.636460] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=43][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.636519] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=59][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.637005] INFO [STORAGE] operator() (ob_tenant_freezer.cpp:125) [627][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=52] ====== tenant freeze timer task ====== [2024-03-15 07:03:42.637053] INFO [STORAGE] log_frozen_memstore_info_if_need_ (ob_tenant_freezer.cpp:1262) [627][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=38] [TenantFreezer] tenant have inactive memstores(ctx.active_memstore_used_=48234496, ctx.total_memstore_used_=373293056, 
ctx.total_memstore_hold_=373293056, memstore_freeze_trigger_limit_=107374180, tenant_id=1003) [2024-03-15 07:03:42.637289] INFO [STORAGE] log_frozen_memstore_info_if_need_ (ob_tenant_freezer.cpp:1271) [627][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=33] [TenantFreezer] oldest frozen memtable(list="{ObITable:{this:0x7f5447a16550, key:{tablet_id:{id:377}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710295225207725827}, end_scn:{val:1710333483941576501}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482270194518}, this:0x7f5447a16550, timestamp:1710482270194518, state:0, freeze_clock:4, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:50, clock:325058560}, host:0x7f54803ee030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482138638204}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710333483941576501}, rec_scn:{val:1710333477525544947}, snapshot_version:{val:1710333299511029035}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482270670403, mt_stat_.ready_for_flush_time:1710482270823408, mt_stat_.create_flush_dag_time:1710486219554061, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f5447a15d80, key:{tablet_id:{id:342}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710319328680530566}, end_scn:{val:1710333369599902242}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482253642783}, this:0x7f5447a15d80, timestamp:1710482253642783, state:0, freeze_clock:4, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:49, clock:272629760}, host:0x7f54803ee030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482138638204}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710333369599902242}, rec_scn:{val:1710323841225273629}, snapshot_version:{val:1710333299511029035}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482270822838, mt_stat_.ready_for_flush_time:1710482270822919, mt_stat_.create_flush_dag_time:1710485037233573, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f5447a155b0, key:{tablet_id:{id:373}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710319328680530566}, end_scn:{val:1710333369599902242}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482253590776}, this:0x7f5447a155b0, timestamp:1710482253590776, state:0, freeze_clock:4, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:48, clock:272629760}, host:0x7f54803ee030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482138638204}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710333369599902242}, rec_scn:{val:1710323853044414963}, snapshot_version:{val:1710333299511029035}, migration_clog_checkpoint_scn:{val:0}, 
is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482270822975, mt_stat_.ready_for_flush_time:1710482270823031, mt_stat_.create_flush_dag_time:1710485037233674, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f5447a14de0, key:{tablet_id:{id:344}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710319328680530566}, end_scn:{val:1710333369599902242}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482253590305}, this:0x7f5447a14de0, timestamp:1710482253590305, state:0, freeze_clock:4, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:47, clock:272629760}, host:0x7f54803ee030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482138638204},") [2024-03-15 07:03:42.638574] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [627][T1003_Occam][T1003][YB427F000001-000613ACA99F8B78-0-0] [lt=34][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.638710] WDIAG [STORAGE] check_and_freeze_normal_data_ (ob_tenant_freezer.cpp:408) [627][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=1][errcode=0] [TenantFreezer] fail to do major freeze(tmp_ret=-5627) [2024-03-15 07:03:42.638759] INFO [STORAGE] check_and_freeze_tx_data_ (ob_tenant_freezer.cpp:448) [627][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=26] [TenantFreezer] Trigger Tx Data Table Self Freeze. (tenant_info_.tenant_id_=1003, tenant_tx_data_mem_used=150907520, self_freeze_max_limit_=53687091, memstore_hold_memory=373293056, self_freeze_tenant_hold_limit_=214748364, self_freeze_min_limit_=10737418) [2024-03-15 07:03:42.639093] INFO [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:74) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=11] start tx data table self freeze task in rpc handle thread(arg_=freeze_type:3) [2024-03-15 07:03:42.639126] INFO [STORAGE] self_freeze_task (ob_tx_data_table.cpp:827) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=26] start tx data table self freeze task(get_ls_id()={id:1}) [2024-03-15 07:03:42.639180] INFO [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:193) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=48] start freeze tx data memtable(ls_id_={id:1}) [2024-03-15 07:03:42.639232] INFO [STORAGE] freeze_ (ob_tx_data_memtable_mgr.cpp:229) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=48] There is a freezed memetable existed. 
Try freeze after flushing it.(ret=-4023, ret="OB_EAGAIN", get_memtable_count_()=2) [2024-03-15 07:03:42.639264] WDIAG [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:207) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=30][errcode=-4023] freeze tx data memtable fail.(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.639278] WDIAG [STORAGE] flush (ob_tx_data_memtable_mgr.cpp:483) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=14][errcode=-4023] freeze failed(ret=-4023, ret="OB_EAGAIN", this=0x7f5484de61b0) [2024-03-15 07:03:42.639293] WDIAG [STORAGE] self_freeze_task (ob_tx_data_table.cpp:831) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=13][errcode=-4023] self freeze of tx data memtable failed.(ret=-4023, ret="OB_EAGAIN", ls_id={id:1}, memtable_mgr_={ObIMemtableMgr:{Memtables:this:0x7f5484de61b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f5484deb290, table_type:1, memtable_head:0, memtable_tail:2, t3m:0x7f54affa8030, tables:[0x7f54800fa080, 0x7f54800fab00, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1}, tx_data_table:0x7f5484dec690, ls_tablet_svr:0x7f5484de6190, slice_allocator:0x7f5484dec6d0}) [2024-03-15 07:03:42.639352] INFO [STORAGE] self_freeze_task (ob_tx_data_table.cpp:834) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=58] finish tx data table self freeze task(ret=-4023, ret="OB_EAGAIN", get_ls_id()={id:1}) [2024-03-15 07:03:42.639365] WDIAG [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:103) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=13][errcode=-4023] freeze tx data table failed.(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:42.639380] INFO [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:116) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=15] finish self freeze task in rpc handle thread(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:42.639398] WDIAG [STORAGE] process (ob_tenant_freezer_rpc.cpp:57) [768][T1003_L0_G0][T1003][YB427F000001-000613ACA99F8B79-0-0] [lt=14][errcode=-4023] do tx data table freeze failed.(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:42.639620] INFO [STORAGE] rpc_callback (ob_tenant_freezer.cpp:1019) [194][RpcIO][T0][Y0-0000000000000000-0-0] [lt=19] [TenantFreezer] call back of tenant freezer request [2024-03-15 07:03:42.640194] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=32][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.645600] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:221) [902][T1004_ObTimer][T1004][Y0-0000000000000000-0-0] [lt=14] ====== [tabletgc] timer task ======(GC_CHECK_INTERVAL=5000000) [2024-03-15 07:03:42.645658] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:251) [902][T1004_ObTimer][T1004][Y0-0000000000000000-0-0] [lt=223] [tabletgc] task check ls(ls->get_ls_id()={id:1}, is_tablet_gc=false) [2024-03-15 07:03:42.645692] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:251) [902][T1004_ObTimer][T1004][Y0-0000000000000000-0-0] [lt=29] [tabletgc] task check ls(ls->get_ls_id()={id:1001}, is_tablet_gc=false) [2024-03-15 07:03:42.645722] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:279) [902][T1004_ObTimer][T1004][Y0-0000000000000000-0-0] [lt=27] [tabletgc] succeed to gc_tablet(ret=0, ret="OB_SUCCESS", 
ls_cnt=2) [2024-03-15 07:03:42.646688] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.646716] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.656832] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.656870] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.659278] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222659266727, start_id=0, end_id=0) [2024-03-15 07:03:42.664129] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5F-0-0] [lt=204][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.664174] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5F-0-0] [lt=48][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, 
zone_priority:9223372036854775807}) [2024-03-15 07:03:42.664203] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5F-0-0] [lt=27][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.664225] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5F-0-0] [lt=20][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.664249] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB5F-0-0] [lt=24][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.667081] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.667147] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=65][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.670210] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=2][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:42.670258] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=46][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486222607156], now0=[mts=1710486222607156], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:42.670290] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=30][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:0, tx_consistency_type:0, isolation:1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222606785, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:false, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:0, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:42.670404] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:0, tx_consistency_type:0, isolation:1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222606785, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:false, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:0, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.670578] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:99) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] already timeout, do not need sleep(sleep_us=0, remain_us=1935620, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=0, timeout_timestamp=1710486224606195) [2024-03-15 07:03:42.670837] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.670884] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=45][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", 
total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.671521] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [552][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=18] sql audit evict task end(evict_high_mem_level=32212254, evict_high_size_level=90000, evict_batch_count=0, elapse_time=0, size_used=15108, mem_used=31196160) [2024-03-15 07:03:42.671569] INFO [LIB] runTimerTask (ob_work_queue.cpp:24) [135][ObTimer][T0][Y0-0000000000000000-0-0] [lt=37] add async task(this=tasktype:N9oceanbase10rootserver13ObRootService19ObRefreshServerTaskE) [2024-03-15 07:03:42.673266] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AC-0-0] [lt=38] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.673301] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AC-0-0] [lt=32][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.673311] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AC-0-0] [lt=10][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:42.673321] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [139][RSAsyncTask3][T1][YB427F000001-000613ACAACF84AC-0-0] [lt=9][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.675352] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222612326, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.675448] INFO 
[SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=4000, remain_us=1659701, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=4, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.677325] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=62][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.677393] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=67][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.679682] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.679721] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=38][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.681540] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.681587] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=45][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222681528], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.681637] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=47][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222681528], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.681663] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222681528]}) [2024-03-15 
07:03:42.681702] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=29] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.681715] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222681698], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.681740] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222681698]}) [2024-03-15 07:03:42.681991] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.682012] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=19][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222681984], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.682044] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222681984]}) [2024-03-15 07:03:42.686339] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.686372] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=32][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.687551] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=56][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.687584] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, 
warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.697685] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.697786] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=99][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.707035] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=0] server is initiating(server_id=0, local_seq=27142, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:42.707074] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=37] new mysql sessid created(conn.sessid_=3221252614, support_ssl=false) [2024-03-15 07:03:42.707154] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=10] sm conn init succ(conn.sessid_=3221252614, sess.client_addr_="172.21.122.86:42716") [2024-03-15 07:03:42.707181] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=20] accept one succ(*s={this:0x7f547b3ff290, fd:133, err:0, last_decode_time_:0, last_write_time_:1710486222707151, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.707698] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=22] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.707717] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=19][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.707726] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.707736] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222707682, enqueue_timestamp_:0, 
request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.707764] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=27][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222707682, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.707816] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=30] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.707826] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=10][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.707833] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=6][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.707840] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=7][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:42.707862] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=20][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:42.707886] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=6] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.707934] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=9][errcode=0] server close connection(sessid=3221252614, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.707948] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=11][errcode=-4018] get session fail(ret=-4018, sessid=3221252614, proxy_sessid=0) [2024-03-15 07:03:42.707958] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=8][errcode=-4016] session is null [2024-03-15 07:03:42.707966] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.707968] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB037-0-0] [lt=7] 
MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252614, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=false, capability=270377487, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:42.707990] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.708071] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-4015] close sql sock by user req(*s={this:0x7f547b3ff290, fd:133, err:5, last_decode_time_:1710486222707683, last_write_time_:1710486222708067, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.708091] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=15] kill and revert session(conn.sessid_=3221252614, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:42.708100] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=9] can close safely, do destroy(*s={this:0x7f547b3ff290, fd:133, err:5, last_decode_time_:1710486222707683, last_write_time_:1710486222708067, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.708112] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=10] connection close(sessid=3221252614, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.708935] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.709005] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=67][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.709068] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=34][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065858}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222708918) [2024-03-15 07:03:42.709110] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=42][errcode=-4076] tenant weak read service 
do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222508810, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.710445] WDIAG [PALF] convert_to_ts (scn.cpp:265) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=-4016] invalid scn should not convert to ts (val_=18446744073709551615) [2024-03-15 07:03:42.710466] INFO [STORAGE.TRANS] print_stat_ (ob_tenant_weak_read_service.cpp:527) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19] [WRS] [TENANT_WEAK_READ_SERVICE] [STAT](tenant_id=1003, server_version={version:{val:1710482141336457000}, total_part_count:1, valid_inner_part_count:1, valid_user_part_count:0}, server_version_delta=4081373962, in_cluster_service=false, cluster_version={val:18446744073709551615}, min_cluster_version={val:18446744073709551615}, max_cluster_version={val:18446744073709551615}, get_cluster_version_err=0, cluster_version_delta=1710486222710419, cluster_service_master="0.0.0.0:0", cluster_service_tablet_id={id:226}, post_cluster_heartbeat_count=0, succ_cluster_heartbeat_count=0, cluster_heartbeat_interval=1000000, local_cluster_version={val:0}, local_cluster_delta=1710486222710419, force_self_check=false, weak_read_refresh_interval=100000) [2024-03-15 07:03:42.712207] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=31] get wrs ts(ls_id={id:1}, delta=146267860498, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.712237] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=25] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.714120] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=60][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-5627, dropped:114, tid:898}]) [2024-03-15 07:03:42.718146] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.718219] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, 
oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.728407] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=50][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.728482] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=74][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.735901] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222669468, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.736033] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=1000, remain_us=1870164, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=1, timeout_timestamp=1710486224606195) [2024-03-15 07:03:42.737303] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) 
[2024-03-15 07:03:42.737328] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=25][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.738674] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.738710] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.740262] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.744012] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222678977, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, 
plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.744178] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=5000, remain_us=1590972, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=5, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.748802] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.748828] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.748775] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222685820, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.748938] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] will sleep(sleep_us=100000, remain_us=7258113, base_sleep_us=1000, retry_sleep_type=1, 
v.stmt_retry_times_=166, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.749512] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.749552] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=39][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.751705] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=69][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.751751] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=45][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.751780] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=27][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222751690) [2024-03-15 07:03:42.751828] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222551654, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.757753] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.759335] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222759319315, start_id=0, end_id=0) [2024-03-15 07:03:42.759522] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, 
disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.759600] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=77][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.763759] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62F-0-0] [lt=260][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.763805] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62F-0-0] [lt=46][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.763852] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62F-0-0] [lt=44][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.763887] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62F-0-0] [lt=34][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.763937] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB62F-0-0] [lt=48][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.766946] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:221) 
[741][T1003_ObTimer][T1003][Y0-0000000000000000-0-0] [lt=9] ====== [tabletgc] timer task ======(GC_CHECK_INTERVAL=5000000) [2024-03-15 07:03:42.766969] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:251) [741][T1003_ObTimer][T1003][Y0-0000000000000000-0-0] [lt=15] [tabletgc] task check ls(ls->get_ls_id()={id:1}, is_tablet_gc=false) [2024-03-15 07:03:42.766979] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:279) [741][T1003_ObTimer][T1003][Y0-0000000000000000-0-0] [lt=9] [tabletgc] succeed to gc_tablet(ret=0, ret="OB_SUCCESS", ls_cnt=1) [2024-03-15 07:03:42.768989] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:221) [557][T1_ObTimer][T1][Y0-0000000000000000-0-0] [lt=8] ====== [tabletgc] timer task ======(GC_CHECK_INTERVAL=5000000) [2024-03-15 07:03:42.769008] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:251) [557][T1_ObTimer][T1][Y0-0000000000000000-0-0] [lt=13] [tabletgc] task check ls(ls->get_ls_id()={id:1}, is_tablet_gc=false) [2024-03-15 07:03:42.769016] INFO [STORAGE] runTimerTask (ob_tablet_gc_service.cpp:279) [557][T1_ObTimer][T1][Y0-0000000000000000-0-0] [lt=7] [tabletgc] succeed to gc_tablet(ret=0, ret="OB_SUCCESS", ls_cnt=1) [2024-03-15 07:03:42.769721] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC2-0-0] [lt=115][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.769755] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC2-0-0] [lt=34][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.769776] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC2-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.769801] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC2-0-0] [lt=24][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.769814] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC2-0-0] [lt=13][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, 
in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.770019] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.770118] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=101][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.771156] INFO [COORDINATOR] detect_recover (ob_failure_detector.cpp:138) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=0] doing detect recover operation(events_with_ops=[{event:{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}}]) [2024-03-15 07:03:42.773449] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B99-0-0] [lt=38] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.775658] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=40] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1858, replace_start_pos=330288, replace_num=15728) [2024-03-15 07:03:42.780320] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=58][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.780352] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=39][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.782480] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") 
[2024-03-15 07:03:42.782536] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=52][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222782460], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.782549] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FA-0-0] [lt=1][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.782564] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FA-0-0] [lt=16][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.782562] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=24][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222782460], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.782586] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FA-0-0] [lt=9][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.782600] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=15] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222782460]}) [2024-03-15 07:03:42.782615] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FA-0-0] [lt=27][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.782625] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.782634] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.782633] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=17] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.782642] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.782645] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222782628], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.782651] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f547e5f2050, start=1710486222782516, sql=select * from 
__all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.782662] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.782671] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.782675] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222782628]}) [2024-03-15 07:03:42.782688] INFO [STORAGE.TRANS] refresh_gts (ob_gts_source.cpp:516) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] refresh gts(ret=-4038, ret="OB_NOT_MASTER", tenant_id=1003, need_refresh=false, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222782628]}) [2024-03-15 07:03:42.782983] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.783008] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=23][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222782974], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.783037] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222782974]}) [2024-03-15 07:03:42.783048] INFO [STORAGE.TRANS] refresh_gts (ob_gts_source.cpp:516) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] refresh gts(ret=-4023, ret="OB_EAGAIN", tenant_id=1004, need_refresh=false, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222782974]}) [2024-03-15 07:03:42.783719] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC3-0-0] [lt=123][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.783754] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC3-0-0] [lt=35][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.783772] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC3-0-0] [lt=17][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, 
scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.783790] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC3-0-0] [lt=17][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.783805] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC3-0-0] [lt=15][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.783599] WDIAG [SERVER] batch_process_tasks (ob_tablet_table_updater.cpp:486) [131][TbltTblUp0][T0][YB427F000001-000613ACAD4F8319-0-0] [lt=230][errcode=-4076] tenant schema is not ready, need wait(ret=-4076, ret="OB_NEED_WAIT", meta_tenant_id=1003, batch_tasks=[{tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200145}, add_timestamp:1710482265837812}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200206}, add_timestamp:1710482265838302}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200136}, add_timestamp:1710482265838633}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200199}, add_timestamp:1710482265838685}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847940}, add_timestamp:1710482265838901}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847638}, add_timestamp:1710482265839043}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201439}, add_timestamp:1710482265839045}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847683}, add_timestamp:1710482265839174}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200146}, add_timestamp:1710482265839439}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847572}, add_timestamp:1710482265839674}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200215}, add_timestamp:1710482265841134}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847593}, add_timestamp:1710482265842515}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200200}, add_timestamp:1710482265842752}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847615}, add_timestamp:1710482265844324}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847586}, add_timestamp:1710482265845566}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847569}, add_timestamp:1710482265846320}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847587}, add_timestamp:1710482265846490}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201658}, add_timestamp:1710482265848499}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201436}, add_timestamp:1710482265848623}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847626}, add_timestamp:1710482265848710}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200141}, add_timestamp:1710482265850314}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847658}, add_timestamp:1710482265850918}, {tenant_id:1004, ls_id:{id:1001}, 
tablet_id:{id:1152921504606847591}, add_timestamp:1710482265851009}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201282}, add_timestamp:1710482265851308}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200159}, add_timestamp:1710482265851468}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200154}, add_timestamp:1710482265851563}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200137}, add_timestamp:1710482265851745}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200216}, add_timestamp:1710482265852142}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847685}, add_timestamp:1710482265852227}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847632}, add_timestamp:1710482265852467}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847645}, add_timestamp:1710482265852725}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847573}, add_timestamp:1710482265853141}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201438}, add_timestamp:1710482265853158}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847571}, add_timestamp:1710482265853347}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847583}, add_timestamp:1710482265853431}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847604}, add_timestamp:1710482265853597}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201452}, add_timestamp:1710482265854101}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200139}, add_timestamp:1710482265854168}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200138}, add_timestamp:1710482265854200}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847628}, add_timestamp:1710482265854209}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201561}, add_timestamp:1710482265854323}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847585}, add_timestamp:1710482265854580}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847590}, add_timestamp:1710482265863542}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847684}, add_timestamp:1710482265863556}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200151}, add_timestamp:1710482265863756}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847671}, add_timestamp:1710482265863760}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847636}, add_timestamp:1710482265863935}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847589}, add_timestamp:1710482265865044}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847847}, add_timestamp:1710482265866864}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202671}, add_timestamp:1710482265866993}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200150}, add_timestamp:1710482265867110}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847570}, add_timestamp:1710482265867170}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847637}, add_timestamp:1710482265867280}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847625}, add_timestamp:1710482265868193}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200149}, add_timestamp:1710482265868283}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202672}, add_timestamp:1710482265868466}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606849082}, add_timestamp:1710482265868542}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847533}, add_timestamp:1710482265868792}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847592}, 
add_timestamp:1710482265868963}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847960}, add_timestamp:1710482265927068}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847627}, add_timestamp:1710482265927261}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847568}, add_timestamp:1710482265927302}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847584}, add_timestamp:1710482265927591}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847594}, add_timestamp:1710482265927757}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202722}, add_timestamp:1710482265935613}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847644}, add_timestamp:1710482265961033}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847657}, add_timestamp:1710482265961320}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847670}, add_timestamp:1710482266002022}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847582}, add_timestamp:1710482266071706}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201435}, add_timestamp:1710482266071862}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606849083}, add_timestamp:1710482266072065}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200153}, add_timestamp:1710482266072164}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847850}, add_timestamp:1710482266072377}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847633}, add_timestamp:1710482266072777}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847614}, add_timestamp:1710482266113599}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847959}, add_timestamp:1710482266146362}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847577}, add_timestamp:1710482266148888}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200201}, add_timestamp:1710482266164205}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847654}, add_timestamp:1710482266164384}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847631}, add_timestamp:1710482266164884}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202064}, add_timestamp:1710482266211030}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202044}, add_timestamp:1710482266251977}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847602}, add_timestamp:1710482266252906}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202677}, add_timestamp:1710482266253172}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847588}, add_timestamp:1710482266253188}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847649}, add_timestamp:1710482266254019}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202037}, add_timestamp:1710482266254156}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847646}, add_timestamp:1710482266254457}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201143}, add_timestamp:1710482266255189}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200140}, add_timestamp:1710482266255243}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200156}, add_timestamp:1710482266255402}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200148}, add_timestamp:1710482266255794}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202723}, add_timestamp:1710482266255831}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:202682}, add_timestamp:1710482266256081}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847641}, 
add_timestamp:1710482266256209}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847576}, add_timestamp:1710482266256268}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606847648}, add_timestamp:1710482266256833}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201144}, add_timestamp:1710482266257445}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:200144}, add_timestamp:1710482266257844}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606849081}, add_timestamp:1710482266258621}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:201434}, add_timestamp:1710482265837477}, {tenant_id:1004, ls_id:{id:1001}, tablet_id:{id:1152921504606848808}, add_timestamp:1710482265837515}]) [2024-03-15 07:03:42.790468] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.790510] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.800636] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.800689] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.804154] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, 
tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222736368, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.804305] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=2000, remain_us=1801892, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=2, timeout_timestamp=1710486224606195) [2024-03-15 07:03:42.806713] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.806754] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=41][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.810459] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.810485] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.810535] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, 
total_part_count=1, generate_timestamp=1710486222810450) [2024-03-15 07:03:42.810565] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486222610451, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.810869] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.810907] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.811529] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=13] get wrs ts(ls_id={id:1}, delta=146267959718, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.811572] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=38] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.813814] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222749176, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, 
can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.813885] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] will sleep(sleep_us=6000, remain_us=1521263, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=6, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.820039] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.820064] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=24][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.821012] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.821059] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.825546] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=8] [IO SENDER STATUS](send_index=1, req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807) [2024-03-15 07:03:42.826722] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=1185] [IO SENDER STATUS](send_index=2, 
req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807) [2024-03-15 07:03:42.827569] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4023] get gts error(ret=-4023) [2024-03-15 07:03:42.827652] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=16] [PALF STAT GROUP LOG INFO](palf_id=1001, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=122, avg_group_log_size=122) [2024-03-15 07:03:42.827740] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=84] [PALF STAT APPEND DATA SIZE](this={palf_id:1001, self:"127.0.0.1:2882", has_set_deleted:false}, append size=113) [2024-03-15 07:03:42.827754] INFO [LIB] stat (utility.h:1140) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=11] [PALF STAT APPEND COST](cur_stat_count=1, stat_interval=1000000, avg cost=167, this=0x7f54639f0950) [2024-03-15 07:03:42.830171] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=45] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2718601216, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:42.830400] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=60] Wash time detail, (compute_wash_size_time=217, refresh_score_time=152, wash_time=16) [2024-03-15 07:03:42.831331] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.831370] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.837582] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [559][T1_LSArchiveMgr][T1][YB427F000001-000613ACA7FF7BAA-0-0] [lt=38] gc stale ls task succ [2024-03-15 07:03:42.837823] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=57] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=7361, clean_start_pos=1447022, clean_num=31457) [2024-03-15 07:03:42.840340] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) 
[896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.840691] INFO [LIB] stat (utility.h:1140) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=12] [PALF STAT WRITE LOG](cur_stat_count=1, stat_interval=1000000, avg cost=13022, this=0x7f54743ffad8) [2024-03-15 07:03:42.840735] WDIAG [PALF] inner_append_log (palf_handle_impl.cpp:1657) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=42][errcode=-4389] write log cost too much time(ret=-4389, this={palf_id:1001, self:"127.0.0.1:2882", has_set_deleted:false}, lsn_array=[{lsn:34077856158}], scn_array=[{val:1710506547039047539}], time_cost=13022) [2024-03-15 07:03:42.840770] INFO [PALF] inner_append_log (palf_handle_impl.cpp:1660) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=33] [PALF STAT INNER APPEND LOG](this={palf_id:1001, self:"127.0.0.1:2882", has_set_deleted:false}, accum_size=122) [2024-03-15 07:03:42.840837] WDIAG [PALF] try_update_match_lsn_map_ (log_sliding_window.cpp:3790) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=0] [MATCH LSN ADVANCE DELAY]match_lsn advance delay too much time(ret=0, palf_id=1001, self="127.0.0.1:2882", server="127.0.0.1:2882", update_func={old_end_lsn:{lsn:34077856158}, new_end_lsn:{lsn:34077856280}, old_advance_time_us:1710486220830382, new_ack_time_us:1710486222840831, advance delay(us):2010449}) [2024-03-15 07:03:42.840876] INFO [PALF] try_advance_committed_lsn_ (log_sliding_window.cpp:1572) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=35] [PALF STAT COMMITTED LOG SIZE](palf_id=1001, self="127.0.0.1:2882", committed size=122) [2024-03-15 07:03:42.840908] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=11] [PALF STAT FS CB](cur_stat_count=1, stat_interval=1000000, avg cost=12, this=0x7f54743f73a8) [2024-03-15 07:03:42.840917] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=9] [PALF STAT LOG LIFETIME](cur_stat_count=1, stat_interval=1000000, avg cost=13304, this=0x7f54743f73d8) [2024-03-15 07:03:42.840925] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=7] [PALF STAT LOG SUBMIT WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=15, this=0x7f54743f7408) [2024-03-15 07:03:42.840931] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=7] [PALF STAT LOG SLIDE WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=13289, this=0x7f54743f7438) [2024-03-15 07:03:42.840942] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=7] [PALF STAT FLUSH CB](cur_stat_count=1, stat_interval=1000000, avg cost=126, this=0x7f54743ffb08) [2024-03-15 07:03:42.841477] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, 
flying_meta_task:0}}) [2024-03-15 07:03:42.841512] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.849259] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.849308] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=49][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.851697] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.851747] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=50][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.851822] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=16] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1004, serve_leader_epoch=0, cur_leader_epoch=431, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.851858] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=25] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1004, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:42.851867] INFO [STORAGE.TRANS] start_service 
(ob_tenant_weak_read_cluster_service.cpp:349) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=8] start TenantWeakReadClusterService(tenant_id=1004) [2024-03-15 07:03:42.852608] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984A-0-0] [lt=6][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.852651] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984A-0-0] [lt=44][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.852672] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984A-0-0] [lt=8][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.852685] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [898][T1004_TenantWea][T1004][YB427F000001-000613ACAC1F984A-0-0] [lt=12][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.852693] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.852708] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.852715] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.852728] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] query failed(ret=-5627, conn=0x7f53fb6d8050, start=1710486222852575, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.852739] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.852747] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] execute sql read fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, tenant_id=1004, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:42.852801] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] query cluster version range from WRS table fail(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:42.852830] INFO [STORAGE.TRANS] start_service 
(ob_tenant_weak_read_cluster_service.cpp:432) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=26] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486222852827, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=981, wlock_time=26, check_leader_time=2, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:42.852850] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] start CLUSTER weak read service fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004) [2024-03-15 07:03:42.852859] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=7] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=431, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:42.859531] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222859510776, start_id=0, end_id=0) [2024-03-15 07:03:42.861937] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.862486] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=183][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.866221] INFO [CLOG] do_fetch_log_ (ob_remote_fetch_log.cpp:154) [872][T1004_LogRessvr][T1004][YB427F000001-000613ACA9BF7BAA-0-0] [lt=48] print do_fetch_log_(lsn={lsn:18446744073709551615}, max_fetch_lsn={lsn:18446744073709551615}, need_schedule=false, proposal_id=-1, last_fetch_ts=-1, task_count=0, ls={ls_meta:{tenant_id:1004, ls_id:{id:1}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710235938936212294}, clog_base_lsn:{lsn:86430941184}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:1710339948785102625}, tablet_change_checkpoint_scn:{val:1710384841822240797}, all_id_meta:{id_meta:[{limited_id:1710506562631136223, 
latest_log_ts:{val:1710506546001774991}}, {limited_id:109000001, latest_log_ts:{val:1710506430849800962}}, {limited_id:1, latest_log_ts:{val:18446744073709551615}}]}}, log_handler:{role:1, proposal_id:431, palf_env_:0x7f54765f0030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:42.866376] INFO [CLOG] do_fetch_log_ (ob_remote_fetch_log.cpp:154) [872][T1004_LogRessvr][T1004][YB427F000001-000613ACA9BF7BAA-0-0] [lt=148] print do_fetch_log_(lsn={lsn:18446744073709551615}, max_fetch_lsn={lsn:18446744073709551615}, need_schedule=false, proposal_id=-1, last_fetch_ts=-1, task_count=0, ls={ls_meta:{tenant_id:1004, ls_id:{id:1001}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710235941799359711}, clog_base_lsn:{lsn:33753698304}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:1710339948785102625}, tablet_change_checkpoint_scn:{val:1710384842533059633}, all_id_meta:{id_meta:[{limited_id:1710264987596709396, latest_log_ts:{val:1710264970963877439}}, {limited_id:98000001, latest_log_ts:{val:1710035127719876240}}, {limited_id:1, latest_log_ts:{val:18446744073709551615}}]}}, log_handler:{role:1, proposal_id:428, palf_env_:0x7f54765f0030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1001, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:42.869157] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222805936, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, 
plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.869304] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=3000, remain_us=1736893, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=3, timeout_timestamp=1710486224606195) [2024-03-15 07:03:42.872667] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.872716] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=49][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.872792] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=427][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.872834] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.880596] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [193][RpcIO][T0][Y0-0000000000000000-0-0] [lt=34] [RPC EASY STAT](log_str=conn count=1/1, request done=47767/47767, request doing=0/0) [2024-03-15 07:03:42.881372] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=22][errcode=0] server is initiating(server_id=0, local_seq=27143, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:42.881410] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=35] new mysql sessid 
created(conn.sessid_=3221252615, support_ssl=false) [2024-03-15 07:03:42.881549] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=51] sm conn init succ(conn.sessid_=3221252615, sess.client_addr_="172.21.122.86:42718") [2024-03-15 07:03:42.881590] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=33] accept one succ(*s={this:0x7f544f60d230, fd:136, err:0, last_decode_time_:0, last_write_time_:1710486222881546, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.882987] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.883043] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.883562] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.883591] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=26][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222883551], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.883609] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222883551], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.883633] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222883551]}) [2024-03-15 07:03:42.883662] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=16] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.883670] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=7][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222883657], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.883706] INFO [STORAGE.TRANS] refresh_gts_location_ 
(ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=23] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222883657]}) [2024-03-15 07:03:42.884001] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FB-0-0] [lt=9][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.884016] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.884029] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222884007], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.884033] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FB-0-0] [lt=32][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.884047] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222884007]}) [2024-03-15 07:03:42.884070] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FB-0-0] [lt=19][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.884091] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FB-0-0] [lt=21][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.884125] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=31][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.884141] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.884158] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.884173] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] query failed(ret=-5627, conn=0x7f5509bf8050, start=1710486222883964, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.884190] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.884204] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] failed to read(ret=-5627, 
ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.884415] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=49] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.884469] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=53][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.884485] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.884504] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222884390, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.884569] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=61][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486222884390, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.884652] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=70] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:42.884719] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=67][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:42.884732] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=11][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:42.884746] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=12][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:42.884761] WDIAG [SERVER] process (obmp_connect.cpp:242) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=14][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:42.884798] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=12] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.884855] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=32][errcode=0] server close connection(sessid=3221252615, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:42.884874] WDIAG [SERVER] 
get_session (obmp_packet_sender.cpp:515) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=16][errcode=-4018] get session fail(ret=-4018, sessid=3221252615, proxy_sessid=0) [2024-03-15 07:03:42.884894] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=17][errcode=-4016] session is null [2024-03-15 07:03:42.884910] INFO [SERVER] process (obmp_connect.cpp:369) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB122-0-0] [lt=12] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=uccenter, host_name=xxx.xxx.xxx.xxx, sessid=3221252615, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:42.885107] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=28][errcode=-4015] close sql sock by user req(*s={this:0x7f544f60d230, fd:136, err:5, last_decode_time_:1710486222884390, last_write_time_:1710486222885100, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.885149] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=35] kill and revert session(conn.sessid_=3221252615, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:42.885167] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=17] can close safely, do destroy(*s={this:0x7f544f60d230, fd:136, err:5, last_decode_time_:1710486222884390, last_write_time_:1710486222885100, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:42.885200] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [212][sql_nio2][T0][Y0-0000000000000000-0-0] [lt=30] connection close(sessid=3221252615, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:42.885695] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222819545, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, 
session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.885793] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] will sleep(sleep_us=7000, remain_us=1449356, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=7, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.893062] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.893095] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=31][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.893173] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.893206] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.893727] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [568][T1_HAService][T1][Y0-0000000000000000-0-0] [lt=48] start do ls ha handler(ls_id_array_=[{id:1}]) [2024-03-15 07:03:42.903316] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, 
disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.903351] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.909090] WDIAG [PALF] convert_to_ts (scn.cpp:265) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=37][errcode=-4016] invalid scn should not convert to ts (val_=18446744073709551615) [2024-03-15 07:03:42.909129] INFO [STORAGE.TRANS] print_stat_ (ob_tenant_weak_read_service.cpp:527) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=37] [WRS] [TENANT_WEAK_READ_SERVICE] [STAT](tenant_id=1, server_version={version:{val:1710506547196065859}, total_part_count:1, valid_inner_part_count:1, valid_user_part_count:0}, server_version_delta=-20324286984, in_cluster_service=false, cluster_version={val:18446744073709551615}, min_cluster_version={val:18446744073709551615}, max_cluster_version={val:18446744073709551615}, get_cluster_version_err=0, cluster_version_delta=1710486222909081, cluster_service_master="0.0.0.0:0", cluster_service_tablet_id={id:226}, post_cluster_heartbeat_count=0, succ_cluster_heartbeat_count=0, cluster_heartbeat_interval=1000000, local_cluster_version={val:0}, local_cluster_delta=1710486222909081, force_self_check=false, weak_read_refresh_interval=100000) [2024-03-15 07:03:42.909240] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=83][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.909268] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=26][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.909316] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=28][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486222909211) [2024-03-15 07:03:42.909354] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=53][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222709147, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.912169] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, 
snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222848605, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.912323] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] will sleep(sleep_us=100000, remain_us=7094729, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=167, timeout_timestamp=1710486230007049) [2024-03-15 07:03:42.912780] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14] get wrs ts(ls_id={id:1}, delta=146268061081, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:42.912861] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=71] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:42.913897] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.913929] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, 
warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.914025] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB60-0-0] [lt=143][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:42.914058] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB60-0-0] [lt=32][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.914081] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB60-0-0] [lt=21][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.914097] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB60-0-0] [lt=15][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:42.914110] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB60-0-0] [lt=13][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:42.917739] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [743][T1003_LSArchive][T1003][YB427F000001-000613ACA90F7BAA-0-0] [lt=18] gc stale ls task succ [2024-03-15 07:03:42.924088] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.924156] ERROR 
try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=67][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.934310] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=25][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.934385] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=60][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.939745] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222872284, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.940084] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=4000, remain_us=1666113, base_sleep_us=1000, retry_sleep_type=1, 
v.stmt_retry_times_=4, timeout_timestamp=1710486224606195) [2024-03-15 07:03:42.940448] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:42.944715] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=3] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.944778] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=62][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.944919] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=98][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.944963] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.951883] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.951924] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=40][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:42.951957] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047538}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486222951871) 
[2024-03-15 07:03:42.951974] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222751843, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:42.953828] INFO [CLOG] get_max_applied_scn (ob_log_apply_service.cpp:730) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=12] get_max_applied_scn(scn={val:1710506547039047539}, this={ls_id_:{id:1001}, role_:1, proposal_id_:428, palf_committed_end_lsn_:{lsn:34077856280}, last_check_scn_:{val:1710506547039047539}, max_applied_cb_scn_:{val:1710506547039047539}}) [2024-03-15 07:03:42.953900] INFO [CLOG] get_min_unreplayed_log_info (ob_replay_status.cpp:1025) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=61] get_min_unreplayed_log_info(lsn={lsn:34077616717}, scn={val:1710506547039045527}, this={ls_id_:{id:1001}, is_enabled_:true, is_submit_blocked_:false, role_:1, err_info_:{lsn_:{lsn:18446744073709551615}, scn_:{val:0}, log_type_:0, is_submit_err_:false, err_ts_:0, err_ret_:0}, ref_cnt_:2, post_barrier_lsn_:{lsn:18446744073709551615}, pending_task_count_:0, submit_log_task_:{ObReplayServiceSubmitTask:{type_:1, enqueue_ts_:1710482185563073, err_info_:{has_fatal_error_:false, fail_ts_:0, fail_cost_:29057989, ret_code_:0}}, next_to_submit_lsn_:{lsn:34077616717}, committed_end_lsn_:{lsn:34077616717}, next_to_submit_scn_:{val:1710506547039045527}, base_lsn_:{lsn:33753698304}, base_scn_:{val:1710235941799359711}, iterator_:{iterator_impl:{buf_:0x7f5456405000, next_round_pread_size:2121728, curr_read_pos:337132, curr_read_buf_start_pos:0, curr_read_buf_end_pos:337132, log_storage_:{IteratorStorage:{start_lsn:{lsn:34077279585}, end_lsn:{lsn:34077616717}, read_buf:{buf_len_:2125824, buf_:0x7f5456405000}, block_size:67104768, log_storage_:0x7f54743fc070, read_buf_has_log_block_header:false}, IteratorStorageType::"DiskIteratorStorage"}, curr_entry_is_raw_write:false, curr_entry_size:0, prev_entry_scn:{val:1710506547039045526}, curr_entry:{LogEntryHeader:{magic:19528, version:1, log_size:25, scn_:{val:1710506547039045526}, data_checksum:3559887847, flag:1}}, init_mode_version:0, accumlate_checksum:3937902784}}}}) [2024-03-15 07:03:42.955120] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.955170] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, 
maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.958007] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222892822, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:42.958102] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=8000, remain_us=1377046, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=8, timeout_timestamp=1710486224335147) [2024-03-15 07:03:42.959595] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486222959584365, start_id=0, end_id=0) [2024-03-15 07:03:42.965343] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.965411] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=66][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 
07:03:42.966351] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=10] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.966350] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:42.966372] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:42.966389] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=35][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:42.966400] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=11][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:42.966411] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=10][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:42.966440] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=8][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:42.966447] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=7][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:42.966458] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=5][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:42.966465] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=6][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:42.966472] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=7][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:42.966478] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=6][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:42.966484] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=5][errcode=-5019] fail to exec 
resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:42.966491] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=6][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:42.966498] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=6][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:42.966506] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8E-0-0] [lt=8][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:42.975580] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=60][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.975675] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=93][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.979550] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=199] Cache replace map node details(ret=0, replace_node_count=0, replace_time=3521, replace_start_pos=346016, replace_num=15728) [2024-03-15 07:03:42.984418] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.984461] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=41][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222984405], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.984481] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=19][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486222984405], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.984522] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222984405]}) [2024-03-15 07:03:42.984548] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=13] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:42.984558] 
WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486222984543], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.984576] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222984543]}) [2024-03-15 07:03:42.984802] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:42.984812] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486222984794], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:42.984830] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486222984794]}) [2024-03-15 07:03:42.985694] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FC-0-0] [lt=13][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.985717] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FC-0-0] [lt=22][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:42.985743] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FC-0-0] [lt=11][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:42.985759] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FC-0-0] [lt=14][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:42.985770] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.985782] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:42.985791] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:42.985802] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f5492b2e050, start=1710486222985652, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 
07:03:42.985816] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:42.985827] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:42.985837] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=37][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.985895] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=58][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:42.985931] INFO do_work (ob_rl_mgr.cpp:704) [124][rl_mgr0][T0][Y0-0000000000000000-0-0] [lt=30] swc wakeup.(stat_period_=1000000, ready=false) [2024-03-15 07:03:42.996138] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=25][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:42.996219] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.005866] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=11][errcode=0] server is initiating(server_id=0, local_seq=27144, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:43.005942] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=72] new mysql sessid created(conn.sessid_=3221252616, support_ssl=false) [2024-03-15 07:03:43.006110] 
INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=18] sm conn init succ(conn.sessid_=3221252616, sess.client_addr_="172.21.122.86:42720") [2024-03-15 07:03:43.006156] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=36] accept one succ(*s={this:0x7f5494dbd270, fd:120, err:0, last_decode_time_:0, last_write_time_:1710486223006106, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.006381] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.006486] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=105][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.007065] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=35] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.007106] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=42][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.007122] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=15][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.007140] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223007039, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.007200] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=59][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223007039, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.007289] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) 
[110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=21] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.007319] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=30][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.007339] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=18][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.007356] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=15][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:43.007369] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=12][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:43.007403] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=13] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.007579] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=53][errcode=0] server close connection(sessid=3221252616, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.007618] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=36][errcode=-4018] get session fail(ret=-4018, sessid=3221252616, proxy_sessid=0) [2024-03-15 07:03:43.007636] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=16][errcode=-4016] session is null [2024-03-15 07:03:43.007668] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB038-0-0] [lt=26] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252616, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:43.007859] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=29][errcode=-4015] close sql sock by user req(*s={this:0x7f5494dbd270, fd:120, err:5, last_decode_time_:1710486223007039, last_write_time_:1710486223007854, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.007908] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=42] kill and revert session(conn.sessid_=3221252616, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:43.007937] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=29] can close safely, do destroy(*s={this:0x7f5494dbd270, fd:120, err:5, last_decode_time_:1710486223007039, last_write_time_:1710486223007854, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) 
[2024-03-15 07:03:43.007960] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=20] connection close(sessid=3221252616, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:43.010603] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=51][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.010628] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.010663] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223010589) [2024-03-15 07:03:43.010678] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486222810578, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.012048] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=10] get wrs ts(ls_id={id:1}, delta=146268159976, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.012078] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=25] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.012610] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.012642] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=32][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, 
prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.013312] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222944446, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.013416] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=5000, remain_us=1592780, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=5, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.014514] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB630-0-0] [lt=236][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.014545] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB630-0-0] [lt=30][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.014580] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB630-0-0] [lt=33][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, 
is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.014603] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB630-0-0] [lt=21][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.014602] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=52][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-4002, dropped:7379, tid:127}]) [2024-03-15 07:03:43.014593] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.014618] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB630-0-0] [lt=15][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.014941] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=33][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.015020] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.015199] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.015240] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.015472] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=45][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.015771] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=40][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.015784] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=40][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) 
[2024-03-15 07:03:43.015893] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=48][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.016021] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.016302] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.016412] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=50][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.016576] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=51][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.016575] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.016681] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.016712] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.016816] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=37][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.017048] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.017102] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=66][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.017323] WDIAG [PALF] 
submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.017561] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.017752] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=32][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.017895] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=61][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.017961] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=36][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.018223] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.018462] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=36][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.018448] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=44][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.018641] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.018640] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=38][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.018678] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.018675] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=33][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.018702] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.018868] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.019079] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.019151] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=61][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.019277] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.019260] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.019459] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=41][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.019668] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.019691] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC4-0-0] [lt=200][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.019719] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC4-0-0] [lt=28][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.019738] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC4-0-0] [lt=17][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.019754] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC4-0-0] [lt=15][errcode=-4018] assign 
element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.019767] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC4-0-0] [lt=13][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.019859] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.019884] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.019936] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=45][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.020021] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.020172] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.020418] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.020479] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.020531] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.020742] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=67][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.020869] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.020945] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.021069] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.021157] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.021336] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.021410] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=163][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.021535] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.021557] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.021670] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.021833] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.022088] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.022264] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.022372] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=56][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.022389] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=40][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.022624] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.022874] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.022923] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=48][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.023011] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.023168] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=49][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.023396] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=40][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.023455] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.023468] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=53][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.023869] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=49][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.024045] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=6][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.024082] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.024333] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=44][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.024634] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.024687] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=32][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.024775] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 
07:03:43.024969] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.025241] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.025374] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=78][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.025459] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=45][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.025678] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=70][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.025845] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.025925] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=44][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.026348] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=50][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.026558] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=38][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.026600] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.026626] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=35][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.026666] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.026816] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027038] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, 
ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027184] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.027212] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.027250] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027278] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.027344] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.027503] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027725] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=25][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027892] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.027951] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.027966] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.028169] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) 
[2024-03-15 07:03:43.028220] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=31][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.028274] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=53][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.028310] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=17][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.028333] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=21][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.028355] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=20][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.028395] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=17][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.028446] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=49][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.028403] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.028462] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=14][errcode=-5627] query failed(ret=-5627, conn=0x7f5435182050, start=1710486223028181, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.028496] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=32][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.028517] WDIAG [SHARE] get_valid_dest_pairs (ob_archive_persist_helper.cpp:459) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=19][errcode=-5627] failed to exec sql(ret=-5627, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.028528] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.028600] WDIAG [ARCHIVE] 
load_archive_round_attr (ob_archive_persist_mgr.cpp:308) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=16][errcode=-5627] get valid dest pair failed(ret=-5627, tenant_id_=1004) [2024-03-15 07:03:43.028619] WDIAG [ARCHIVE] do_check_switch_archive_ (ob_archive_service.cpp:261) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=19][errcode=-5627] load archive round attr failed(ret=-5627, tenant_id=1004) [2024-03-15 07:03:43.028653] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.028666] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=62][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.028885] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.029105] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.029144] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.029247] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.029319] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=51][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.029555] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.029754] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.029763] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=13][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.029789] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=26][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.029791] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=37][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.029817] WDIAG [SERVER] query 
(ob_inner_sql_connection.cpp:815) [909][T1004_ArcSrv][T1003][YB427F000001-000613ACAAAF7BAA-0-0] [lt=16][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.029839] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=20][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.029834] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.029857] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=16][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.029872] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=14][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.029909] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=34][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.029937] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=26][errcode=-5627] query failed(ret=-5627, conn=0x7f54a2df2050, start=1710486223029736, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.029970] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=31][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.029997] WDIAG [SHARE] get_valid_dest_pairs (ob_archive_persist_helper.cpp:459) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=24][errcode=-5627] failed to exec sql(ret=-5627, sql=select dest_no, value from __all_log_archive_dest_parameter where tenant_id=1004 and name='dest_id' order by dest_no asc) [2024-03-15 07:03:43.030063] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=66][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.030074] WDIAG [ARCHIVE] load_archive_round_attr (ob_archive_persist_mgr.cpp:308) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=28][errcode=-5627] get valid dest pair failed(ret=-5627, tenant_id_=1004) [2024-03-15 07:03:43.030090] WDIAG [ARCHIVE] persist_archive_progress_ (ob_archive_persist_mgr.cpp:355) [909][T1004_ArcSrv][T1004][YB427F000001-000613ACAAAF7BAA-0-0] [lt=16][errcode=-5627] load archive round attr failed(ret=-5627, attr={key:{tenant_id:0, dest_no:-1}, incarnation:1, dest_id:0, round_id:0, state:{status:"INVALID"}, start_scn:{val:0}, 
checkpoint_scn:{val:0}, max_scn:{val:0}, compatible:{version:1}, base_piece_id:0, used_piece_id:0, piece_switch_interval:0, frozen_input_bytes:0, frozen_output_bytes:0, active_input_bytes:0, active_output_bytes:0, deleted_input_bytes:0, deleted_output_bytes:0, path:"", comment:""}) [2024-03-15 07:03:43.030303] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=38][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.030297] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.030383] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.030497] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.030498] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.030779] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.030933] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=27][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.031001] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.031074] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.031179] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.031223] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.031449] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.031574] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=42][errcode=-4002] invalid argument(palf_id=1, 
buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.031661] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.031731] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=43][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.031898] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=108][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.032017] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=76][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.032232] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=59][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.032268] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=38][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.032535] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=44][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.032539] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.032750] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.032813] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.032923] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.033120] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.033200] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.033334] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] 
invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.033369] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=31][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.033458] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.033537] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.033743] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.033938] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.034026] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=34][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.034068] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.034281] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.034303] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC5-0-0] [lt=93][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.034317] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC5-0-0] [lt=14][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.034337] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC5-0-0] [lt=19][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, 
zone_priority:9223372036854775807}) [2024-03-15 07:03:43.034348] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC5-0-0] [lt=10][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.034360] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC5-0-0] [lt=12][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.034522] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.034571] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=45][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.034632] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.034665] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.034739] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.034936] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=28][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.035143] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.035269] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.035348] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.035480] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, 
buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.035527] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.035608] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=39][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.035704] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.035824] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.035897] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036093] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036210] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.036349] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036277] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486222965316, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", 
consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.036484] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] will sleep(sleep_us=9000, remain_us=1298666, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=9, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.036506] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=20][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.036535] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=17][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036677] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036859] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.036858] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=47][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.037038] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.037043] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.037220] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.037360] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.037402] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) 
[2024-03-15 07:03:43.037418] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.037512] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=23][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.037586] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.037644] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.037769] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.037949] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.038128] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.038156] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=40][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.038309] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.038315] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=47] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2720698368, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:43.038491] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.038502] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=129] Wash time detail, (compute_wash_size_time=207, refresh_score_time=49, wash_time=8) [2024-03-15 07:03:43.038676] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, 
buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.038775] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=30][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.038855] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.039086] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.039299] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.039483] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.039488] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=26][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.039662] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.039844] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.040023] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.040220] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.040216] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.040458] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=44][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.040540] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=37][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.040669] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-4002] invalid argument(palf_id=1, 
buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.040865] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=24][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.040877] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.041323] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.041524] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54791e6f38, buf_len=40, ref_scn={val:1710506562631136223}) [2024-03-15 07:03:43.041567] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=33][errcode=-4002] invalid argument(palf_id=1, buf=0x7f54bb5e6f38, buf_len=40, ref_scn={val:1710506557268654309}) [2024-03-15 07:03:43.043360] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=10] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=4843, clean_start_pos=1478479, clean_num=31457) [2024-03-15 07:03:43.045894] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.045940] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=45][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.047981] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.048050] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=67][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, 
used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.058185] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=31][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.058264] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=78][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.059830] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223059794194, start_id=0, end_id=0) [2024-03-15 07:03:43.068454] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.068572] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=116][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.078766] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=36][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.078831] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=65][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.083018] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223017649, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.083173] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=6000, remain_us=1523024, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=6, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.085201] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223085188], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.085229] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=27][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223085188], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.085251] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223085188]}) [2024-03-15 07:03:43.085270] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) 
[190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=9] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.085277] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=6][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223085267], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.085306] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223085267]}) [2024-03-15 07:03:43.085604] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223085593], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.085623] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223085593]}) [2024-03-15 07:03:43.087065] INFO [CLOG] do_thread_task_ (ob_remote_fetch_log_worker.cpp:247) [871][T1004_RFLWorker][T1004][YB427F000001-000613ACA9AF7BAA-0-0] [lt=42] ObRemoteFetchWorker is running(thread_index=0) [2024-03-15 07:03:43.087180] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FD-0-0] [lt=10][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.087205] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FD-0-0] [lt=26][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.087253] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FD-0-0] [lt=28][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.087299] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FD-0-0] [lt=43][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.087334] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=32][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.087352] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.087381] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=27][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.087393] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] 
query failed(ret=-5627, conn=0x7f54bd1f4050, start=1710486223087134, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.087408] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.087419] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.087806] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223011665, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.087947] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] will sleep(sleep_us=100000, remain_us=6919104, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=168, timeout_timestamp=1710486230007049) [2024-03-15 07:03:43.089410] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.089441] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=30][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.089475] WDIAG [SHARE.SCHEMA] 
get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B5A-0-0] [lt=7][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.089487] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B5A-0-0] [lt=13][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.089503] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [859][T1004_FreInfoRe][T1003][YB427F000001-000613ACAB4F9B5A-0-0] [lt=7][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1004'"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.089541] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5A-0-0] [lt=37][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1004'"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.089566] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:43.089575] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.089582] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.089589] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] query failed(ret=-5627, conn=0x7f54609f8050, start=1710486223089445, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:43.089604] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.089612] WDIAG [SHARE] load_global_merge_info (ob_global_merge_table_operator.cpp:49) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] fail to execute sql(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, meta_tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1004') [2024-03-15 07:03:43.089650] WDIAG [STORAGE] refresh_merge_info (ob_tenant_freeze_info_mgr.cpp:856) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] failed to load global merge info(ret=-5627, ret="OB_SCHEMA_EAGAIN", global_merge_info={tenant_id:1004, cluster:{name:"cluster", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, frozen_scn:{name:"frozen_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, global_broadcast_scn:{name:"global_broadcast_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, last_merged_scn:{name:"last_merged_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, is_merge_error:{name:"is_merge_error", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_status:{name:"merge_status", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, 
error_type:{name:"error_type", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, suspend_merging:{name:"suspend_merging", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_start_time:{name:"merge_start_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, last_merged_time:{name:"last_merged_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}}) [2024-03-15 07:03:43.089704] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:967) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=53][errcode=-5627] fail to refresh merge info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.090252] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=22][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.090273] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=21][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.090281] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=8][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.090288] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=7][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.090296] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=7][errcode=-5157] get schema guard failed(ret=-5157) [2024-03-15 07:03:43.090316] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=13][errcode=-5157] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-5157, ret=-5157) [2024-03-15 07:03:43.090332] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [859][T1004_FreInfoRe][T1004][YB427F000001-000613ACAB4F9B5B-0-0] [lt=16][errcode=-5157] failed to process final(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, aret=-5157, ret=-5157) [2024-03-15 07:03:43.090340] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] execute sql failed(ret=-5157, tenant_id=1004, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.090347] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] retry_while_no_tenant_resource failed(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.090353] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] execute_read failed(ret=-5157, 
cluster_id=1, tenant_id=1004) [2024-03-15 07:03:43.090360] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] query failed(ret=-5157, conn=0x7f54503be050, start=1710486223090236, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.090369] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] read failed(ret=-5157) [2024-03-15 07:03:43.090376] WDIAG [SHARE] load (ob_core_table_proxy.cpp:436) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] execute sql failed(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.090432] WDIAG [SHARE] load (ob_core_table_proxy.cpp:368) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5157] load failed(ret=-5157, for_update=false) [2024-03-15 07:03:43.090445] WDIAG [SHARE] get (ob_global_stat_proxy.cpp:422) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=11][errcode=-5157] core_table load failed(ret=-5157, ret="OB_TENANT_NOT_EXIST") [2024-03-15 07:03:43.090453] WDIAG [SHARE] get_snapshot_gc_scn (ob_global_stat_proxy.cpp:164) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] get failed(ret=-5157) [2024-03-15 07:03:43.090460] WDIAG [STORAGE] try_update_info (ob_tenant_freeze_info_mgr.cpp:923) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5157] tenant not exists, maybe has been removed(ret=-5157, MTL_ID()=1004) [2024-03-15 07:03:43.090468] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:970) [859][T1004_FreInfoRe][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5157] fail to try update info(tmp_ret=-5157, tmp_ret="OB_TENANT_NOT_EXIST") [2024-03-15 07:03:43.090495] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.090505] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:43.090512] WDIAG [SERVER] get_mem_limit (ob_mysql_request_manager.cpp:270) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=7][errcode=-4029] failed to get global sys variable(ret=-4029, tenant_id=1003, OB_SV_SQL_AUDIT_PERCENTAGE="ob_sql_audit_percentage", obj_val={"NULL":"NULL"}) [2024-03-15 07:03:43.090525] WDIAG [SERVER] check_config_mem_limit (ob_eliminate_task.cpp:65) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=11][errcode=-4029] failed to get mem limit(ret=-4029) [2024-03-15 07:03:43.090533] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [736][T1003_ReqMemEvi][T1003][Y0-0000000000000000-0-0] [lt=6] sql audit evict task end(evict_high_mem_level=858993459, evict_high_size_level=90000, evict_batch_count=0, elapse_time=0, size_used=0, mem_used=0) [2024-03-15 07:03:43.090715] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=208][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl 
whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.090740] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.100873] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.101028] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=153][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.106158] INFO [SHARE] blacklist_loop_ (ob_server_blacklist.cpp:313) [187][Blacklist][T0][Y0-0000000000000000-0-0] [lt=26] blacklist_loop exec finished(cost_time=31, is_enabled=true, send_cnt=0) [2024-03-15 07:03:43.109341] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=23][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.109381] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=39][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.109449] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=65][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223109328) [2024-03-15 07:03:43.109478] WDIAG 
[STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=29][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486222909376, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.111196] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.111277] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.111521] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=31] get wrs ts(ls_id={id:1}, delta=146268259349, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.111545] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=21] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.112950] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223045549, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, 
ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.113087] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] will sleep(sleep_us=10000, remain_us=1222063, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=10, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.117825] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [904][T1004_LSArchive][T1004][YB427F000001-000613ACAA6F7BAA-0-0] [lt=24] gc stale ls task succ [2024-03-15 07:03:43.119458] INFO [SQL.QRR] runTimerTask (ob_udr_mgr.cpp:87) [527][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=0] run rewrite rule refresh task(rule_mgr_->tenant_id_=1) [2024-03-15 07:03:43.121610] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [527][T1_ReqMemEvict][T1][YB427F000001-000613ACA8DF7EDB-0-0] [lt=45] table not exist(tenant_id=1, database_id=201001, table_name=__all_sys_stat, ret=-5019) [2024-03-15 07:03:43.121674] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [527][T1_ReqMemEvict][T1][YB427F000001-000613ACA8DF7EDB-0-0] [lt=76][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.121975] INFO [SQL.EXE] run2 (ob_maintain_dependency_info_task.cpp:221) [158][MaintainDepInfo][T0][Y0-0000000000000000-0-0] [lt=26] [ASYNC TASK QUEUE](queue_size=0) [2024-03-15 07:03:43.122507] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.122546] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.123551] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.123618] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=66][errcode=-4038] 
try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.127009] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7F-0-0] [lt=4] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.127093] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B7F-0-0] [lt=82][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.132761] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.132849] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=83][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.140611] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=32][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.143020] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.143073] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=51][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.150821] WDIAG 
[SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6C-0-0] [lt=8][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.150877] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6C-0-0] [lt=54][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.150915] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6C-0-0] [lt=17][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1003'"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.150935] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6C-0-0] [lt=20][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_merge_info WHERE tenant_id = '1003'"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.150946] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:43.150965] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.150979] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.151004] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=-5627] query failed(ret=-5627, conn=0x7f547e5f2050, start=1710486223150767, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:43.151022] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.151038] WDIAG [SHARE] load_global_merge_info (ob_global_merge_table_operator.cpp:49) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] fail to execute sql(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, meta_tenant_id=1003, sql=SELECT * FROM __all_merge_info WHERE tenant_id = '1003') [2024-03-15 07:03:43.151112] WDIAG [STORAGE] refresh_merge_info (ob_tenant_freeze_info_mgr.cpp:856) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=-5627] failed to load global merge info(ret=-5627, ret="OB_SCHEMA_EAGAIN", global_merge_info={tenant_id:1003, cluster:{name:"cluster", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, frozen_scn:{name:"frozen_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, global_broadcast_scn:{name:"global_broadcast_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, last_merged_scn:{name:"last_merged_scn", is_scn:true, scn:{val:1}, value:-1, need_update:false}, is_merge_error:{name:"is_merge_error", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_status:{name:"merge_status", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, 
error_type:{name:"error_type", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, suspend_merging:{name:"suspend_merging", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, merge_start_time:{name:"merge_start_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}, last_merged_time:{name:"last_merged_time", is_scn:false, scn:{val:18446744073709551615}, value:0, need_update:false}}) [2024-03-15 07:03:43.151223] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:967) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=109][errcode=-5627] fail to refresh merge info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.152194] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=123][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.152237] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6D-0-0] [lt=18][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.152237] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=41][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.152258] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6D-0-0] [lt=21][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.152286] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6D-0-0] [lt=13][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.152271] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=30][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047539}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486223152179) [2024-03-15 07:03:43.152304] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [678][T1003_FreInfoRe][T1003][YB427F000001-000613ACA97F9B6D-0-0] [lt=17][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.152320] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.152335] WDIAG [SERVER] retry_while_no_tenant_resource 
(ob_inner_sql_connection.cpp:890) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.152320] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=49][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486222951986, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.152349] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.152364] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] query failed(ret=-5627, conn=0x7f53fb6d8050, start=1710486223152209, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.152400] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.152418] WDIAG [SHARE] load (ob_core_table_proxy.cpp:436) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] execute sql failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, sql=SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name) [2024-03-15 07:03:43.152517] WDIAG [SHARE] load (ob_core_table_proxy.cpp:368) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-5627] load failed(ret=-5627, for_update=false) [2024-03-15 07:03:43.152535] WDIAG [SHARE] get (ob_global_stat_proxy.cpp:422) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-5627] core_table load failed(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.152550] WDIAG [SHARE] get_snapshot_gc_scn (ob_global_stat_proxy.cpp:164) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] get failed(ret=-5627) [2024-03-15 07:03:43.152563] WDIAG [STORAGE] get_global_info (ob_tenant_freeze_info_mgr.cpp:777) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] fail to get global info(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.152579] WDIAG [STORAGE] try_update_info (ob_tenant_freeze_info_mgr.cpp:921) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5627] failed to get global info(ret=-5627) [2024-03-15 07:03:43.152600] WDIAG [STORAGE] runTimerTask (ob_tenant_freeze_info_mgr.cpp:970) [678][T1003_FreInfoRe][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] fail to try update info(tmp_ret=-5627, tmp_ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.153228] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.153291] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=60][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.158449] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B89-0-0] [lt=0][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.158491] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B89-0-0] [lt=41][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.158520] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [888][T1004_LSMetaCh][T1003][YB427F000001-000613ACAB3F8B89-0-0] [lt=13][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.158536] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=15][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.158563] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=26][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.158574] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=9][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.158582] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=8][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.158592] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=8][errcode=-5627] query failed(ret=-5627, conn=0x7f5509bf8050, start=1710486223158390, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.158605] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=12][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.158615] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=7][errcode=-5627] execute sql failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, sql=SELECT * 
FROM __all_ls_meta_table WHERE tenant_id = 1004 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.158679] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=11][errcode=-5627] get all ls info by persistent_ls_ failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004) [2024-03-15 07:03:43.158692] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=12][errcode=-5627] fail to get ls infos by tenant(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, inner_table_only=false) [2024-03-15 07:03:43.158702] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=10][errcode=-5627] fail to open iterator(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.158712] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=8][errcode=-5627] ls table iterator next failed(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.158722] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=7][errcode=-5627] build replica map from ls table failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", mode=0) [2024-03-15 07:03:43.158737] WDIAG [SERVER] check_ls_table (ob_tenant_meta_checker.cpp:188) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=12][errcode=-5627] check ls table failed(ret=-5627, ret="OB_SCHEMA_EAGAIN", mode=0) [2024-03-15 07:03:43.158750] WDIAG [SERVER] runTimerTask (ob_tenant_meta_checker.cpp:44) [888][T1004_LSMetaCh][T1004][YB427F000001-000613ACAB3F8B89-0-0] [lt=12][errcode=-5627] fail to check ls meta table(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.159646] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=47][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.159668] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.159677] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.159686] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.159694] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:43.159703] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=8] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1004) [2024-03-15 07:03:43.159713] WDIAG [SQL.PC] run_plan_cache_task (ob_plan_cache.cpp:2039) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4029] 
fail to update plan cache memory sys val(ret=-4029) [2024-03-15 07:03:43.159726] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=7] start lib cache evict(tenant_id=1004, mem_hold=0, mem_limit=107374180, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:43.159740] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1038) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=11] end lib cache evict(tenant_id=1004, cache_evict_num=0, mem_hold=0, mem_limit=107374180, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:43.159750] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=10] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.159901] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223159886094, start_id=0, end_id=0) [2024-03-15 07:03:43.161511] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223088215, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.161662] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=7000, remain_us=1444536, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=7, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.163004] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=6] Dumping All Cache Objs(alloc_obj_list.count()=0, alloc_obj_list=[]) [2024-03-15 07:03:43.163022] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [806][T1004_PlanCache][T1004][Y0-0000000000000000-0-0] [lt=17] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.163464] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=32][errcode=0] there is not any 
block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.163548] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=83][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.164579] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB61-0-0] [lt=132][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.164621] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB61-0-0] [lt=42][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.164650] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB61-0-0] [lt=28][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.164676] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB61-0-0] [lt=24][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.164700] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB61-0-0] [lt=23][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.168985] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ 
(ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.169233] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=245][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.173758] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.173823] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=64][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.175493] WDIAG [SERVER] batch_process_tasks (ob_ls_table_updater.cpp:333) [134][LSUserTblUp0][T0][YB427F000001-000613ACB0BF8927-0-0] [lt=30][errcode=-4076] tenant schema is not ready, need wait(ret=-4076, ret="OB_NEED_WAIT", superior_tenant_id=1003, task={tenant_id:1004, ls_id:{id:1}, add_timestamp:1710482139497411}) [2024-03-15 07:03:43.178894] INFO [SHARE] run_loop_ (ob_bg_thread_monitor.cpp:331) [959][BGThreadMonitor][T0][Y0-0000000000000000-0-0] [lt=39] current monitor number(seq_=-1) [2024-03-15 07:03:43.181809] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=41] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1662, replace_start_pos=361744, replace_num=15728) [2024-03-15 07:03:43.183972] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, 
log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.184047] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=71][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.185972] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223185951], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.186029] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=53][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223185951], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.186082] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=29] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223185951]}) [2024-03-15 07:03:43.186104] INFO [STORAGE.TRANS] statistics (ob_gts_source.cpp:70) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=19] gts statistics(tenant_id=1, gts_rpc_cnt=0, get_gts_cache_cnt=7855, get_gts_with_stc_cnt=13413, try_get_gts_cache_cnt=0, try_get_gts_with_stc_cnt=0, wait_gts_elapse_cnt=0, try_wait_gts_elapse_cnt=0) [2024-03-15 07:03:43.186144] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=23] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.186162] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223186138], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.186193] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=15] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223186138]}) [2024-03-15 07:03:43.186205] INFO [STORAGE.TRANS] statistics (ob_gts_source.cpp:70) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts statistics(tenant_id=1003, gts_rpc_cnt=0, get_gts_cache_cnt=7838, get_gts_with_stc_cnt=8, try_get_gts_cache_cnt=0, try_get_gts_with_stc_cnt=0, wait_gts_elapse_cnt=0, try_wait_gts_elapse_cnt=0) [2024-03-15 07:03:43.186585] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223186573], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.186708] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=29] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223186573]}) [2024-03-15 07:03:43.186733] INFO [STORAGE.TRANS] statistics 
(ob_gts_source.cpp:70) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=25] gts statistics(tenant_id=1004, gts_rpc_cnt=0, get_gts_cache_cnt=7833, get_gts_with_stc_cnt=18628, try_get_gts_cache_cnt=0, try_get_gts_with_stc_cnt=0, wait_gts_elapse_cnt=0, try_wait_gts_elapse_cnt=0) [2024-03-15 07:03:43.188246] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.188287] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=41][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.189008] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FE-0-0] [lt=16][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.189038] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FE-0-0] [lt=30][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.189062] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FE-0-0] [lt=11][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.189078] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FE-0-0] [lt=15][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.189090] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.189103] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.189113] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.189124] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] query failed(ret=-5627, conn=0x7f54845f4050, start=1710486223188965, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.189138] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] read failed(ret=-5627) 
[2024-03-15 07:03:43.189149] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.191583] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223122965, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.191783] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=11000, remain_us=1143367, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=11, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.194369] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.194479] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=109][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.203310] INFO [STORAGE.TRANS] 
try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=2] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.203375] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=65][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.204653] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.204750] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=95][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.207510] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:351) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=7] ====== check clog disk timer task ====== [2024-03-15 07:03:43.207537] INFO [PALF] get_disk_usage (palf_env_impl.cpp:777) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=21] get_disk_usage(ret=0, capacity(MB):=8294, used(MB):=6610) [2024-03-15 07:03:43.208714] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=12] succ to get rec scn(*this={this:0x7f545f604030, ls_id:{id:1}, tenant_id:1004, state:"L_WORKING", total_tx_ctx_count:14, ls_retain_ctx_mgr:{retain_ctx_list_.size():14, max_wait_ckpt_ts_:{val:1710435096734134968}, last_push_gc_task_ts_:1710485740147765, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.208748] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=33] tx ctx memtable get rec scn(this={ObITable:{this:0x7f545f234080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710483939808321}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f545f234080, snapshot_version:{val:1710483939808321}, ls_id:{id:1}, 
is_frozen:true}, rec_scn={val:1710300088341019363}) [2024-03-15 07:03:43.208780] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=25] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:1709707857349638225}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:43.208810] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=26] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="TX_DATA_MEMTABLE_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={ObIMemtableMgr:{Memtables:this:0x7f54639da1b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f54639df290, table_type:1, memtable_head:0, memtable_tail:2, t3m:0x7f54a23e2030, tables:[0x7f545f3f0080, 0x7f545f3f1580, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1}, tx_data_table:0x7f54639e0690, ls_tablet_svr:0x7f54639da190, slice_allocator:0x7f54639e06d0}, min_rec_scn={val:1710235938936212294}, ls_id_={id:1}) [2024-03-15 07:03:43.209455] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=16] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1, serve_leader_epoch=0, cur_leader_epoch=420, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.209507] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=37] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:43.209522] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14] start TenantWeakReadClusterService(tenant_id=1) [2024-03-15 07:03:43.209600] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=44] get rec log scn(service_type_=0, rec_log_ts={val:1710506546001774991}) [2024-03-15 07:03:43.209616] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=16] get rec log scn(service_type_=1, rec_log_ts={val:1710506430849800962}) [2024-03-15 07:03:43.209629] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=7] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710235938936212294}, checkpoint_scn_in_ls_meta={val:1710235938936212294}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:43.210732] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.210746] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] 
[lt=10] succ to get rec scn(*this={this:0x7f545f686030, ls_id:{id:1001}, tenant_id:1004, state:"L_WORKING", total_tx_ctx_count:0, ls_retain_ctx_mgr:{retain_ctx_list_.size():0, max_wait_ckpt_ts_:{val:1710234140568838399}, last_push_gc_task_ts_:1710482139497241, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.210760] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.210765] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=19] tx ctx memtable get rec scn(this={ObITable:{this:0x7f545f234dd0, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:2}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f545f234dd0, snapshot_version:{val:4611686018427387903}, ls_id:{id:1001}, is_frozen:false}, rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.210783] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223210718) [2024-03-15 07:03:43.210786] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=18] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:1709707857349638223}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1001}) [2024-03-15 07:03:43.210799] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486223010687, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.210800] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=12] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="TX_DATA_MEMTABLE_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={ObIMemtableMgr:{Memtables:this:0x7f54639ec1b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f54639f1290, table_type:1, memtable_head:0, memtable_tail:1, t3m:0x7f54a23e2030, tables:[0x7f545f3f0b00, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1001}, tx_data_table:0x7f54639f2690, ls_tablet_svr:0x7f54639ec190, slice_allocator:0x7f54639f26d0}, min_rec_scn={val:1710235941799359711}, ls_id_={id:1001}) [2024-03-15 07:03:43.210821] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) 
[737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=14] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1003, serve_leader_epoch=0, cur_leader_epoch=1984, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.210863] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=33] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1003, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:43.210875] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=11] start TenantWeakReadClusterService(tenant_id=1003) [2024-03-15 07:03:43.211166] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F9800-0-0] [lt=10] table not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) [2024-03-15 07:03:43.211195] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F9800-0-0] [lt=26][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.211389] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=1] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486223211385, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1895, wlock_time=42, check_leader_time=1, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:43.211457] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=420, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.211814] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95ED-0-0] [lt=8][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.211834] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95ED-0-0] [lt=19][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.211857] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95ED-0-0] [lt=10][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.211873] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) 
[737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95ED-0-0] [lt=15][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.211884] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:43.211895] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.211904] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.211914] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] query failed(ret=-5627, conn=0x7f5435182050, start=1710486223211778, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:43.211927] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.211938] WDIAG [STORAGE.TRANS] query_cluster_version_range_ (ob_tenant_weak_read_cluster_service.cpp:196) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] execute sql read fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1003 and level_id = 0 and level_value = '') [2024-03-15 07:03:43.211986] WDIAG [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:378) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5627] query cluster version range from WRS table fail(ret=-5627, ret="OB_SCHEMA_EAGAIN") [2024-03-15 07:03:43.211967] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=32][errcode=0] server is initiating(server_id=0, local_seq=27145, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:43.211998] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=10] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486223211995, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1143, wlock_time=31, check_leader_time=2, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:43.212017] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=48] new mysql sessid created(conn.sessid_=3221252617, support_ssl=false) [2024-03-15 07:03:43.212026] WDIAG [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:798) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] 
[lt=27][errcode=-5627] start CLUSTER weak read service fail(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003) [2024-03-15 07:03:43.212037] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=9] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=1984, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.212142] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=17] sm conn init succ(conn.sessid_=3221252617, sess.client_addr_="172.21.122.86:42722") [2024-03-15 07:03:43.212171] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=19] accept one succ(*s={this:0x7f547b3ff290, fd:133, err:0, last_decode_time_:0, last_write_time_:1710486223212138, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.213326] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=25] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.213351] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=24][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.213365] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.213381] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223213304, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.213454] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=21] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710235941799359711}, checkpoint_scn_in_ls_meta={val:1710235941799359711}, ls_id={id:1001}, service_type="TRANS_SERVICE") [2024-03-15 07:03:43.213474] INFO [STORAGE] cannot_recycle_log_over_threshold_ (ob_checkpoint_service.cpp:264) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=17] cannot_recycle_log_size statistics(cannot_recycle_log_size=1294483054, threshold=2609192632) [2024-03-15 07:03:43.213468] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=85][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223213304, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.213495] 
INFO [STORAGE] flush_if_need_ (ob_ls.cpp:1559) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=8] the ls no need flush to advance_checkpoint(get_ls_id()={id:1}, need_flush=false) [2024-03-15 07:03:43.213507] INFO [STORAGE] flush_if_need_ (ob_ls.cpp:1559) [901][T1004_CKClogDis][T1004][Y0-0000000000000000-0-0] [lt=8] the ls no need flush to advance_checkpoint(get_ls_id()={id:1001}, need_flush=false) [2024-03-15 07:03:43.213575] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=51] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.213591] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=16][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.213604] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=11][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.213617] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=12][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:43.213628] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=10][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:43.213663] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=11] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.213730] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=17][errcode=0] server close connection(sessid=3221252617, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.213752] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=18][errcode=-4018] get session fail(ret=-4018, sessid=3221252617, proxy_sessid=0) [2024-03-15 07:03:43.213767] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=13][errcode=-4016] session is null [2024-03-15 07:03:43.213781] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB039-0-0] [lt=11] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252617, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=false, capability=270377487, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:43.214305] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15] get wrs ts(ls_id={id:1}, delta=146268362036, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.214334] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=24] [Keep Alive Stat] LS Keep Alive 
Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.214844] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=32][errcode=-4015] close sql sock by user req(*s={this:0x7f547b3ff290, fd:133, err:5, last_decode_time_:1710486223213304, last_write_time_:1710486223214837, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.214884] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=32] kill and revert session(conn.sessid_=3221252617, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:43.214899] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=15] can close safely, do destroy(*s={this:0x7f547b3ff290, fd:133, err:5, last_decode_time_:1710486223213304, last_write_time_:1710486223214837, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.214898] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=18][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-4283, dropped:306, tid:127}]) [2024-03-15 07:03:43.214918] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=16] connection close(sessid=3221252617, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:43.214949] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.215008] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.215037] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:43.215012] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, 
oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.215059] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=19] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1003) [2024-03-15 07:03:43.215078] WDIAG [SQL.PC] run_plan_cache_task (ob_plan_cache.cpp:2039) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-4029] fail to update plan cache memory sys val(ret=-4029) [2024-03-15 07:03:43.215100] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=12] start lib cache evict(tenant_id=1003, mem_hold=0, mem_limit=53687090, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:43.215125] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1038) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=21] end lib cache evict(tenant_id=1003, cache_evict_num=0, mem_hold=0, mem_limit=53687090, cache_obj_num=0, cache_node_num=0) [2024-03-15 07:03:43.215145] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=19] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.219189] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.221681] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=15] Dumping All Cache Objs(alloc_obj_list.count()=0, alloc_obj_list=[]) [2024-03-15 07:03:43.221738] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [625][T1003_PlanCache][T1003][Y0-0000000000000000-0-0] [lt=138] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.225165] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.225214] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=47][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.235442] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, 
log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.235504] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=62][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.236664] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.236705] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=41][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486223169290], now0=[mts=1710486223169290], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.236754] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=45][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223167990, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.236855] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=87][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}) [2024-03-15 07:03:43.236905] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=49][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223167990, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, 
coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.236979] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=69] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223167990, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.237097] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=116][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.237154] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=53][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.237170] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=13][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.237184] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=11][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.237197] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.237215] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = 
'__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=8, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.237245] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=24] will sleep(sleep_us=8000, remain_us=1368953, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=8, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.240714] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=46][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.243883] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=38] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2720698368, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:43.243982] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=43] Wash time detail, (compute_wash_size_time=147, refresh_score_time=49, wash_time=8) [2024-03-15 07:03:43.245419] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=59][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:8, local_retry_times:8, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.245523] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=76][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.245554] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=30][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.245565] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.245602] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=11][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.245666] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.245746] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=78][errcode=-4264] Log out of disk space(msg="log disk 
space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.245867] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.245904] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=36][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.248042] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=21] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=4033, clean_start_pos=1509936, clean_num=31457) [2024-03-15 07:03:43.253720] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.253766] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=45][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486223188312], now0=[mts=1710486223188312], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.253800] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=31][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223187413, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.253905] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=88][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 
07:03:43.253946] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=41][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223187413, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.254006] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=56] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223187413, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.254157] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=148][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.254224] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=60][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.254242] WDIAG [SQL] open (ob_result_set.cpp:157) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=16][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.254273] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=28][errcode=-4283] open result set 
failed(ret=-4283) [2024-03-15 07:03:43.254303] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=26][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.254342] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=33][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=169, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.254384] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=36] will sleep(sleep_us=100000, remain_us=6752669, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=169, timeout_timestamp=1710486230007049) [2024-03-15 07:03:43.255889] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=56][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.255954] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.259997] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223259983136, start_id=0, end_id=0) [2024-03-15 07:03:43.264390] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB631-0-0] [lt=142][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.264487] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB631-0-0] [lt=96][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, 
zone_priority:9223372036854775807}) [2024-03-15 07:03:43.264558] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB631-0-0] [lt=67][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.264599] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB631-0-0] [lt=38][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.264648] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB631-0-0] [lt=48][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.266135] INFO [CLOG] get_replay_process (ob_replay_status.cpp:1062) [687][T1003_ReplayPro][T1003][Y0-0000000000000000-0-0] [lt=19] replay status is not follower(min_unreplayed_lsn={lsn:365766615723}, base_lsn={lsn:364982833152}, this={ls_id_:{id:1}, is_enabled_:true, is_submit_blocked_:false, role_:1, err_info_:{lsn_:{lsn:18446744073709551615}, scn_:{val:0}, log_type_:0, is_submit_err_:false, err_ts_:0, err_ret_:0}, ref_cnt_:1, post_barrier_lsn_:{lsn:18446744073709551615}, pending_task_count_:0, submit_log_task_:{ObReplayServiceSubmitTask:{type_:1, enqueue_ts_:1710482618002690, err_info_:{has_fatal_error_:false, fail_ts_:0, fail_cost_:121035516, ret_code_:0}}, next_to_submit_lsn_:{lsn:365766615723}, committed_end_lsn_:{lsn:365766615723}, next_to_submit_scn_:{val:1710339954851689029}, base_lsn_:{lsn:364982833152}, base_scn_:{val:1710278140261191947}, iterator_:{iterator_impl:{buf_:0x7f5457005000, next_round_pread_size:2121728, curr_read_pos:0, curr_read_buf_start_pos:0, curr_read_buf_end_pos:0, log_storage_:{IteratorStorage:{start_lsn:{lsn:365766615723}, end_lsn:{lsn:365766615723}, read_buf:{buf_len_:2125824, buf_:0x7f5457005000}, block_size:67104768, log_storage_:0x7f54939a2070, read_buf_has_log_block_header:false}, IteratorStorageType::"DiskIteratorStorage"}, curr_entry_is_raw_write:false, curr_entry_size:0, prev_entry_scn:{val:18446744073709551615}, curr_entry:{LogEntryHeader:{magic:0, version:0, log_size:-1, scn_:{val:18446744073709551615}, data_checksum:0, flag:0}}, init_mode_version:0, accumlate_checksum:-1}}}}) [2024-03-15 07:03:43.266208] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] 
[lt=67][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.266253] INFO [CLOG] operator() (ob_log_replay_service.cpp:1464) [687][T1003_ReplayPro][T1003][Y0-0000000000000000-0-0] [lt=118] get_replay_process success(id={id:1}, replayed_log_size=783782571, unreplayed_log_size=0) [2024-03-15 07:03:43.266264] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.266308] INFO [CLOG] runTimerTask (ob_log_replay_service.cpp:154) [687][T1003_ReplayPro][T1003][Y0-0000000000000000-0-0] [lt=15] dump tenant replay process(tenant_id=1003, unreplayed_log_size(MB)=0, estimate_time(second)=0, replayed_log_size(MB)=747, last_replayed_log_size(MB)=747, round_cost_time(second)=10, pending_replay_log_size(MB)=0) [2024-03-15 07:03:43.269713] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC6-0-0] [lt=110][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.269758] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC6-0-0] [lt=45][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.269813] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC6-0-0] [lt=51][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.269881] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC6-0-0] [lt=65][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, 
zone_priority:9223372036854775807}) [2024-03-15 07:03:43.269911] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC6-0-0] [lt=29][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.271364] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9A-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.271473] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.271500] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=28][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486223203465], now0=[mts=1710486223203465], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.271518] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=16][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223202843, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.271580] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=52][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:43.271612] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=31][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223202843, active_ts:-1, commit_ts:-1, 
finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.271641] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=27] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223202843, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.271696] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=54][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.271709] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.271716] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.271724] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=5][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.271731] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.271741] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, 
sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=12, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.271766] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22] will sleep(sleep_us=12000, remain_us=1063383, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=12, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.271853] INFO [STORAGE] gc_tables_in_queue (ob_tenant_meta_mem_mgr.cpp:360) [602][T1003_T3mGC][T1003][Y0-0000000000000000-0-0] [lt=54] Recycle 0 table(ret=0, allocator_={used:1489502, total:1846656}, tablet_pool_={typeid(T).name():"N9oceanbase7storage8ObTabletE", sizeof(T):2560, used_obj_cnt:728, free_obj_hold_cnt:0, allocator used:1910272, allocator total:2027648}, sstable_pool_={typeid(T).name():"N9oceanbase12blocksstable9ObSSTableE", sizeof(T):1088, used_obj_cnt:1537, free_obj_hold_cnt:0, allocator used:1770624, allocator total:1831424}, ddl_kv_pool_={typeid(T).name():"N9oceanbase7storage7ObDDLKVE", sizeof(T):3008, used_obj_cnt:0, free_obj_hold_cnt:0, allocator used:0, allocator total:0}, memtable_pool_={typeid(T).name():"N9oceanbase8memtable10ObMemtableE", sizeof(T):1920, used_obj_cnt:70, free_obj_hold_cnt:0, allocator used:138880, allocator total:196224}, tablet count=728, min_minor_cnt=0, pinned_tablet_cnt=0) [2024-03-15 07:03:43.276414] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.276478] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.283850] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:12, local_retry_times:12, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.283908] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=40][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.283919] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.283926] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) 
[526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.283962] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.284110] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.284139] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=28][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.284326] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC7-0-0] [lt=196][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.284370] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC7-0-0] [lt=44][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.284398] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC7-0-0] [lt=26][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.284443] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC7-0-0] [lt=21][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.284460] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC7-0-0] [lt=39][errcode=-4018] refresh priority 
failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.286687] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.286762] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=73][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.287242] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223287226], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.287276] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=32][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223287226], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.287302] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223287226]}) [2024-03-15 07:03:43.287326] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=12] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.287334] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223287322], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.287358] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=9] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223287322]}) [2024-03-15 07:03:43.287598] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223287589], 
range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.287632] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=23] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223287589]}) [2024-03-15 07:03:43.290100] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FF-0-0] [lt=15][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.290130] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FF-0-0] [lt=29][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.290152] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F91FF-0-0] [lt=8][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.290165] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F91FF-0-0] [lt=12][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.290174] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.290183] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.290190] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.290199] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] query failed(ret=-5627, conn=0x7f54bd1f4050, start=1710486223290069, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.290210] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.290218] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.293306] INFO [SQL.PC] update_memory_conf (ob_plan_cache.cpp:1330) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=57] update plan cache memory config(ob_plan_cache_percentage=5, ob_plan_cache_evict_high_percentage=90, ob_plan_cache_evict_low_percentage=50, tenant_id=1) [2024-03-15 07:03:43.293359] INFO [SQL.PC] cache_evict (ob_plan_cache.cpp:1021) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=47] start lib cache evict(tenant_id=1, mem_hold=2097152, mem_limit=107374180, cache_obj_num=1, cache_node_num=1) [2024-03-15 07:03:43.293373] INFO [SQL.PC] 
cache_evict (ob_plan_cache.cpp:1038) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=12] end lib cache evict(tenant_id=1, cache_evict_num=0, mem_hold=2097152, mem_limit=107374180, cache_obj_num=1, cache_node_num=1) [2024-03-15 07:03:43.293384] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2023) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=9] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.297942] INFO [SQL.PC] dump_all_objs (ob_plan_cache.cpp:1798) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=7] Dumping All Cache Objs(alloc_obj_list.count()=1, alloc_obj_list=[{obj_id:307, tenant_id:1, log_del_time:9223372036854775807, real_del_time:9223372036854775807, ref_count:5, added_to_lc:true, mem_used:108080}]) [2024-03-15 07:03:43.297987] INFO [SQL.PC] runTimerTask (ob_plan_cache.cpp:2031) [473][T1_PlanCacheEvi][T1][Y0-0000000000000000-0-0] [lt=39] schedule next cache evict task(evict_interval=1000000) [2024-03-15 07:03:43.298360] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.298404] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.309021] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.309111] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=91][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.309534] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) 
[553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.309576] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=42][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.309633] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=33][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223309519) [2024-03-15 07:03:43.309651] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=18][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486223109493, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.311958] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23] get wrs ts(ls_id={id:1}, delta=146268456974, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.311988] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=26] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=1, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.319246] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.319399] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=151][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.321102] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.321285] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=179][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486223245962], now0=[mts=1710486223245962], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.321329] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=42][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223245032, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.321407] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=65][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}) [2024-03-15 07:03:43.321504] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=95][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223245032, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.321552] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=45] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, 
xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223245032, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.321690] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=135][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.321715] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=21][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.321730] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=12][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.321759] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=24][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.321773] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=12][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.321806] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=29][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=9, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.321847] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=35] will sleep(sleep_us=9000, remain_us=1284351, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=9, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.329567] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", 
disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.329627] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=61][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.330968] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=55][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:9, local_retry_times:9, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.331040] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=50][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.331050] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.331057] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=7][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.331086] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=8][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.331262] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.331291] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=29][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.335543] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.339777] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether 
has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.339825] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=47][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.340834] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=41][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.349943] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.350002] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=58][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.352358] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=43][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.352398] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=40][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.352441] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=21][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047539}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486223352342) [2024-03-15 
07:03:43.352460] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486223152363, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.354746] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=230][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:169, local_retry_times:169, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.354820] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=47][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.354839] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=18][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.354851] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.354880] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=8][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.355082] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.355133] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=51][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.359844] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.359896] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=191][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486223284161], now0=[mts=1710486223284161], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.359917] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=19][errcode=-4283] acquire global snapshot 
fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223283305, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.360151] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=220][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:43.360161] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.360173] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223283305, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.360194] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 
07:03:43.360212] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223360192281, start_id=0, end_id=0) [2024-03-15 07:03:43.360216] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=39] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223283305, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.360277] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=59][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.360302] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.360311] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.360323] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.360331] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.360344] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=13, 
local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.360357] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10] will sleep(sleep_us=13000, remain_us=974792, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=13, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.364059] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [277][OmtNodeBalancer][T1][YB427F000001-000613ACA77F7ED8-0-0] [lt=17] table not exist(tenant_id=1, database_id=201001, table_name=__all_unit, ret=-5019) [2024-03-15 07:03:43.364143] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [277][OmtNodeBalancer][T1][YB427F000001-000613ACA77F7ED8-0-0] [lt=81][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.364331] INFO [SERVER.OMT] run1 (ob_tenant_node_balancer.cpp:103) [277][OmtNodeBalancer][T0][YB427F000001-000613ACA77F7ED8-0-0] [lt=0] refresh tenant units(sys_unit_cnt=0, units=[], ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.364679] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.364734] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.364737] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.364903] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365125] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365343] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365364] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365396] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365543] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) 
[915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365696] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [277][OmtNodeBalancer][T1][YB427F000001-000613ACA77F7ED8-0-0] [lt=46] table not exist(tenant_id=1, database_id=201001, table_name=__all_tenant, ret=-5019) [2024-03-15 07:03:43.365736] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365920] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365947] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.365974] INFO [SERVER] decide_disk_size (ob_server_utils.cpp:205) [277][OmtNodeBalancer][T0][YB427F000001-000613ACA77F7ED8-0-0] [lt=0] decide disk size finished(dir="/root/ob/store/sstable", suggested_disk_size=5368709120, suggested_disk_percentage=0, default_disk_percentage=60, total_space=536604577792, free_space=493852606464, disk_size=5368709120) [2024-03-15 07:03:43.366054] INFO [SERVER] decide_disk_size (ob_server_utils.cpp:205) [277][OmtNodeBalancer][T0][YB427F000001-000613ACA77F7ED8-0-0] [lt=82] decide disk size finished(dir="/root/ob/store/clog", suggested_disk_size=16106127360, suggested_disk_percentage=0, default_disk_percentage=30, total_space=536604577792, free_space=493852606464, disk_size=16106127360) [2024-03-15 07:03:43.366076] INFO [SERVER] cal_all_part_disk_size (ob_server_utils.cpp:167) [277][OmtNodeBalancer][T0][YB427F000001-000613ACA77F7ED8-0-0] [lt=19] decide_all_disk_size succ(data_dir="/root/ob/store/sstable", clog_dir="/root/ob/store/clog", suggested_data_disk_size=5368709120, suggested_data_disk_percentage=0, data_default_disk_percentage=60, clog_default_disk_percentage=30, shared_mode=true, data_disk_size=5368709120, log_disk_size=16106127360) [2024-03-15 07:03:43.366118] INFO [SERVER.OMT] run1 (ob_tenant_node_balancer.cpp:123) [277][OmtNodeBalancer][T0][YB427F000001-000613ACA77F7ED8-0-0] [lt=38] refresh tenant config(tenants=[], ret=-5019) [2024-03-15 07:03:43.366112] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366188] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366381] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366635] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366667] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366873] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.366906] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367101] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367226] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367302] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367471] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367510] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367661] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367819] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.367838] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368034] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368161] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368231] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368410] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368465] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368690] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368863] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368901] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.368979] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369087] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369338] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369509] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369602] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369634] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.369784] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370001] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370202] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370203] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=25][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370233] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370362] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.370476] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370461] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=98][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, 
maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.370686] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370761] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370859] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.370975] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371179] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371335] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371354] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371490] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371535] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371716] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371945] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.371993] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts 
fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372163] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372205] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372393] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372549] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372576] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372758] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372787] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.372945] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373135] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373161] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373380] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373451] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:13, 
local_retry_times:13, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.373473] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373503] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=31][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.373522] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.373528] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.373568] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.373731] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373756] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=22] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.373772] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=16][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.373796] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373827] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.373853] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 
07:03:43.374038] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374076] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374241] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374404] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374435] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374449] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374630] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374684] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374837] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.374992] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375085] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375207] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375321] 
WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375446] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375598] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375626] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375671] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375822] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.375988] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=2][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376018] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376323] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376353] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376368] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376679] WDIAG [STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.376701] WDIAG 
[STORAGE.TRANS] get_gts_from_local_timestamp_service_ (ob_gts_source.cpp:294) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=14][errcode=-4023] global_timestamp_service get gts fail(leader="127.0.0.1:2882", tmp_gts=0, ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.377082] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:131) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=8] ====== checkpoint timer task ====== [2024-03-15 07:03:43.378395] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=25] succ to get rec scn(*this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:3}, aggre_rec_scn={val:1710230200691799540}) [2024-03-15 07:03:43.378455] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=59] tx ctx memtable get rec scn(this={ObITable:{this:0x7f549eda2080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710482167841511}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f549eda2080, snapshot_version:{val:1710482167841511}, ls_id:{id:1}, is_frozen:true}, rec_scn={val:1710230200691799540}) [2024-03-15 07:03:43.378489] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=27] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:43.378508] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=15] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="TX_DATA_MEMTABLE_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={ObIMemtableMgr:{Memtables:this:0x7f549ecc81b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f549eccd290, table_type:1, memtable_head:0, memtable_tail:2, t3m:0x7f54b39e8030, tables:[0x7f549ed92080, 0x7f549ed92b00, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1}, tx_data_table:0x7f549ecce690, ls_tablet_svr:0x7f549ecc8190, slice_allocator:0x7f549ecce6d0}, min_rec_scn={val:1710208801027009356}, ls_id_={id:1}) [2024-03-15 07:03:43.379186] WDIAG load_file_to_string (utility.h:630) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=10][errcode=0] read /sys/class/net/lo/speed failed, errno 22 [2024-03-15 07:03:43.379214] WDIAG get_ethernet_speed (utility.cpp:625) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=23][errcode=-4000] load file /sys/class/net/lo/speed failed, ret -4000 [2024-03-15 07:03:43.379226] WDIAG [SERVER] get_network_speed_from_sysfs (ob_server.cpp:2260) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=7][errcode=-4000] cannot get Ethernet speed, use default(tmp_ret=0, devname="lo") [2024-03-15 07:03:43.379236] WDIAG [SERVER] runTimerTask (ob_server.cpp:2782) [69][ServerGTimer][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-4000] ObRefreshNetworkSpeedTask reload bandwidth throttle limit failed(ret=-4000, ret="OB_ERROR") [2024-03-15 
07:03:43.379763] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=31] get rec log scn(service_type_=0, rec_log_ts={val:1710506540610900036}) [2024-03-15 07:03:43.379782] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=19] get rec log scn(service_type_=1, rec_log_ts={val:1710506427288403526}) [2024-03-15 07:03:43.379791] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=7] get rec log scn(service_type_=2, rec_log_ts={val:1710506427288403525}) [2024-03-15 07:03:43.379803] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=6] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710208801027009356}, checkpoint_scn_in_ls_meta={val:1710208801027009356}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:43.379823] WDIAG [PALF] set_base_lsn (palf_handle_impl.cpp:1153) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=10][errcode=0] no need to set new base lsn, curr base lsn is greater than or equal to new base lsn(this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, curr_base_lsn={lsn:114682048512}, new_base_lsn={lsn:114682048512}, lsn={lsn:114682048512}) [2024-03-15 07:03:43.379848] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:184) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=25] [CHECKPOINT] advance palf base lsn successfully(checkpoint_lsn={lsn:114682048512}, ls->get_ls_id()={id:1}) [2024-03-15 07:03:43.379859] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:191) [554][T1_TxCkpt][T1][Y0-0000000000000000-0-0] [lt=8] succeed to update_clog_checkpoint(ret=0, ls_cnt=1) [2024-03-15 07:03:43.380681] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.380756] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=75][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.383773] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=32] Cache replace map node details(ret=0, replace_node_count=0, replace_time=1862, replace_start_pos=377472, replace_num=15728) [2024-03-15 07:03:43.383978] INFO [LIB] stat (utility.h:1140) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] [PALF STAT APPEND COST](cur_stat_count=3262, stat_interval=1000000, avg cost=4, this=0x7f549eccc950) [2024-03-15 07:03:43.388025] WDIAG [PALF] submit_log 
(palf_handle_impl.cpp:378) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=8][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.388110] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=75][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223388011], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.388150] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=36][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223388011], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.388197] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=25] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223388011]}) [2024-03-15 07:03:43.388242] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=25] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.388312] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=66][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223388233], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.388347] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=20] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223388233]}) [2024-03-15 07:03:43.388784] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223388764], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.388854] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=46] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223388764]}) [2024-03-15 07:03:43.390980] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.391062] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, 
oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.391122] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F9200-0-0] [lt=8][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.391138] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F9200-0-0] [lt=16][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.391161] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F9200-0-0] [lt=9][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.391174] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [801][T1004_TenantInf][T1004][YB427F000001-000613ACB04F9200-0-0] [lt=12][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select * from __all_tenant_info where tenant_id = 1004 "}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.391183] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.391192] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-5627] retry_while_no_tenant_resource failed(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.391200] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=-5627] execute_read failed(ret=-5627, cluster_id=1, tenant_id=1003) [2024-03-15 07:03:43.391208] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-5627] query failed(ret=-5627, conn=0x7f5457d38050, start=1710486223391090, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.391217] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5627] read failed(ret=-5627) [2024-03-15 07:03:43.391225] WDIAG [SHARE] load_tenant_info (ob_tenant_info_proxy.cpp:338) [801][T1004_TenantInf][T1004][Y0-0000000000000000-0-0] [lt=5][errcode=-5627] failed to read(ret=-5627, ret="OB_SCHEMA_EAGAIN", exec_tenant_id=1003, sql=select * from __all_tenant_info where tenant_id = 1004 ) [2024-03-15 07:03:43.396007] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [195][RpcIO][T0][Y0-0000000000000000-0-0] [lt=13] [RPC EASY STAT](log_str=conn count=1/1, request done=47216/47216, request doing=0/0) [2024-03-15 07:03:43.397259] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B88-0-0] [lt=1] table not exist(tenant_id=1, database_id=201001, table_name=__all_sys_parameter, ret=-5019) [2024-03-15 07:03:43.397291] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [75][ConfigMgr][T1][YB427F000001-000613ACAB1F8B88-0-0] [lt=30][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.397593] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] 
[lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.397659] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=64][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486223331314], now0=[mts=1710486223331314], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.397696] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=34][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223330764, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.397793] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=85][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}) [2024-03-15 07:03:43.397818] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=25][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223330764, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.397855] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=33] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, 
tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223330764, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.397941] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=85][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.397959] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.397971] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=9][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.398002] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=27][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.398023] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.398046] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=21][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=10, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.398070] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19] will sleep(sleep_us=10000, remain_us=1208128, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=10, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.401322] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=39][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.401462] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=138][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.401994] WDIAG [SHARE.SCHEMA] get_tenant_status (ob_schema_getter_guard.cpp:8471) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=18][errcode=-5157] tenant not exist(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.402021] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_schema_getter_guard.cpp:8435) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=29][errcode=-5157] fail to get tenant status(ret=-5157, ret="OB_TENANT_NOT_EXIST", tenant_id=1004) [2024-03-15 07:03:43.402032] WDIAG [SHARE.SCHEMA] check_tenant_is_restore (ob_multi_version_schema_service.cpp:3852) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5157] fail to check tenant is restore(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.402040] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1171) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=9][errcode=-5157] fail to check restore tenant exist(ret=-5157, tenant_id=1004) [2024-03-15 07:03:43.402049] WDIAG get_global_sys_variable (ob_basic_session_info.cpp:887) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-4029] fail get schema guard(ret=-4029) [2024-03-15 07:03:43.402059] WDIAG [SERVER] get_mem_limit (ob_mysql_request_manager.cpp:270) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=7][errcode=-4029] failed to get global sys variable(ret=-4029, tenant_id=1004, OB_SV_SQL_AUDIT_PERCENTAGE="ob_sql_audit_percentage", obj_val={"NULL":"NULL"}) [2024-03-15 07:03:43.402076] WDIAG [SERVER] check_config_mem_limit (ob_eliminate_task.cpp:65) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4029] failed to get mem limit(ret=-4029) [2024-03-15 07:03:43.402084] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:199) [897][T1004_ReqMemEvi][T1004][Y0-0000000000000000-0-0] [lt=6] sql audit evict task end(evict_high_mem_level=858993459, evict_high_size_level=90000, evict_batch_count=0, elapse_time=0, size_used=0, mem_used=0) [2024-03-15 07:03:43.405667] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=12] start do ls ha handler(ls_id_array_=[{id:1}]) [2024-03-15 07:03:43.405731] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.405754] WDIAG [STORAGE] check_meta_tenant_normal_ (ob_ls_restore_handler.cpp:364) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-5627] failed to get schema guard(ret=-5627, meta_tenant_id=1003) [2024-03-15 07:03:43.405771] WDIAG [STORAGE] 
check_before_do_restore_ (ob_ls_restore_handler.cpp:291) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-5627] fail to get meta tenant status(ret=-5627) [2024-03-15 07:03:43.405809] WDIAG [STORAGE] process (ob_ls_restore_handler.cpp:252) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-5627] fail to check before do restore(ret=-5627, ls_={ls_meta:{tenant_id:1003, ls_id:{id:1}, replica_type:0, ls_create_status:1, clog_checkpoint_scn:{val:1710278140261191947}, clog_base_lsn:{lsn:364982833152}, rebuild_seq:0, migration_status:0, gc_state_:1, offline_scn_:{val:18446744073709551615}, restore_status:{status:0}, replayable_point:{val:18446744073709551615}, tablet_change_checkpoint_scn:{val:1708483816326334010}, all_id_meta:{id_meta:[{limited_id:1710339965845487427, latest_log_ts:{val:1710339949200647547}}, {limited_id:598000001, latest_log_ts:{val:1710324545958121911}}, {limited_id:4840000001, latest_log_ts:{val:1710299167434590202}}]}}, log_handler:{role:1, proposal_id:1984, palf_env_:0x7f54937de030, is_in_stop_state_:false, is_inited_:true}, restore_handler:{is_inited:true, is_in_stop_state:false, id:1, proposal_id:9223372036854775807, role:2, parent:null, context:{issue_task_num:0, issue_version:-1, last_fetch_ts:-1, max_submit_lsn:{lsn:18446744073709551615}, max_fetch_lsn:{lsn:18446744073709551615}, max_fetch_scn:{val:18446744073709551615}, error_context:{ret_code:0, trace_id:Y0-0000000000000000-0-0}, task_count:0}, restore_context:{seek_done:false, lsn:{lsn:18446744073709551615}}}, is_inited:true, tablet_gc_handler:{tablet_persist_trigger:0, is_inited:true}}) [2024-03-15 07:03:43.405919] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [752][T1003_HAService][T1003][Y0-0000000000000000-0-0] [lt=131][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1}) [2024-03-15 07:03:43.408631] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=20][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:10, local_retry_times:10, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.408724] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=67][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.408742] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=18][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.408755] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=12][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.408790] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.408981] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, 
gts={val:18446744073709551615}) [2024-03-15 07:03:43.409026] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=44][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.410925] WDIAG [STORAGE.TRANS] get_cluster_service_master_ (ob_tenant_weak_read_service.cpp:286) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] tenant schema is not ready, need wait(ret=-4076, ret="OB_NEED_WAIT", superior_tenant_id=1) [2024-03-15 07:03:43.410976] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=50][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.411002] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.411028] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223410911) [2024-03-15 07:03:43.411044] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486223210812, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.411080] INFO [STORAGE.TRANS] generate_new_version (ob_tenant_weak_read_server_version_mgr.cpp:120) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=24] [WRS] update tenant weak read server version(tenant_id=1, server_version={version:{val:1710506547196065859}, total_part_count:1, valid_inner_part_count:1, valid_user_part_count:0, epoch_tstamp:1710486223409612}, version_delta=-20323784989) [2024-03-15 07:03:43.411615] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.411648] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.413000] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12] get wrs ts(ls_id={id:1}, delta=146268560844, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.414249] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=1241] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.414358] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB62-0-0] [lt=90][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.414418] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB62-0-0] [lt=59][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.414498] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB62-0-0] [lt=77][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.414519] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB62-0-0] [lt=21][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.414537] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB62-0-0] [lt=17][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, 
is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.419548] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=34][errcode=0] server is initiating(server_id=0, local_seq=27146, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:43.419601] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=50] new mysql sessid created(conn.sessid_=3221252618, support_ssl=false) [2024-03-15 07:03:43.419712] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=19] sm conn init succ(conn.sessid_=3221252618, sess.client_addr_="172.21.122.86:42724") [2024-03-15 07:03:43.419754] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=31] accept one succ(*s={this:0x7f5433113230, fd:120, err:0, last_decode_time_:0, last_write_time_:1710486223419707, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.421053] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=28] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.421123] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=67][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.421143] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=18][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.421194] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=45][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223421027, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.421285] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=89][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:369, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223421027, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.421391] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=51] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.421410] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=19][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.421418] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=7][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) 
[2024-03-15 07:03:43.421441] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=21][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:43.421448] WDIAG [SERVER] process (obmp_connect.cpp:242) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=6][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:43.421470] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=5] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.421501] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=13][errcode=0] server close connection(sessid=3221252618, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.421513] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=10][errcode=-4018] get session fail(ret=-4018, sessid=3221252618, proxy_sessid=0) [2024-03-15 07:03:43.421520] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=6][errcode=-4016] session is null [2024-03-15 07:03:43.421526] INFO [SERVER] process (obmp_connect.cpp:369) [109][MysqlQueueTh0][T0][Y0-000613ACA75FB123-0-0] [lt=4] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=uccenter, host_name=xxx.xxx.xxx.xxx, sessid=3221252618, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:43.421648] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=37][errcode=-4015] close sql sock by user req(*s={this:0x7f5433113230, fd:120, err:5, last_decode_time_:1710486223421027, last_write_time_:1710486223421645, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.421671] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=17] kill and revert session(conn.sessid_=3221252618, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:43.421678] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=7] can close safely, do destroy(*s={this:0x7f5433113230, fd:120, err:5, last_decode_time_:1710486223421027, last_write_time_:1710486223421645, read_buffer_.get_consume_sz():373, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.421688] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=7] connection close(sessid=3221252618, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:43.421889] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be 
recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.421949] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=59][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.423065] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.423094] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=28][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486223355175], now0=[mts=1710486223355175], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.423114] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=18][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223352091, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.423162] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=39][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 07:03:43.423186] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=24][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, 
tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223352091, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.423219] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=31] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223352091, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.423292] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=71][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.423308] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=14][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.423317] WDIAG [SQL] open (ob_result_set.cpp:157) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=8][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.423326] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=6][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.423334] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=6][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.423346] WDIAG [SERVER] query 
(ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=170, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.423360] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11] will sleep(sleep_us=100000, remain_us=6583690, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=170, timeout_timestamp=1710486230007049) [2024-03-15 07:03:43.432113] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.432166] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.440830] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=10][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.442311] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.442352] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.442492] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] 
[lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.442533] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=41][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486223373793], now0=[mts=1710486223373793], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.442567] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=32][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223372935, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.442658] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=77][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:43.442692] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=34][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223372935, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.442739] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=43] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, 
tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223372935, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.442835] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=94][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.442859] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=18][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.442885] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=23][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.442897] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.442908] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=9][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.442923] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=13][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=14, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.442942] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15] will sleep(sleep_us=14000, remain_us=892207, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=14, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.442955] INFO [LIB] stat (utility.h:1140) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=2] [PALF STAT APPEND COST](cur_stat_count=4487, stat_interval=1000000, avg cost=3, this=0x7f54639de950) [2024-03-15 07:03:43.448738] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=28] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2720698368, global_cache_size=12484608, 
tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:43.448958] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=59] Wash time detail, (compute_wash_size_time=259, refresh_score_time=142, wash_time=17) [2024-03-15 07:03:43.452541] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.452576] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.452604] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.452638] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=32][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.452719] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=51][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047539}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486223452586) [2024-03-15 07:03:43.452762] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=42][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486223352473, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.452813] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=35] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1004, serve_leader_epoch=0, cur_leader_epoch=431, cluster_service_tablet_id_={id:226}, in_service=false, 
can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.452872] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=43] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1004, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:43.452890] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=17] start TenantWeakReadClusterService(tenant_id=1004) [2024-03-15 07:03:43.454974] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984B-0-0] [lt=25][errcode=-5627] get tenant schema store fail, maybe local schema is old(ret=-5627, tenant_id=1003) [2024-03-15 07:03:43.455008] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:753) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984B-0-0] [lt=35][errcode=-5627] get schema guard failed(ret=-5627) [2024-03-15 07:03:43.455140] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [898][T1004_TenantWea][T1003][YB427F000001-000613ACAC1F984B-0-0] [lt=16][errcode=-5627] failed to process record(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, record_ret=-5627, ret=-5627) [2024-03-15 07:03:43.455229] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [898][T1004_TenantWea][T1004][YB427F000001-000613ACAC1F984B-0-0] [lt=86][errcode=-5627] failed to process final(executor={ObIExecutor:, sql:"select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = ''"}, aret=-5627, ret=-5627) [2024-03-15 07:03:43.455250] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-5627] execute sql failed(ret=-5627, tenant_id=1003, sql=select min_version, max_version from __all_weak_read_service where tenant_id = 1004 and level_id = 0 and level_value = '') [2024-03-15 07:03:43.455494] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=238][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.455668] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486223455663, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=2809, wlock_time=62, check_leader_time=3, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:43.455734] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=1] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1004, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=431, start_service_tstamp_=0, error_count_for_change_leader_=0, 
last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.457038] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:14, local_retry_times:14, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.457087] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=31][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.457097] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=10][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.457103] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.457122] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.457256] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.457283] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=25][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.457323] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=38] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=8322, clean_start_pos=1541393, clean_num=31457) [2024-03-15 07:03:43.460280] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223460271304, start_id=0, end_id=0) [2024-03-15 07:03:43.463133] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, 
log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.463201] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=66][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.466517] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [783][T1004_Occam][T1][YB427F000001-000613ACA9FF9B8F-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.473465] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.473564] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=82][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.478742] INFO [DETECT] record_summary_info_and_logout_when_necessary_ (ob_lcl_batch_sender_thread.cpp:202) [891][T1004_LCLSender][T1004][Y0-0000000000000000-0-0] [lt=38] ObLCLBatchSenderThread periodic report summary info(duty_ratio_percentage=0, total_constructed_detector=0, total_destructed_detector=0, total_alived_detector=0, _lcl_op_interval=30000, lcl_msg_map_.count()=0, *this={this:0x7f54791e42b0, is_inited:true, is_running:true, total_record_time:5010000, over_night_times:0}) [2024-03-15 07:03:43.479054] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.479210] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=154][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486223409091], now0=[mts=1710486223409091], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.479265] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=52][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, 
snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223408445, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.479377] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=95][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}) [2024-03-15 07:03:43.479477] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=99][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223408445, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.479555] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=72] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223408445, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, 
autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.479685] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=127][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.479715] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=26][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.479731] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=14][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.479754] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.479768] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=11][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.479803] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=31][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=11, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.479835] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=25] will sleep(sleep_us=11000, remain_us=1126362, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=11, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.483732] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.483772] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.489657] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=3][errcode=-4023] post local gts request 
failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223489612], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.489771] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=108][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223489612], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.489830] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=28] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223489612]}) [2024-03-15 07:03:43.489894] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=43] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.489916] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=20][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223489885], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.489956] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=23] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223489885]}) [2024-03-15 07:03:43.490394] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=2][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223490376], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.490478] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=61] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223490376]}) [2024-03-15 07:03:43.491299] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=58][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:11, local_retry_times:11, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.491409] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=61][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.491479] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=69][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.491495] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=15][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.491536] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=14][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.491789] INFO [STORAGE.TRANS] 
try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.491842] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=51][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.492245] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [801][T1004_TenantInf][T1003][YB427F000001-000613ACB04F9201-0-0] [lt=9][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.493956] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.493996] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=39][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.494360] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [194][RpcIO][T0][Y0-0000000000000000-0-0] [lt=32] [RPC EASY STAT](log_str=conn count=1/1, request done=47218/47218, request doing=0/0) [2024-03-15 07:03:43.504131] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=24][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.504206] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=73][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, 
used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.509692] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=111][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.509747] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=54][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.509787] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=37][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223509676) [2024-03-15 07:03:43.509804] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=17][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486223309674, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.510063] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=18][errcode=0] server is initiating(server_id=0, local_seq=27147, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:43.510098] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=32] new mysql sessid created(conn.sessid_=3221252619, support_ssl=false) [2024-03-15 07:03:43.510176] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=13] sm conn init succ(conn.sessid_=3221252619, sess.client_addr_="172.21.122.86:42726") [2024-03-15 07:03:43.510205] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=21] accept one succ(*s={this:0x7f5494dbd270, fd:137, err:0, last_decode_time_:0, last_write_time_:1710486223510174, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.511282] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=41] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.511308] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=26][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.511319] WDIAG [SERVER] extract_tenant_id (ob_srv_deliver.cpp:100) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=9][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.511332] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-5160] extract tenant_id 
fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223511263, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.511360] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=28][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:398, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223511263, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.511470] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=32] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.511486] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=16][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.511494] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=6][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.511502] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=7][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:43.511509] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=7][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:43.511528] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=6] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.511581] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=11][errcode=0] server close connection(sessid=3221252619, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.511747] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=11][errcode=-4018] get session fail(ret=-4018, sessid=3221252619, proxy_sessid=0) [2024-03-15 07:03:43.511776] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=28][errcode=-4016] session is null [2024-03-15 07:03:43.511787] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03A-0-0] [lt=7] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252619, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=true, capability=683647754, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, 
proc_ret=-5160, ret=0) [2024-03-15 07:03:43.511915] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=13][errcode=-4015] close sql sock by user req(*s={this:0x7f5494dbd270, fd:137, err:5, last_decode_time_:1710486223511263, last_write_time_:1710486223511912, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.511936] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=15] kill and revert session(conn.sessid_=3221252619, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:43.511944] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=8] can close safely, do destroy(*s={this:0x7f5494dbd270, fd:137, err:5, last_decode_time_:1710486223511263, last_write_time_:1710486223511912, read_buffer_.get_consume_sz():402, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.511957] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [210][sql_nio0][T0][Y0-0000000000000000-0-0] [lt=9] connection close(sessid=3221252619, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:43.512329] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=32] get wrs ts(ls_id={id:1}, delta=146268660167, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.512372] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=38] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.512706] INFO [STORAGE] operator() (ob_tenant_freezer.cpp:125) [475][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=27] ====== tenant freeze timer task ====== [2024-03-15 07:03:43.512737] INFO [STORAGE] log_frozen_memstore_info_if_need_ (ob_tenant_freezer.cpp:1262) [475][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=22] [TenantFreezer] tenant have inactive memstores(ctx.active_memstore_used_=0, ctx.total_memstore_used_=398458880, ctx.total_memstore_hold_=398458880, memstore_freeze_trigger_limit_=214748360, tenant_id=1) [2024-03-15 07:03:43.512847] INFO [STORAGE] log_frozen_memstore_info_if_need_ (ob_tenant_freezer.cpp:1271) [475][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=12] [TenantFreezer] oldest frozen memtable(list="{ObITable:{this:0x7f549edc6850, key:{tablet_id:{id:344}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710347213179170384}, end_scn:{val:1710506547196061973}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482294248780}, this:0x7f549edc6850, timestamp:1710482294248780, state:0, freeze_clock:2, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:66, clock:297795584}, host:0x7f5494dbc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482133589323}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, 
contain_hotspot_row:false, max_end_scn:{val:1710506547196061973}, rec_scn:{val:1710458239555578633}, snapshot_version:{val:1710506547196061973}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482325584587, mt_stat_.ready_for_flush_time:1710482325584657, mt_stat_.create_flush_dag_time:0, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f549edc6080, key:{tablet_id:{id:377}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710347213179170384}, end_scn:{val:1710506547196061973}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482276736733}, this:0x7f549edc6080, timestamp:1710482276736733, state:0, freeze_clock:2, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:65, clock:260046848}, host:0x7f5494dbc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482133589323}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547196061973}, rec_scn:{val:1710432900836225100}, snapshot_version:{val:1710506547196061973}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482325584395, mt_stat_.ready_for_flush_time:1710482325584491, mt_stat_.create_flush_dag_time:1710486129203220, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f549edf52b0, key:{tablet_id:{id:323}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710347213179170384}, end_scn:{val:1710506547196061973}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482265917979}, this:0x7f549edf52b0, timestamp:1710482265917979, state:0, freeze_clock:2, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:64, clock:230686720}, host:0x7f5494dbc030, arena_handle:{allocated:0}, last_freeze_timestamp:1710482133589323}, unsubmitted_cnt:0, unsynced_cnt:0, logging_blocked:false, unset_active_memtable_logging_blocked:false, resolve_active_memtable_left_boundary:true, contain_hotspot_row:false, max_end_scn:{val:1710506547196061973}, rec_scn:{val:1710384419727393501}, snapshot_version:{val:1710506547196061973}, migration_clog_checkpoint_scn:{val:0}, is_tablet_freeze:false, is_force_freeze:false, contain_hotspot_row:false, read_barrier:false, is_flushed:false, freeze_state:1, mt_stat_.frozen_time:1710482325584266, mt_stat_.ready_for_flush_time:1710482325584332, mt_stat_.create_flush_dag_time:1710486129203128, mt_stat_.release_time:0, mt_stat_.last_print_time:0},{ObITable:{this:0x7f549edf4ae0, key:{tablet_id:{id:373}, column_group_idx:0, table_type:"MEMTABLE", scn_range:{start_scn:{val:1710347213179170384}, end_scn:{val:1710506547196061973}}}, ref_cnt:2, upper_trans_version:9223372036854775807, timestamp:1710482264868941}, this:0x7f549edf4ae0, timestamp:1710482264868941, state:0, freeze_clock:2, max_schema_version:0, write_ref_cnt:0, local_allocator:{ListHandle:{freeze_stat:2, id:63, clock:226492416}, host:0x7f5494dbc030, arena_handle:{allocated:2097152}, last_freeze_timestamp:1710482133589323}, un") [2024-03-15 07:03:43.513789] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) 
[475][T1_Occam][T1][YB427F000001-000613ACA8CF8B73-0-0] [lt=25] table not exist(tenant_id=1, database_id=201001, table_name=__all_freeze_info, ret=-5019) [2024-03-15 07:03:43.513814] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB632-0-0] [lt=308][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.513850] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB632-0-0] [lt=36][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.513895] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB632-0-0] [lt=43][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.514351] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.514393] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=42][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.514548] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [475][T1_Occam][T1][YB427F000001-000613ACA8CF8B73-0-0] [lt=755][errcode=-5019] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.514585] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB632-0-0] [lt=685][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], 
is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.514619] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB632-0-0] [lt=34][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.514710] WDIAG [STORAGE] check_and_freeze_normal_data_ (ob_tenant_freezer.cpp:408) [475][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=1][errcode=0] [TenantFreezer] fail to do major freeze(tmp_ret=-5019) [2024-03-15 07:03:43.514744] INFO [STORAGE] check_and_freeze_tx_data_ (ob_tenant_freezer.cpp:448) [475][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=18] [TenantFreezer] Trigger Tx Data Table Self Freeze. (tenant_info_.tenant_id_=1, tenant_tx_data_mem_used=242215936, self_freeze_max_limit_=107374182, memstore_hold_memory=398458880, self_freeze_tenant_hold_limit_=429496729, self_freeze_min_limit_=21474836) [2024-03-15 07:03:43.514985] INFO [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:74) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=13] start tx data table self freeze task in rpc handle thread(arg_=freeze_type:3) [2024-03-15 07:03:43.515017] INFO [STORAGE] self_freeze_task (ob_tx_data_table.cpp:827) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=25] start tx data table self freeze task(get_ls_id()={id:1}) [2024-03-15 07:03:43.515030] INFO [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:193) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=8] start freeze tx data memtable(ls_id_={id:1}) [2024-03-15 07:03:43.515039] INFO [STORAGE] freeze_ (ob_tx_data_memtable_mgr.cpp:229) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=7] There is a freezed memetable existed. 
Try freeze after flushing it.(ret=-4023, ret="OB_EAGAIN", get_memtable_count_()=2) [2024-03-15 07:03:43.515053] WDIAG [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:207) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=12][errcode=-4023] freeze tx data memtable fail.(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.515060] WDIAG [STORAGE] flush (ob_tx_data_memtable_mgr.cpp:483) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=8][errcode=-4023] freeze failed(ret=-4023, ret="OB_EAGAIN", this=0x7f549ecc81b0) [2024-03-15 07:03:43.515068] WDIAG [STORAGE] self_freeze_task (ob_tx_data_table.cpp:831) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=7][errcode=-4023] self freeze of tx data memtable failed.(ret=-4023, ret="OB_EAGAIN", ls_id={id:1}, memtable_mgr_={ObIMemtableMgr:{Memtables:this:0x7f549ecc81b0, ref_cnt:1, is_inited:true, tablet_id:{id:49402}, freezer:0x7f549eccd290, table_type:1, memtable_head:0, memtable_tail:2, t3m:0x7f54b39e8030, tables:[0x7f549ed92080, 0x7f549ed92b00, null, null, null, null, null, null, null, null, null, null, null, null, null, null]}, is_freezing:false, ls_id:{id:1}, tx_data_table:0x7f549ecce690, ls_tablet_svr:0x7f549ecc8190, slice_allocator:0x7f549ecce6d0}) [2024-03-15 07:03:43.515094] INFO [STORAGE] self_freeze_task (ob_tx_data_table.cpp:834) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=26] finish tx data table self freeze task(ret=-4023, ret="OB_EAGAIN", get_ls_id()={id:1}) [2024-03-15 07:03:43.515101] WDIAG [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:103) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=6][errcode=-4023] freeze tx data table failed.(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:43.515109] INFO [STORAGE] do_tx_data_table_freeze_ (ob_tenant_freezer_rpc.cpp:116) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=7] finish self freeze task in rpc handle thread(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:43.515119] WDIAG [STORAGE] process (ob_tenant_freezer_rpc.cpp:57) [583][T1_L0_G0][T1][YB427F000001-000613ACA8CF8B74-0-0] [lt=7][errcode=-4023] do tx data table freeze failed.(ret=-4023, ret="OB_EAGAIN", arg_=freeze_type:3) [2024-03-15 07:03:43.515289] INFO [STORAGE] rpc_callback (ob_tenant_freezer.cpp:1019) [193][RpcIO][T0][Y0-0000000000000000-0-0] [lt=39] [TenantFreezer] call back of tenant freezer request [2024-03-15 07:03:43.515645] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=19][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-5019, dropped:455, tid:783}]) [2024-03-15 07:03:43.516505] INFO [STORAGE] scheduler_ls_ha_handler_ (ob_storage_ha_service.cpp:186) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=9] start do ls ha handler(ls_id_array_=[{id:1}, {id:1001}]) [2024-03-15 07:03:43.516539] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1}) [2024-03-15 07:03:43.516551] WDIAG [STORAGE] do_ha_handler_ (ob_storage_ha_service.cpp:223) [913][T1004_HAService][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=0] failed to do ls restore handler process(tmp_ret=-5627, ls_id={id:1001}) [2024-03-15 07:03:43.520104] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC8-0-0] 
[lt=153][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.520321] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC8-0-0] [lt=215][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.520361] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC8-0-0] [lt=37][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.520395] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC8-0-0] [lt=32][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.520452] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC8-0-0] [lt=55][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.520644] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.520658] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=14][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486223457307], now0=[mts=1710486223457307], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.520675] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=15][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223456552, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, 
expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.520724] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=40][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:43.520743] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=19][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223456552, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.520773] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=26] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223456552, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 
07:03:43.520833] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=59][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.520846] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.520854] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.520862] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.520871] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.520885] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=15, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.520905] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=16] will sleep(sleep_us=15000, remain_us=814243, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=15, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.523481] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:170, local_retry_times:170, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.523552] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=48][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.523567] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=13][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.523578] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.523610] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=11][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.523961] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait 
gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.523998] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=37][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.524526] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.524568] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.533958] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC9-0-0] [lt=192][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.534022] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC9-0-0] [lt=64][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.534092] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC9-0-0] [lt=65][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.534130] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC9-0-0] [lt=36][errcode=-4018] assign element 
failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.534180] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFAC9-0-0] [lt=49][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.534299] WDIAG [RPC.OBRPC] rpc_call (ob_rpc_proxy.ipp:453) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=12][errcode=-8001] execute rpc fail(ret=-8001, dst="127.0.0.1:2882") [2024-03-15 07:03:43.534330] WDIAG log_user_error_and_warn (ob_rpc_proxy.cpp:252) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=29][errcode=-8001] [2024-03-15 07:03:43.534363] INFO [SHARE] renew_master_rootserver (ob_rs_mgr.cpp:366) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=10] [RS_MGR] new master rootserver found(rootservice="127.0.0.1:2882", cluster_id=1) [2024-03-15 07:03:43.534699] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.534744] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.534961] WDIAG [RPC.OBRPC] rpc_call (ob_rpc_proxy.ipp:453) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=14][errcode=-8001] execute rpc fail(ret=-8001, dst="127.0.0.1:2882") [2024-03-15 07:03:43.535204] WDIAG log_user_error_and_warn (ob_rpc_proxy.cpp:252) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=25][errcode=-8001] [2024-03-15 07:03:43.535259] INFO [SHARE] renew_master_rootserver (ob_rs_mgr.cpp:366) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=38] [RS_MGR] new master rootserver found(rootservice="127.0.0.1:2882", cluster_id=1) [2024-03-15 07:03:43.535556] WDIAG [RPC.OBRPC] rpc_call (ob_rpc_proxy.ipp:453) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=13][errcode=-8001] execute rpc fail(ret=-8001, dst="127.0.0.1:2882") [2024-03-15 07:03:43.535578] 
WDIAG log_user_error_and_warn (ob_rpc_proxy.cpp:252) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=21][errcode=-8001] [2024-03-15 07:03:43.535592] INFO [SHARE] renew_master_rootserver (ob_rs_mgr.cpp:366) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=7] [RS_MGR] new master rootserver found(rootservice="127.0.0.1:2882", cluster_id=1) [2024-03-15 07:03:43.535858] WDIAG [RPC.OBRPC] rpc_call (ob_rpc_proxy.ipp:453) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=17][errcode=-8001] execute rpc fail(ret=-8001, dst="127.0.0.1:2882") [2024-03-15 07:03:43.535882] WDIAG log_user_error_and_warn (ob_rpc_proxy.cpp:252) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=23][errcode=-8001] [2024-03-15 07:03:43.535894] WDIAG [SHARE] refresh (ob_alive_server_tracer.cpp:379) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=7][errcode=-8001] fetch alive server failed(ret=-8001) [2024-03-15 07:03:43.535976] WDIAG [SHARE] runTimerTask (ob_alive_server_tracer.cpp:251) [72][ServerTracerTim][T0][YB427F000001-000613ACA7EF7EDB-0-0] [lt=81][errcode=-8001] refresh alive server list failed(ret=-8001) [2024-03-15 07:03:43.535983] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:15, local_retry_times:15, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.536033] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=32][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.536041] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.536048] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.536072] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.536218] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.536246] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=26][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.540980] WDIAG [STORAGE.TRANS] run1 
(ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=51][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.544875] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.544957] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.550233] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=6] table not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:43.550281] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=45][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:43.550297] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=15][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_ls_meta_table, db_name=oceanbase) [2024-03-15 07:03:43.550311] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=12][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_ls_meta_table) [2024-03-15 07:03:43.550324] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:43.550333] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:43.550347] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] Table 'oceanbase.__all_ls_meta_table' doesn't exist [2024-03-15 07:03:43.550355] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=6][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:43.550363] WDIAG [SQL.RESV] resolve_basic_table 
(ob_dml_resolver.cpp:13407) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:43.550371] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:43.550379] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:43.550387] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:43.550395] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:43.550404] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:43.550445] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:43.550456] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.550467] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.550476] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:43.550485] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] fail to handle text query(stmt=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port, ret=-5019) [2024-03-15 07:03:43.550496] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:43.550504] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.550526] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=14][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:43.550555] WDIAG [SERVER] inner_close 
(ob_inner_sql_result.cpp:218) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=25][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.550564] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.550572] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:43.550594] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [727][T1003_LSMetaCh][T1][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:43.550606] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=10][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5019, ret=-5019) [2024-03-15 07:03:43.550616] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.550626] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=9][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:43.550633] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:43.550642] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] query failed(ret=-5019, conn=0x7f54845f4050, start=1710486223549989, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.550654] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=11][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:43.550662] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=6][errcode=-5019] execute sql failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1003 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.550745] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=16][errcode=-5019] get all ls info by persistent_ls_ failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003) [2024-03-15 07:03:43.550776] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=29][errcode=-5019] fail to get ls infos by tenant(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1003, inner_table_only=false) 
[2024-03-15 07:03:43.550786] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=10][errcode=-5019] fail to open iterator(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.550795] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=7][errcode=-5019] ls table iterator next failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.550806] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] build replica map from ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=0) [2024-03-15 07:03:43.550816] WDIAG [SERVER] check_ls_table (ob_tenant_meta_checker.cpp:188) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] check ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=0) [2024-03-15 07:03:43.550826] WDIAG [SERVER] runTimerTask (ob_tenant_meta_checker.cpp:44) [727][T1003_LSMetaCh][T1003][YB427F000001-000613ACA96F8B91-0-0] [lt=8][errcode=-5019] fail to check ls meta table(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.553310] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=12] table not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:43.553353] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=40][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_ls_meta_table, ret=-5019) [2024-03-15 07:03:43.553371] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=16][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_ls_meta_table, db_name=oceanbase) [2024-03-15 07:03:43.553387] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=14][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_ls_meta_table) [2024-03-15 07:03:43.553405] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=12][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:43.553417] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=11][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:43.553474] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=48][errcode=-5019] Table 'oceanbase.__all_ls_meta_table' doesn't exist [2024-03-15 07:03:43.553494] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=17][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:43.553507] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:43.553518] WDIAG [SQL.RESV] 
resolve_table (ob_dml_resolver.cpp:3142) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=9][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:43.553529] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=10][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:43.553543] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:43.553554] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=10][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:43.553569] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=12][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:43.553591] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:43.553611] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=18][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.553627] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.553663] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=33][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:43.553678] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] fail to handle text query(stmt=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port, ret=-5019) [2024-03-15 07:03:43.553693] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:43.553705] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=10][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.553732] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=21][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:43.553850] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=111][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.553870] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) 
[543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=19][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.553879] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=8][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:43.553906] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=10][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:43.553943] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=35][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port"}, aret=-5019, ret=-5019) [2024-03-15 07:03:43.553962] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=16][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.553977] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=14][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:43.553989] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=11][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:43.554020] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=27][errcode=-5019] query failed(ret=-5019, conn=0x7f5492b2e050, start=1710486223553067, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.554089] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=68][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:43.554104] WDIAG [SHARE.PT] get_by_tenant (ob_persistent_ls_table.cpp:609) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=12][errcode=-5019] execute sql failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, sql=SELECT * FROM __all_ls_meta_table WHERE tenant_id = 1 ORDER BY tenant_id, ls_id, svr_ip, svr_port) [2024-03-15 07:03:43.554195] WDIAG [SHARE.PT] get_by_tenant (ob_ls_table_operator.cpp:252) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=14][errcode=-5019] get all ls info by persistent_ls_ failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1) [2024-03-15 07:03:43.554221] WDIAG [SHARE] inner_open_ (ob_ls_table_iterator.cpp:104) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=22][errcode=-5019] fail to get ls infos by tenant(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, inner_table_only=true) [2024-03-15 07:03:43.554236] WDIAG [SHARE] next (ob_ls_table_iterator.cpp:71) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=17][errcode=-5019] fail to open iterator(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.554250] WDIAG [SERVER] build_replica_map_ (ob_tenant_meta_checker.cpp:332) 
[543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=13][errcode=-5019] ls table iterator next failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.554265] WDIAG [SERVER] check_ls_table_ (ob_tenant_meta_checker.cpp:214) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=11][errcode=-5019] build replica map from ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=1) [2024-03-15 07:03:43.554278] WDIAG [SERVER] check_ls_table (ob_tenant_meta_checker.cpp:194) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=11][errcode=-5019] check ls table failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", mode=1) [2024-03-15 07:03:43.554297] WDIAG [SERVER] runTimerTask (ob_tenant_meta_checker.cpp:44) [543][T1_LSMetaCh][T1][YB427F000001-000613ACA84F8B95-0-0] [lt=18][errcode=-5019] fail to check ls meta table(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.555141] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.555191] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=48][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.560323] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223560312961, start_id=0, end_id=0) [2024-03-15 07:03:43.560840] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.560859] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224536195, now=[mts=1710486223491884], now0=[mts=1710486223491884], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.560877] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=16][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223491189, active_ts:-1, commit_ts:-1, finish_ts:-1, 
timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.560926] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=40][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}) [2024-03-15 07:03:43.560945] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=19][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223491189, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.560975] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=27] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223491189, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], 
cflict_txs:[]}) [2024-03-15 07:03:43.561043] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=66][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.561057] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=11][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.561066] WDIAG [SQL] open (ob_result_set.cpp:157) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.561074] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=7][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.561081] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=7][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.561091] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=9][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=12, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.561105] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10] will sleep(sleep_us=12000, remain_us=1045092, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=12, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.565329] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.565394] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=65][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.568508] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:131) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=14] ====== checkpoint timer task ====== [2024-03-15 07:03:43.570258] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] 
[lt=49] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.570414] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=154] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:43.570496] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=72] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:43.570526] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=26] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="DATA_CHECKPOINT_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={this:0x7f5484df2290}, min_rec_scn={val:1710278140261191947}, ls_id_={id:1}) [2024-03-15 07:03:43.571690] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=21] get rec log scn(service_type_=0, rec_log_ts={val:1710339949200647547}) [2024-03-15 07:03:43.571728] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=39] get rec log scn(service_type_=1, rec_log_ts={val:1710324545958121911}) [2024-03-15 07:03:43.571745] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=13] get rec log scn(service_type_=2, rec_log_ts={val:4611686018427387903}) [2024-03-15 07:03:43.571763] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=10] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710278140261191947}, checkpoint_scn_in_ls_meta={val:1710278140261191947}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:43.571791] WDIAG [PALF] set_base_lsn (palf_handle_impl.cpp:1153) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=0] no need to set new base lsn, curr base lsn is greater than or equal to new base lsn(this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, curr_base_lsn={lsn:364982833152}, new_base_lsn={lsn:364982833152}, lsn={lsn:364982833152}) [2024-03-15 07:03:43.571834] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:184) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=40] [CHECKPOINT] advance palf base lsn successfully(checkpoint_lsn={lsn:364982833152}, ls->get_ls_id()={id:1}) [2024-03-15 07:03:43.571852] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:191) [738][T1003_TxCkpt][T1003][Y0-0000000000000000-0-0] [lt=14] succeed to update_clog_checkpoint(ret=0, ls_cnt=1) 
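The checkpoint pass above ends with the palf base LSN unchanged (curr_base_lsn equals the new base lsn at 364982833152), and the surrounding T1003_PalfGC entries keep reporting that no block can be recycled until that base LSN advances, each time followed by the "Log out of disk space" ERROR. A minimal sketch of the threshold arithmetic those ERRORs print, assuming plain percentage rounding; the variable names here are illustrative, not the actual palf_env_impl.cpp code:

    // Illustrative sketch only: reproduces the numbers printed in the repeated
    // "Log out of disk space" ERRORs (total_size(MB)=921, warn at 80%, limit at 95%).
    // Names and rounding are assumptions, not the real try_recycle_blocks logic.
    #include <cmath>
    #include <cstdio>

    int main() {
      const double total_mb = 921.0;  // log_disk_size(MB) from the log
      const double used_mb  = 875.0;  // used_size(MB) from the log

      const long long warn_mb  = std::llround(total_mb * 0.80);  // ~737 = warn_size(MB)
      const long long limit_mb = std::llround(total_mb * 0.95);  // ~875 = limit_size(MB)

      if (used_mb >= limit_mb) {
        // Mirrors the ERROR above: usage has reached the 95% limit, and since
        // recycle_blocks_ finds nothing below the current base LSN to free,
        // usage cannot drop until the checkpoint advances that base LSN.
        std::printf("log disk space is almost full: used=%.0fMB limit=%lldMB\n",
                    used_mb, limit_mb);
      } else if (used_mb >= warn_mb) {
        std::printf("log disk usage above warn threshold (%lldMB)\n", warn_mb);
      }
      return 0;
    }

With the logged values, used_size (875 MB) has already reached limit_size (875 MB), which is why the same ERROR recurs on every GC pass in this section for as long as the base LSN cannot move forward.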
[2024-03-15 07:03:43.573199] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:12, local_retry_times:12, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.573254] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=35][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.573267] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=13][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.573275] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=8][errcode=-4283] failed to close result(close_ret=-4283, ret=-4283) [2024-03-15 07:03:43.573301] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=10][errcode=-4283] failed to process record(executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, record_ret=-4283, ret=-4283) [2024-03-15 07:03:43.573492] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.573515] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=22][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.575566] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=22][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.575630] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=63][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.585829] WDIAG [PALF] 
recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=33][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.585859] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.586945] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=36] Cache replace map node details(ret=0, replace_node_count=0, replace_time=2968, replace_start_pos=393200, replace_num=15728) [2024-03-15 07:03:43.591238] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.591261] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=21][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223591224], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.591290] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=18][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223591224], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.591316] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=13] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223591224]}) [2024-03-15 07:03:43.591332] WDIAG [STORAGE.TRANS] operator() (ob_ts_mgr.h:167) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-4023] refresh gts failed(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:43.591342] INFO [STORAGE.TRANS] operator() (ob_ts_mgr.h:171) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] refresh gts functor(ret=-4023, ret="OB_EAGAIN", gts_tenant_info={v:1}) [2024-03-15 07:03:43.591361] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=7] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.591372] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223591357], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.591376] WDIAG [SHARE.LOCATION] 
batch_process_tasks (ob_ls_location_service.cpp:524) [159][SysLocAsyncUp0][T0][YB427F000001-000613ACAFBFB1C1-0-0] [lt=34][errcode=0] tenant schema is not ready, need wait(ret=0, ret="OB_SUCCESS", superior_tenant_id=1, tasks=[{cluster_id:1, tenant_id:1, ls_id:{id:1}, add_timestamp:1710486223591303}]) [2024-03-15 07:03:43.591407] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=22] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223591357]}) [2024-03-15 07:03:43.591672] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.591685] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223591665], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.591711] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223591665]}) [2024-03-15 07:03:43.592017] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.592039] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486229937049, now=[mts=1710486223524033], now0=[mts=1710486223524033], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.592054] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=14][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223523347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.592095] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=33][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}) [2024-03-15 07:03:43.592111] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) 
[138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=17][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223523347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.592134] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=21] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223523347, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.592183] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=47][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.592195] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=10][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.592201] WDIAG [SQL] open (ob_result_set.cpp:157) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=5][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.592207] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=4][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.592213] WDIAG [SERVER] do_query 
(ob_inner_sql_connection.cpp:648) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=5][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.592232] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=7][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_schema_status' ORDER BY row_id, column_name"}, retry_cnt=171, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.592252] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=28] will sleep(sleep_us=100000, remain_us=6414799, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=171, timeout_timestamp=1710486230007049) [2024-03-15 07:03:43.596058] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.596115] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.601557] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1567) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0][errcode=-4283] gts not ready(ret=-4283, retry_times=102) [2024-03-15 07:03:43.601582] WDIAG [STORAGE.TRANS] acquire_global_snapshot__ (ob_trans_service_v4.cpp:1589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=25][errcode=-4283] acquire global snapshot fail(ret=-4283, gts_ahead=0, expire_ts=1710486224265147, now=[mts=1710486223536268], now0=[mts=1710486223536268], snapshot={val:18446744073709551615}, uncertain_bound=0) [2024-03-15 07:03:43.601604] WDIAG [STORAGE.TRANS] get_read_snapshot (ob_tx_api.cpp:586) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=12][errcode=-4283] acquire global snapshot fail(ret=-4283, tx={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223535816, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, 
expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.601649] WDIAG [SQL.EXE] stmt_setup_snapshot_ (ob_sql_trans_control.cpp:679) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=38][errcode=-4283] fail to get snapshot(ret=-4283, local_ls_id={id:1}, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}) [2024-03-15 07:03:43.601666] WDIAG [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:531) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=17][errcode=-4283] fail to exec stmt_setup_snapshot_(session, das_ctx, plan, plan_ctx, txs)(ret=-4283, session_id=1, *tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223535816, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}) [2024-03-15 07:03:43.601687] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=19] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223535816, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 
07:03:43.601729] WDIAG [SQL] start_stmt (ob_result_set.cpp:317) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=41][errcode=-4283] fail to start stmt(ret=-4283, phy_plan->get_dependency_table()=[{table_id:1, schema_version:0, object_type:1, is_db_explicit:false, is_existed:true}]) [2024-03-15 07:03:43.601740] WDIAG [SQL] do_open_plan (ob_result_set.cpp:496) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] fail start stmt(ret=-4283) [2024-03-15 07:03:43.601747] WDIAG [SQL] open (ob_result_set.cpp:157) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=7][errcode=-4283] execute plan failed(ret=-4283) [2024-03-15 07:03:43.601753] WDIAG [SERVER] open (ob_inner_sql_result.cpp:153) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=4][errcode=-4283] open result set failed(ret=-4283) [2024-03-15 07:03:43.601758] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:648) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=4][errcode=-4283] result set open failed(ret=-4283, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}) [2024-03-15 07:03:43.601767] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=8][errcode=-4283] execute failed(ret=-4283, tenant_id=1, executor={ObIExecutor:, sql:"SELECT row_id, column_name, column_value FROM __all_core_table WHERE table_name = '__all_global_stat' ORDER BY row_id, column_name"}, retry_cnt=16, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.601776] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=6] will sleep(sleep_us=16000, remain_us=733372, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=16, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.606449] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.606530] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=79][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.611192] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.611239] WDIAG 
[STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=46][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.611273] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223611096) [2024-03-15 07:03:43.611294] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=20][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486223411057, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.612370] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=11] get wrs ts(ls_id={id:1}, delta=146268760472, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.612398] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=24] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.614564] INFO [STORAGE.TRANS] print_retain_ctx_info (ob_tx_retain_ctx_mgr.cpp:263) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=33] [RetainCtxMgr] print retain ctx(ls_id={id:1}, this={retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, retain_ctx_list_.get_first()={cause_:0, tx_id_:{txid:198924641}, ls_id_:{id:1}, tx_ctx_:0x7f54b1a209d0}, retain_ctx_list_.get_last()={cause_:0, tx_id_:{txid:198924641}, ls_id_:{id:1}, tx_ctx_:0x7f54b1a209d0}) [2024-03-15 07:03:43.614652] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=64][errcode=-4023] get gts error(ret=-4023) [2024-03-15 07:03:43.614724] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=14] [PALF STAT GROUP LOG INFO](palf_id=1, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=122, avg_group_log_size=122) [2024-03-15 07:03:43.614764] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [569][T1_TxLoopWorker][T1][Y0-0000000000000000-0-0] [lt=36] [PALF STAT APPEND DATA SIZE](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, append size=121) [2024-03-15 07:03:43.616780] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has 
been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.616839] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=57][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.617883] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=11][errcode=-4283] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:16, local_retry_times:16, err_:-4283, err_:"OB_GTS_NOT_READY", retry_type:1, client_ret:-4283}, need_retry=true) [2024-03-15 07:03:43.617968] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=59][errcode=-4283] result set close failed(ret=-4283) [2024-03-15 07:03:43.618178] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.618225] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [204][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=16] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.618210] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=31][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.618254] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [202][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=36] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.618710] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [203][MysqlIO][T0][Y0-0000000000000000-0-0] [lt=19] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.619714] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [197][BatchIO][T0][Y0-0000000000000000-0-0] [lt=15] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.619772] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [198][BatchIO][T0][Y0-0000000000000000-0-0] [lt=40] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, 
request doing=0/0) [2024-03-15 07:03:43.620043] INFO [RPC.FRAME] mysql_easy_timer_cb (ob_net_easy.cpp:657) [206][MysqlUnix][T0][Y0-0000000000000000-0-0] [lt=28] [MYSQL EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.620213] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [200][BatchIO][T0][Y0-0000000000000000-0-0] [lt=13] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.620589] INFO [RPC.FRAME] batch_rpc_easy_timer_cb (ob_net_easy.cpp:633) [199][BatchIO][T0][Y0-0000000000000000-0-0] [lt=62] [BATCH_RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.620709] INFO [RPC.FRAME] rpc_easy_timer_cb (ob_net_easy.cpp:595) [208][RpcUnix][T0][Y0-0000000000000000-0-0] [lt=27] [RPC EASY STAT](log_str=conn count=0/0, request done=0/0, request doing=0/0) [2024-03-15 07:03:43.621550] INFO [LIB] stat (utility.h:1140) [461][T1_IOWorker][T1][Y0-0000000000000000-0-0] [lt=22] [PALF STAT WRITE LOG](cur_stat_count=1, stat_interval=1000000, avg cost=6795, this=0x7f54b87fbad8) [2024-03-15 07:03:43.621588] INFO [PALF] inner_append_log (palf_handle_impl.cpp:1660) [461][T1_IOWorker][T1][Y0-0000000000000000-0-0] [lt=38] [PALF STAT INNER APPEND LOG](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, accum_size=122) [2024-03-15 07:03:43.621665] WDIAG [PALF] try_update_match_lsn_map_ (log_sliding_window.cpp:3790) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=12][errcode=0] [MATCH LSN ADVANCE DELAY]match_lsn advance delay too much time(ret=0, palf_id=1, self="127.0.0.1:2882", server="127.0.0.1:2882", update_func={old_end_lsn:{lsn:115793802407}, new_end_lsn:{lsn:115793802529}, old_advance_time_us:1710486222620996, new_ack_time_us:1710486223621656, advance delay(us):1000660}) [2024-03-15 07:03:43.621709] INFO [PALF] try_advance_committed_lsn_ (log_sliding_window.cpp:1572) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=41] [PALF STAT COMMITTED LOG SIZE](palf_id=1, self="127.0.0.1:2882", committed size=122) [2024-03-15 07:03:43.621767] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=30] [PALF STAT FS CB](cur_stat_count=1, stat_interval=1000000, avg cost=13, this=0x7f54b87f33a8) [2024-03-15 07:03:43.621795] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=28] [PALF STAT LOG LIFETIME](cur_stat_count=1, stat_interval=1000000, avg cost=7077, this=0x7f54b87f33d8) [2024-03-15 07:03:43.621807] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=12] [PALF STAT LOG SUBMIT WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=25, this=0x7f54b87f3408) [2024-03-15 07:03:43.621817] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=10] [PALF STAT LOG SLIDE WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=7052, this=0x7f54b87f3438) [2024-03-15 07:03:43.621835] INFO [LIB] stat (utility.h:1140) [460][T1_LogIOCb0][T1][Y0-0000000000000000-0-0] [lt=11] [PALF STAT FLUSH CB](cur_stat_count=1, stat_interval=1000000, avg cost=194, this=0x7f54b87fbb08) [2024-03-15 07:03:43.625177] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=0] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.625196] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) 
[454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=18][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.625205] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=8][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:43.625214] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=8][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:43.625222] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=6][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:43.625227] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:43.625236] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:43.625242] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:43.625256] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=14][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:43.625263] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=7][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:43.625269] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:43.625274] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:43.625279] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:43.625285] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:43.625299] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=6][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:43.625306] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=7][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.625314] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] 
[lt=6][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.625320] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:43.625327] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] fail to handle text query(stmt=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, ret=-5019) [2024-03-15 07:03:43.625334] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=6][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:43.625339] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.625353] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=9][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:43.625366] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=11][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.625371] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.625376] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=4][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:43.625391] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=5][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:43.625398] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [454][T1_Occam][T1][YB427F000001-000613ACAB8F9B80-0-0] [lt=7][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, aret=-5019, ret=-5019) [2024-03-15 07:03:43.625404] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=6][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.625410] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:43.625415] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:43.625443] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) 
[454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=26][errcode=-5019] query failed(ret=-5019, conn=0x7f54a2df2050, start=1710486223625009, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.625451] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=8][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:43.625458] WDIAG get_my_sql_result_ (ob_table_access_helper.h:431) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] GCTX.sql_proxy_ read failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1, tenant_id=1, columns=0x7f54b0e5c1c8, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, columns_str="zone") [2024-03-15 07:03:43.625470] WDIAG read_and_convert_to_values_ (ob_table_access_helper.h:332) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-5019] fail to get ObMySQLResult(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.625517] WDIAG [COORDINATOR] get_self_zone_name (table_accessor.cpp:530) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=5][errcode=-5019] get zone from __all_server failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", columns=0x7f54b0e5c1c8, where_condition="where svr_ip='127.0.0.1' and svr_port=2882", zone_name_holder=) [2024-03-15 07:03:43.625527] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:463) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=8][errcode=-5019] get self zone name failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:43.625534] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:472) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=7][errcode=-5019] zone name is empty(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:43.625541] WDIAG [COORDINATOR] refresh (ob_leader_coordinator.cpp:143) [454][T1_Occam][T1][Y0-0000000000000000-0-0] [lt=4][errcode=-5019] get all ls election reference info failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.626998] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.627054] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=55][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.630274] WDIAG [PALF] submit_log (palf_handle_impl.cpp:378) 
[127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0][errcode=-4002] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.636630] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223572700, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.636719] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=13000, remain_us=969478, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=13, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.637188] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=31][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.637217] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.641071] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=55][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.647378] WDIAG [PALF] 
recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.647454] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=75][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.650169] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.650203] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=33][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.652795] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=39][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.652832] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=35][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.652856] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=22][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047539}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486223652780) [2024-03-15 07:03:43.652871] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, 
last_post_cluster_heartbeat_tstamp_=1710486223452797, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.657645] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=25][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.657788] INFO [COMMON] compute_tenant_wash_size (ob_kvcache_store.cpp:1140) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=62] Wash compute wash size(is_wash_valid=true, sys_total_wash_size=2720698368, global_cache_size=12484608, tenant_max_wash_size=4161536, tenant_min_wash_size=4161536, tenant_ids_=[512, 500, 999, 506, 508, 509, 510, 1, 1003, 1004]) [2024-03-15 07:03:43.657778] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=131][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.657892] INFO [COMMON] wash (ob_kvcache_store.cpp:343) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=28] Wash time detail, (compute_wash_size_time=122, refresh_score_time=70, wash_time=5) [2024-03-15 07:03:43.657911] INFO [COMMON] clean_garbage_node (ob_kvcache_map.cpp:647) [102][KVCacheWash][T0][Y0-0000000000000000-0-0] [lt=10] Cache wash clean map node details(ret=0, clean_node_count=0, clean_time=7, clean_start_pos=1572850, clean_num=31457) [2024-03-15 07:03:43.660389] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=0] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223660381212, start_id=0, end_id=0) [2024-03-15 07:03:43.664317] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB63-0-0] [lt=162][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.664366] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB63-0-0] [lt=49][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.664408] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB63-0-0] [lt=40][errcode=-4018] refresh priority failed(ret=-4018, 
ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.664473] WDIAG iterate (ob_tuple.h:272) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB63-0-0] [lt=62][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.664496] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [877][T1_L0_G2][T1][YB427F000001-000613ACAA1FBB63-0-0] [lt=23][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.667891] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.667935] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=44][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.668612] INFO [DETECT] record_summary_info_and_logout_when_necessary_ (ob_lcl_batch_sender_thread.cpp:202) [546][T1_LCLSender][T1][Y0-0000000000000000-0-0] [lt=69] ObLCLBatchSenderThread periodic report summary info(duty_ratio_percentage=0, total_constructed_detector=0, total_destructed_detector=0, total_alived_detector=0, _lcl_op_interval=30000, lcl_msg_map_.count()=0, *this={this:0x7f54bb5e42b0, is_inited:true, is_running:true, total_record_time:5010000, over_night_times:0}) [2024-03-15 07:03:43.671750] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:164) [552][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=51] sql audit evict mem start(evict_low_mem_level=0, evict_high_mem_level=32212254, size_used=15176, mem_used=33275904) [2024-03-15 07:03:43.672175] INFO [SERVER] runTimerTask (ob_eliminate_task.cpp:170) [552][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=51] release old cannot free more memory [2024-03-15 07:03:43.672210] INFO [SERVER] runTimerTask 
(ob_eliminate_task.cpp:199) [552][T1_ReqMemEvict][T1][Y0-0000000000000000-0-0] [lt=33] sql audit evict task end(evict_high_mem_level=32212254, evict_high_size_level=90000, evict_batch_count=2, elapse_time=462, size_used=14176, mem_used=31196160) [2024-03-15 07:03:43.673603] INFO [LIB] runTimerTask (ob_work_queue.cpp:24) [135][ObTimer][T0][Y0-0000000000000000-0-0] [lt=24] add async task(this=tasktype:N9oceanbase10rootserver13ObRootService19ObRefreshServerTaskE) [2024-03-15 07:03:43.675119] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=33] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.675146] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=24][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.675226] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=78][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:43.675241] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=15][errcode=-5019] resolve table relation factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:43.675256] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=10][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:43.675265] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=9][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:43.675279] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:43.675288] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:43.675297] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=7][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:43.675305] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=7][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:43.675313] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=7][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:43.675322] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:43.675331] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) 
[136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=7][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:43.675343] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=11][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:43.675371] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=15][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:43.675388] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=15][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.675406] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=13][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.675451] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=13][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:43.675465] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=11][errcode=-5019] fail to handle text query(stmt=SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server, ret=-5019) [2024-03-15 07:03:43.675477] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=10][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:43.675487] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.675508] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=15][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:43.675527] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=15][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.675536] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.675544] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=7][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:43.675566] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [136][RSAsyncTask0][T1][YB427F000001-000613ACABDF8442-0-0] [lt=8][errcode=-5019] failed to process 
record(executor={ObIExecutor:, sql:"SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:43.675613] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [136][RSAsyncTask0][T0][YB427F000001-000613ACABDF8442-0-0] [lt=42][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server"}, aret=-5019, ret=-5019) [2024-03-15 07:03:43.675631] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server) [2024-03-15 07:03:43.675648] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:43.675664] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:43.675680] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] query failed(ret=-5019, conn=0x7f54bd1f4050, start=1710486223674812, sql=SELECT time_to_usec(gmt_modified) AS last_hb_time, id, zone, svr_ip, svr_port, inner_port, status, with_rootserver, block_migrate_in_time, build_version, stop_time, start_service_time, with_partition FROM __all_server) [2024-03-15 07:03:43.675693] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:43.675875] WDIAG [SHARE] run2 (ob_async_task_queue.cpp:149) [136][RSAsyncTask0][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-5019] task process failed, start retry(max retry time=0, retry interval=1000000, ret=-5019) [2024-03-15 07:03:43.678144] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.678184] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=41][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, 
warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.680527] INFO [STORAGE] runTimerTask (ob_checkpoint_service.cpp:351) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=29] ====== check clog disk timer task ====== [2024-03-15 07:03:43.680599] INFO [PALF] get_disk_usage (palf_env_impl.cpp:777) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=64] get_disk_usage(ret=0, capacity(MB):=921, used(MB):=875) [2024-03-15 07:03:43.682502] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=20] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.682548] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=45] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:43.682587] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=30] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:43.682608] INFO [STORAGE.TRANS] get_rec_scn (ob_ls_tx_service.cpp:441) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=18] [CHECKPOINT] ObLSTxService::get_rec_scn(common_checkpoint_type="DATA_CHECKPOINT_TYPE", common_checkpoints_[min_rec_scn_common_checkpoint_type_index]={this:0x7f5484df2290}, min_rec_scn={val:1710278140261191947}, ls_id_={id:1}) [2024-03-15 07:03:43.683643] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=15] get rec log scn(service_type_=0, rec_log_ts={val:1710339949200647547}) [2024-03-15 07:03:43.683676] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=32] get rec log scn(service_type_=1, rec_log_ts={val:1710324545958121911}) [2024-03-15 07:03:43.683686] INFO [STORAGE.TRANS] get_rec_scn (ob_id_service.cpp:306) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=9] get rec log scn(service_type_=2, rec_log_ts={val:4611686018427387903}) [2024-03-15 07:03:43.683700] INFO [STORAGE] update_clog_checkpoint (ob_checkpoint_executor.cpp:158) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=8] [CHECKPOINT] clog checkpoint no change(checkpoint_scn={val:1710278140261191947}, checkpoint_scn_in_ls_meta={val:1710278140261191947}, ls_id={id:1}, service_type="TRANS_SERVICE") [2024-03-15 07:03:43.683718] INFO [STORAGE] cannot_recycle_log_over_threshold_ (ob_checkpoint_service.cpp:264) 
[740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=13] cannot_recycle_log_size statistics(cannot_recycle_log_size=783782571, threshold=289910292) [2024-03-15 07:03:43.683994] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223617562, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.684169] INFO [PALF] locate_by_lsn_coarsely (palf_handle_impl.cpp:1547) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=8] locate_by_lsn_coarsely(ret=0, ret="OB_SUCCESS", this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, lsn={lsn:365453102694}, committed_lsn={lsn:365766615723}, result_scn={val:1710311365828975114}) [2024-03-15 07:03:43.684197] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:218) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=28] advance checkpoint by flush to avoid clog disk full(recycle_scn={val:1710311365828975114}, end_lsn={lsn:365766615723}, clog_checkpoint_lsn={lsn:364982833152}, calcu_recycle_lsn={lsn:365453102694}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:43.684187] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=17000, remain_us=650963, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=17, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.684215] INFO [STORAGE] advance_checkpoint_by_flush (ob_checkpoint_executor.cpp:236) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=15] start flush(recycle_scn={val:1710311365828975114}, ls_->get_clog_checkpoint_scn()={val:1710278140261191947}, ls_->get_ls_id()={id:1}) [2024-03-15 07:03:43.685927] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=11] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, 
last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.685984] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=55] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:43.687671] INFO [STORAGE.TRANS] get_rec_scn (ob_trans_ctx_mgr_v4.cpp:1295) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=25] succ to get rec scn(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}, aggre_rec_scn={val:4611686018427387903}) [2024-03-15 07:03:43.687724] INFO [STORAGE.TRANS] get_rec_scn (ob_tx_ctx_memtable.cpp:232) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=53] tx ctx memtable get rec scn(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486221679144}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486221679144}, ls_id:{id:1}, is_frozen:false}, rec_scn={val:1710295204909211866}) [2024-03-15 07:03:43.687797] INFO [COMMON] inner_add_dag (ob_dag_scheduler.cpp:3277) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=34] add dag success(dag=0x7f54801da080, start_time=0, id=Y0-0000000000000000-0-0, dag->hash()=3526853371410145563, dag_cnt=1, dag_type_cnts=1) [2024-03-15 07:03:43.687840] INFO [STORAGE.TRANS] flush (ob_tx_ctx_memtable.cpp:298) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=30] tx ctx memtable flush successfully(this={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486223687758}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486223687758}, ls_id:{id:1}, is_frozen:true}, ls_id_={id:1}) [2024-03-15 07:03:43.687870] INFO [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:193) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=21] start freeze tx data memtable(ls_id_={id:1}) [2024-03-15 07:03:43.687881] INFO [STORAGE] freeze_ (ob_tx_data_memtable_mgr.cpp:229) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=10] There is a freezed memetable existed. 
Try freeze after flushing it.(ret=-4023, ret="OB_EAGAIN", get_memtable_count_()=2) [2024-03-15 07:03:43.687892] WDIAG [STORAGE] freeze (ob_tx_data_memtable_mgr.cpp:207) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=10][errcode=-4023] freeze tx data memtable fail.(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.687907] WDIAG [STORAGE] flush (ob_tx_data_memtable_mgr.cpp:483) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-4023] freeze failed(ret=-4023, ret="OB_EAGAIN", this=0x7f5484de61b0) [2024-03-15 07:03:43.687885] INFO [SERVER] add_task (ob_sys_task_stat.cpp:140) [677][T1003_DagSchedu][T1003][Y0-0000000000000000-0-0] [lt=25] succeed to add sys task(task={start_time:1710486223687871, task_id:YB427F000001-000613ACAB2FBA95-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1003, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:43.687916] WDIAG [STORAGE.TRANS] flush (ob_ls_tx_service.cpp:455) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=8][errcode=0] obCommonCheckpoint flush failed(tmp_ret=-4023, common_checkpoints_[i]=0x7f5484de6288) [2024-03-15 07:03:43.687926] INFO [STORAGE.TABLELOCK] get_rec_scn (ob_lock_memtable.cpp:742) [740][T1003_CKClogDis][T1003][Y0-0000000000000000-0-0] [lt=9] rec_scn of ObLockMemtable is (rec_scn_={val:4611686018427387903}, flushed_scn_={val:0}, pre_rec_scn_={val:18446744073709551615}, freeze_scn_={val:0}, max_committed_scn_={val:18446744073709551615}, is_frozen_=false, ls_id_={id:1}) [2024-03-15 07:03:43.687923] INFO [COMMON] schedule_one (ob_dag_scheduler.cpp:2897) [677][T1003_DagSchedu][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=36] schedule one task(task={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA95-0-0, dag_ret:0, dag_status:2, start_time:1710486223687919, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, priority="PRIO_COMPACTION_HIGH", group id=18446744073709551615, total_running_task_cnt=1, running_task_cnts_[priority]=1, low_limits_[priority]=6, up_limits_[priority]=6, task->get_dag()->get_dag_net()=NULL) [2024-03-15 07:03:43.688333] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.688388] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=54][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.688729] WDIAG [STORAGE] inner_get_neighbour_major_freeze (ob_tenant_freeze_info_mgr.cpp:334) 
[649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=14][errcode=-4018] no freeze info in curr info_list(ret=-4018, cur_idx_=0, info_list_[0]=[], info_list_[1]=[]) [2024-03-15 07:03:43.688755] WDIAG [STORAGE] get_neighbour_freeze_info (ob_partition_merge_policy.cpp:65) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=26][errcode=-4018] Failed to get freeze info, use snapshot_gc_ts instead(ret=-4018, snapshot_version=1710506028960191) [2024-03-15 07:03:43.688770] INFO [STORAGE] release_head_memtable_ (ob_tx_ctx_memtable_mgr.cpp:178) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=10] tx ctx memtable mgr release head memtable(*imemtable={ObITable:{this:0x7f548010a080, key:{tablet_id:{id:49401}, column_group_idx:0, table_type:"TX_CTX_MEMTABLE", scn_range:{start_scn:{val:1}, end_scn:{val:1710486223687758}}}, ref_cnt:2, upper_trans_version:-4007, timestamp:0}, this:0x7f548010a080, snapshot_version:{val:1710486223687758}, ls_id:{id:1}, is_frozen:true}) [2024-03-15 07:03:43.690444] INFO [STORAGE.TRANS] on_tx_ctx_table_flushed (ob_trans_ctx_mgr_v4.cpp:1323) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=26] succ to on tx ctx table flushed(*this={this:0x7f547e804030, ls_id:{id:1}, tenant_id:1003, state:"F_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204930519490}, last_push_gc_task_ts_:1710485741683482, skip_remove_cnt_:0}, aggre_rec_scn:{val:18446744073709551615}, prev_aggre_rec_scn:{val:18446744073709551615}, uref:3}) [2024-03-15 07:03:43.690485] INFO [STORAGE] release_memtables (ob_i_memtable_mgr.cpp:164) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=39] succeed to release memtable(ret=0, i=0, scn={val:1710506028960191}) [2024-03-15 07:03:43.690500] WDIAG [STORAGE.COMPACTION] build_merge_ctx (ob_tablet_merge_task.cpp:919) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=12][errcode=-4677] fail to inner init ctx(ret=-4677, tablet_id={id:49401}, ctx={param:{merge_type:"MINI_MERGE", merge_version:0, ls_id:{id:1}, tablet_id:{id:49401}, report_:null, for_diagnose:false, is_tenant_major_merge:false}, sstable_version_range:{multi_version_start:-1, base_version:-1, snapshot_version:-1}, create_snapshot_version:0, is_full_merge:false, merge_level:1, progressive_merge_num:0, parallel_merge_ctx:{parallel_type:4, range_array:[], concurrent_cnt:0, is_inited:false}, schema_ctx:{base_schema_version:0, schema_version:0, storage_schema:NULL}, tables_handle count:0, progressive_merge_round:0, progressive_merge_step:0, tables_handle:{meta_mem_mgr_:null, allocator_:null, tablet_id:{id:0}, table_count:0, []}, schedule_major:false, scn_range:{start_scn:{val:0}, end_scn:{val:0}}, merge_scn:{val:4611686018427387903}, read_base_version:0, ls_handle:{ls_map_:0x7f5497b6e040, ls_:0x7f5484de6150, mod_:1}, tablet_handle:{obj:0x7f547e643910, obj_pool:0x7f54affb9cb0, wash_priority:0}, merge_progress:NULL, compaction_filter:NULL, time_guard:total=0us, rebuild_seq:0, data_version:0, merge_list:{is_inited:false, info:1, last_compaction_type:0, wait_check_flag:0, last_medium_scn:0, list_size:0, medium_info_list:[]}}) [2024-03-15 07:03:43.690576] WDIAG [STORAGE.COMPACTION] process (ob_tablet_merge_task.cpp:854) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=76][errcode=-4677] sstable merge finish(ret=-4677, ctx=0x7f5429976060, task={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", 
id:YB427F000001-000613ACAB2FBA95-0-0, dag_ret:0, dag_status:2, start_time:1710486223687919, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:43.690599] WDIAG [COMMON] do_work (ob_dag_scheduler.cpp:241) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=22][errcode=-4677] failed to process task(ret=-4677) [2024-03-15 07:03:43.690609] INFO [COMMON] do_work (ob_dag_scheduler.cpp:247) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=8] task finish process(ret=-4677, start_time=1710486223688711, end_time=1710486223690606, runtime=1895, *this={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA95-0-0, dag_ret:0, dag_status:2, start_time:1710486223687919, running_task_cnt:1, indegree:0, hash:3526853371410145563}}) [2024-03-15 07:03:43.690631] WDIAG [COMMON] run1 (ob_dag_scheduler.cpp:1424) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=22][errcode=-4677] failed to do work(ret=-4677, *task_={this:0x7f5447a2e080, type:15, status:2, dag:{this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA95-0-0, dag_ret:0, dag_status:2, start_time:1710486223687919, running_task_cnt:1, indegree:0, hash:3526853371410145563}}, compat_mode=0) [2024-03-15 07:03:43.690660] INFO [COMMON] finish_dag_ (ob_dag_scheduler.cpp:2471) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=21] dag finished(dag_ret=-4677, runtime=2739, dag_cnt=0, dag_cnts_[dag.get_type()]=0, &dag=0x7f54801da080, dag={this:0x7f54801da080, type:3, name:"TX_TABLE_MERGE", id:YB427F000001-000613ACAB2FBA95-0-0, dag_ret:-4677, dag_status:5, start_time:1710486223687919, running_task_cnt:0, indegree:0, hash:3526853371410145563}) [2024-03-15 07:03:43.690696] INFO [SERVER] del_task (ob_sys_task_stat.cpp:169) [649][T1003_TX_TABLE_][T1003][YB427F000001-000613ACAB2FBA95-0-0] [lt=33] succeed to del sys task(removed_task={start_time:1710486223687871, task_id:YB427F000001-000613ACAB2FBA95-0-0, task_type:4, svr_ip:"127.0.0.1:2882", tenant_id:1003, is_cancel:false, comment:"MINI_MERGE dag: ls_id=1 tablet_id=49401"}) [2024-03-15 07:03:43.692236] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.692259] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=21][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223692221], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.692278] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=16][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223692221], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.692300] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=12] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223692221]}) [2024-03-15 07:03:43.692322] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=11] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.692331] WDIAG 
[STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223692319], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.692346] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=10] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223692319]}) [2024-03-15 07:03:43.692642] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.692653] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223692635], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.692680] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=11] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223692635]}) [2024-03-15 07:03:43.693782] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.693837] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=53][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.698690] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.698772] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.701544] INFO [STORAGE.TRANS] 
try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.701593] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=49][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.708961] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=30][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.709063] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=101][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.709820] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.709846] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=25][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.709864] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223709809) [2024-03-15 07:03:43.709879] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486223509816, 
cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.712884] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=17] get wrs ts(ls_id={id:1}, delta=146268860445, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.712954] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=64] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.719157] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.719195] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.724946] WDIAG [SQL] create_sessid (ob_sql_session_mgr.cpp:339) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=8][errcode=0] server is initiating(server_id=0, local_seq=27148, max_local_seq=262143, max_server_id=4095) [2024-03-15 07:03:43.725000] INFO [RPC.OBMYSQL] sm_conn_build_handshake (obsm_conn_callback.cpp:104) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=77] new mysql sessid created(conn.sessid_=3221252620, support_ssl=false) [2024-03-15 07:03:43.725141] INFO [RPC.OBMYSQL] init (obsm_conn_callback.cpp:120) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=38] sm conn init succ(conn.sessid_=3221252620, sess.client_addr_="172.21.122.86:42728") [2024-03-15 07:03:43.725194] INFO [RPC.OBMYSQL] do_accept_one (ob_sql_nio.cpp:899) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=42] accept one succ(*s={this:0x7f545a1fd240, fd:133, err:0, last_decode_time_:0, last_write_time_:1710486223725137, read_buffer_.get_consume_sz():0, get_pending_flag():0, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.727226] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=42] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.727276] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=50][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.727296] WDIAG [SERVER] 
extract_tenant_id (ob_srv_deliver.cpp:100) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=18][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.727313] WDIAG [SERVER] dispatch_req (ob_srv_deliver.cpp:115) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=15][errcode=-5160] extract tenant_id fail(ret=-5160, tenant_id=18446744073709551615, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223727200, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.727354] WDIAG [SERVER] deliver_mysql_request (ob_srv_deliver.cpp:507) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=40][errcode=-5150] cannot dispatch success(ret=-5150, req={packet:{header:{length:232, sequence:1}, capability_.capability:0, max_packet_size:0, character_set:0, username:"", database:"", auth_plugin_name:"", connect_attrs:[]}, type:1, group:0, sql_req_level:0, connection_phase:0, recv_timestamp_:1710486223727200, enqueue_timestamp_:0, request_arrival_time_:0, trace_id_:Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.727447] INFO [SHARE.SCHEMA] get_tenant_info (ob_schema_getter_guard.cpp:2162) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=31] tenant not exist(tenant_name=obmysql) [2024-03-15 07:03:43.727459] WDIAG [SHARE.SCHEMA] get_tenant_id (ob_schema_getter_guard.cpp:380) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=12][errcode=-5160] Can not find tenant(tenant_name=obmysql) [2024-03-15 07:03:43.727469] WDIAG [SERVER] get_tenant_id (obmp_connect.cpp:1339) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=9][errcode=-5160] get_tenant_id failed(ret=-5160, tenant_name=obmysql) [2024-03-15 07:03:43.727480] WDIAG [SERVER] check_update_tenant_id (obmp_connect.cpp:1840) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=11][errcode=-5160] get_tenant_id failed(ret=-5160) [2024-03-15 07:03:43.727494] WDIAG [SERVER] process (obmp_connect.cpp:242) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=13][errcode=-5160] fail to check update tenant id(ret=-5160) [2024-03-15 07:03:43.727555] INFO [SERVER] send_error_packet (obmp_packet_sender.cpp:311) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=30] sending error packet(err=-4043, extra_err_info=NULL, lbt()="0xd9f6cf5 0x75d3e81 0x7596e3a 0x75be943 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.727622] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:745) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=17][errcode=0] server close connection(sessid=3221252620, proxy_sessid=0, stack="0xd9f6cf5 0x75d6bf2 0x75b2979 0x75bde02 0x39e75aa 0xe535cef 0xe536ba1 0x3d99a09 0xdc671e7 0xdc6402a 0x7f5510167ea5 0x7f550fe9096d") [2024-03-15 07:03:43.727644] WDIAG [SERVER] get_session (obmp_packet_sender.cpp:515) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=17][errcode=-4018] get session fail(ret=-4018, sessid=3221252620, proxy_sessid=0) [2024-03-15 07:03:43.727679] WDIAG [SERVER] disconnect (obmp_packet_sender.cpp:749) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=29][errcode=-4016] session is null [2024-03-15 07:03:43.727695] INFO [SERVER] process (obmp_connect.cpp:369) [110][MysqlQueueTh1][T0][Y0-000613ACA76FB03B-0-0] [lt=12] MySQL LOGIN(direct_client_ip="172.21.122.86", client_ip=, 
tenant_name=obmysql, tenant_id=18446744073709551615, user_name=yyyth, host_name=xxx.xxx.xxx.xxx, sessid=3221252620, proxy_sessid=0, sess_create_time=0, from_proxy=false, from_java_client=false, from_oci_client=false, from_jdbc_client=false, capability=270377487, proxy_capability=0, use_ssl=false, c/s protocol="OB_MYSQL_CS_TYPE", autocommit=false, proc_ret=-5160, ret=0) [2024-03-15 07:03:43.727926] WDIAG [RPC.OBMYSQL] push_close_req (ob_sql_nio.cpp:704) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=20][errcode=-4015] close sql sock by user req(*s={this:0x7f545a1fd240, fd:133, err:5, last_decode_time_:1710486223727200, last_write_time_:1710486223727901, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.728030] INFO [RPC.OBMYSQL] on_disconnect (obsm_conn_callback.cpp:231) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=113] kill and revert session(conn.sessid_=3221252620, proxy_sessid=0, server_id=0, ret=0) [2024-03-15 07:03:43.728052] INFO [RPC.OBMYSQL] handle_pending_destroy_list (ob_sql_nio.cpp:791) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=22] can close safely, do destroy(*s={this:0x7f545a1fd240, fd:133, err:5, last_decode_time_:1710486223727200, last_write_time_:1710486223727901, read_buffer_.get_consume_sz():236, get_pending_flag():1, get_trace_id():Y0-0000000000000000-0-0}) [2024-03-15 07:03:43.728077] INFO [RPC.OBMYSQL] sm_conn_log_close (obsm_conn_callback.cpp:159) [211][sql_nio1][T0][Y0-0000000000000000-0-0] [lt=21] connection close(sessid=3221252620, proxy_sessid=0, tenant_id=0, server_id=0, from_proxy=false, from_java_client=false, c/s protocol="OB_MYSQL_CS_TYPE", is_need_clear_sessid_=true, ret=0) [2024-03-15 07:03:43.729395] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.729482] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=86][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.739641] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, 
log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.739718] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=76][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.740848] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223649656, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.740968] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=14000, remain_us=865228, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=14, timeout_timestamp=1710486224606195) [2024-03-15 07:03:43.741214] INFO [STORAGE.TRANS] query_and_update_last_id (ob_standby_timestamp_service.cpp:118) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=63] tenant info is invalid(ret=-4076, query_ts=0, standby_scn={val:0}, this={inited:true, last_id:-1, tenant_id:1004, epoch:-1, self:"127.0.0.1:2882", switch_to_leader_ts:-1}) [2024-03-15 07:03:43.741248] INFO [STORAGE.TRANS] query_and_update_last_id (ob_standby_timestamp_service.cpp:132) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=33] ObStandbyTimestampService state(*this={inited:true, last_id:-1, tenant_id:1004, epoch:-1, self:"127.0.0.1:2882", switch_to_leader_ts:-1}) [2024-03-15 07:03:43.741274] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=23][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT") [2024-03-15 07:03:43.749848] WDIAG [PALF] recycle_blocks_ 
(palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.749889] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=40][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.755716] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.755753] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=37][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.756483] WDIAG [DATA_DICT] do_dump_data_dict_ (ob_data_dict_service.cpp:300) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1][errcode=-4012] get_snapshot_scn failed(ret=-4012, ret="OB_TIMEOUT", snapshot_scn={val:18446744073709551615}) [2024-03-15 07:03:43.756519] INFO [DATA_DICT] check_callback_list_ (ob_data_dict_storager.cpp:706) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=1] [STAT] callbacks_status(ret=0, ret="OB_SUCCESS", total_cb_count=0, not_invoked_cb_count=0, failed_cb_count=0, is_all_invoked=true, need_print_cb_status=false, stop_flag=false) [2024-03-15 07:03:43.756539] WDIAG [DATA_DICT] runTimerTask (ob_data_dict_service.cpp:190) [915][T1004_DataDictT][T1004][Y0-0000000000000000-0-0] [lt=19][errcode=-4012] dump_data_dict_ failed(ret=-4012, ret="OB_TIMEOUT", tenant_id=1004, force_need_dump=false) [2024-03-15 07:03:43.760034] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=17][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, 
log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.760086] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.760438] INFO [STORAGE.TRANS] get_number (ob_id_service.cpp:389) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=1] get number(ret=-4023, service_type_=0, range=1, base_id=1710486223760416915, start_id=0, end_id=0) [2024-03-15 07:03:43.764339] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB633-0-0] [lt=219][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.764392] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB633-0-0] [lt=53][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.764461] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB633-0-0] [lt=65][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.764504] WDIAG iterate (ob_tuple.h:272) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB633-0-0] [lt=41][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.764540] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [884][T1003_L0_G2][T1003][YB427F000001-000613ACAA3FB633-0-0] [lt=35][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1003, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}], is_primary_region:false, serious_failures:[], is_in_blacklist:false, 
in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.769994] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACA-0-0] [lt=285][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.770036] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACA-0-0] [lt=42][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, *this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.770059] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACA-0-0] [lt=22][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.770077] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACA-0-0] [lt=17][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.770091] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACA-0-0] [lt=14][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.770252] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.770279] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=28][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, 
used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.770685] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f549f3ceae8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54433d7290, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223700392, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222336235, use_das=false, nested_level=0, session={this:0x7f53faa860d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54433d7290}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.770813] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=0] will sleep(sleep_us=18000, remain_us=564336, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=18, timeout_timestamp=1710486224335147) [2024-03-15 07:03:43.770850] INFO [COORDINATOR] detect_recover (ob_failure_detector.cpp:138) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=0] doing detect recover operation(events_with_ops=[{event:{type:RESOURCE NOT ENOUGH, module:LOG, info:clog disk full event, level:FATAL}}]) [2024-03-15 07:03:43.772284] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=32] table not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.772347] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=61][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_server, ret=-5019) [2024-03-15 07:03:43.772370] WDIAG [SQL.RESV] resolve_table_relation_factor_normal (ob_dml_resolver.cpp:7359) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=22][errcode=-5019] fail to resolve table relation recursively(tenant_id=1, ret=-5019, database_id=201001, database_id=201001, table_name=__all_server, db_name=oceanbase) [2024-03-15 07:03:43.772392] WDIAG [SQL.RESV] resolve_table_relation_factor (ob_dml_resolver.cpp:7204) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=20][errcode=-5019] resolve table relation 
factor failed(ret=-5019, table_name=__all_server) [2024-03-15 07:03:43.772414] WDIAG [SQL.RESV] inner_resolve_sys_view (ob_dml_resolver.cpp:2579) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=16][errcode=-5019] fail to resolve table(ret=-5019) [2024-03-15 07:03:43.772449] WDIAG [SQL.RESV] resolve_table_relation_factor_wrapper (ob_dml_resolver.cpp:2634) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=34][errcode=-5019] fail to resolve sys view(ret=-5019) [2024-03-15 07:03:43.772470] WDIAG resolve_basic_table_without_cte (ob_dml_resolver.cpp:2730) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=13][errcode=-5019] Table 'oceanbase.__all_server' doesn't exist [2024-03-15 07:03:43.772484] WDIAG [SQL.RESV] resolve_basic_table_with_cte (ob_dml_resolver.cpp:13473) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=12][errcode=-5019] resolve base or alias table factor failed(ret=-5019) [2024-03-15 07:03:43.772498] WDIAG [SQL.RESV] resolve_basic_table (ob_dml_resolver.cpp:13407) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=12][errcode=-5019] fail to resolve basic table with cte(ret=-5019) [2024-03-15 07:03:43.772512] WDIAG [SQL.RESV] resolve_table (ob_dml_resolver.cpp:3142) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=13][errcode=-5019] resolve basic table failed(ret=-5019) [2024-03-15 07:03:43.772524] WDIAG [SQL.RESV] resolve_from_clause (ob_select_resolver.cpp:3426) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=11][errcode=-5019] fail to exec resolve_table(*table_node, table_item)(ret=-5019) [2024-03-15 07:03:43.772539] WDIAG [SQL.RESV] resolve_normal_query (ob_select_resolver.cpp:1033) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] fail to exec resolve_from_clause(parse_tree.children_[PARSE_SELECT_FROM])(ret=-5019) [2024-03-15 07:03:43.772553] WDIAG [SQL.RESV] resolve (ob_select_resolver.cpp:1240) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=12][errcode=-5019] resolve normal query failed(ret=-5019) [2024-03-15 07:03:43.772569] WDIAG [SQL.RESV] select_stmt_resolver_func (ob_resolver.cpp:170) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] execute stmt_resolver failed(ret=-5019, parse_tree.type_=3073) [2024-03-15 07:03:43.772595] WDIAG [SQL] generate_stmt (ob_sql.cpp:2659) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=15][errcode=-5019] failed to resolve(ret=-5019) [2024-03-15 07:03:43.772612] WDIAG [SQL] generate_physical_plan (ob_sql.cpp:2781) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=15][errcode=-5019] Failed to generate stmt(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.772630] WDIAG [SQL] handle_physical_plan (ob_sql.cpp:4452) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] Failed to generate plan(ret=-5019, result.get_exec_context().need_disconnect()=false) [2024-03-15 07:03:43.772646] WDIAG [SQL] handle_text_query (ob_sql.cpp:2383) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=13][errcode=-5019] fail to handle physical plan(ret=-5019) [2024-03-15 07:03:43.772661] WDIAG [SQL] stmt_query (ob_sql.cpp:206) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=12][errcode=-5019] fail to handle text query(stmt=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, ret=-5019) [2024-03-15 07:03:43.772678] WDIAG [SERVER] do_query (ob_inner_sql_connection.cpp:636) 
[607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] executor execute failed(ret=-5019) [2024-03-15 07:03:43.772694] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:783) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] execute failed(ret=-5019, tenant_id=1, executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, retry_cnt=0, local_sys_schema_version=1, local_tenant_schema_version=1) [2024-03-15 07:03:43.772724] WDIAG [SERVER] after_func (ob_query_retry_ctrl.cpp:868) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=25][errcode=-5019] [RETRY] check if need retry(v={force_local_retry:true, stmt_retry_times:0, local_retry_times:0, err_:-5019, err_:"OB_TABLE_NOT_EXIST", retry_type:0, client_ret:-5019}, need_retry=false) [2024-03-15 07:03:43.772752] WDIAG [SERVER] inner_close (ob_inner_sql_result.cpp:218) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=24][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.772766] WDIAG [SERVER] force_close (ob_inner_sql_result.cpp:198) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=13][errcode=-5019] result set close failed(ret=-5019) [2024-03-15 07:03:43.772778] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:788) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=11][errcode=-5019] failed to close result(close_ret=-5019, ret=-5019) [2024-03-15 07:03:43.772809] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:815) [607][T1003_Occam][T1][YB427F000001-000613ACA8BF9B9B-0-0] [lt=14][errcode=-5019] failed to process record(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, record_ret=-5019, ret=-5019) [2024-03-15 07:03:43.772831] WDIAG [SERVER] query (ob_inner_sql_connection.cpp:833) [607][T1003_Occam][T1003][YB427F000001-000613ACA8BF9B9B-0-0] [lt=19][errcode=-5019] failed to process final(executor={ObIExecutor:, sql:"SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882"}, aret=-5019, ret=-5019) [2024-03-15 07:03:43.772850] WDIAG [SERVER] execute_read_inner (ob_inner_sql_connection.cpp:2020) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-5019] execute sql failed(ret=-5019, tenant_id=1, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.772867] WDIAG [SERVER] retry_while_no_tenant_resource (ob_inner_sql_connection.cpp:890) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-5019] retry_while_no_tenant_resource failed(ret=-5019, tenant_id=1) [2024-03-15 07:03:43.772882] WDIAG [SERVER] execute_read (ob_inner_sql_connection.cpp:1961) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=14][errcode=-5019] execute_read failed(ret=-5019, cluster_id=1, tenant_id=1) [2024-03-15 07:03:43.772899] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:125) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-5019] query failed(ret=-5019, conn=0x7f54503be050, start=1710486223772011, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.772918] WDIAG [COMMON.MYSQLP] read (ob_mysql_proxy.cpp:63) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=-5019] read failed(ret=-5019) [2024-03-15 07:03:43.772934] WDIAG get_my_sql_result_ (ob_table_access_helper.h:431) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=12][errcode=-5019] GCTX.sql_proxy_ read failed(ret=-5019, 
ret="OB_TABLE_NOT_EXIST", MTL_ID()=1003, tenant_id=1, columns=0x7f549245c1c8, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882, sql=SELECT zone FROM __all_server where svr_ip='127.0.0.1' and svr_port=2882, columns_str="zone") [2024-03-15 07:03:43.772965] WDIAG read_and_convert_to_values_ (ob_table_access_helper.h:332) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=26][errcode=-5019] fail to get ObMySQLResult(ret=-5019, ret="OB_TABLE_NOT_EXIST", MTL_ID()=1003, table=__all_server, condition=where svr_ip='127.0.0.1' and svr_port=2882) [2024-03-15 07:03:43.773061] WDIAG [COORDINATOR] get_self_zone_name (table_accessor.cpp:530) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=18][errcode=-5019] get zone from __all_server failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", columns=0x7f549245c1c8, where_condition="where svr_ip='127.0.0.1' and svr_port=2882", zone_name_holder=) [2024-03-15 07:03:43.773082] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:463) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=21][errcode=-5019] get self zone name failed(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:43.773098] WDIAG [COORDINATOR] get_all_ls_election_reference_info (table_accessor.cpp:472) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=-5019] zone name is empty(ret=-5019, ret="OB_TABLE_NOT_EXIST", all_ls_election_reference_info=[]) [2024-03-15 07:03:43.773115] WDIAG [COORDINATOR] refresh (ob_leader_coordinator.cpp:143) [607][T1003_Occam][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-5019] get all ls election reference info failed(ret=-5019, ret="OB_TABLE_NOT_EXIST") [2024-03-15 07:03:43.780386] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.780419] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=34][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.784365] WDIAG [COORDINATOR] get_ls_election_reference_info (ob_leader_coordinator.cpp:174) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACB-0-0] [lt=143][errcode=-4018] can not find this ls_id in all_ls_election_reference_info_(ret=-4018, ret="OB_ENTRY_NOT_EXIST", ls_id={id:1001}, all_ls_election_reference_info=[]) [2024-03-15 07:03:43.784415] WDIAG [COORDINATOR] refresh_ (election_priority_v1.cpp:162) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACB-0-0] [lt=48][errcode=-4018] fail to get ls election reference info(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, 
*this={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.784519] WDIAG [COORDINATOR] operator() (election_priority_impl.cpp:246) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACB-0-0] [lt=98][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id_={id:1001}, element={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.784587] WDIAG iterate (ob_tuple.h:272) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACB-0-0] [lt=63][errcode=-4018] assign element failed(ret=-4018, std::get(tuple)={is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}) [2024-03-15 07:03:43.784613] WDIAG [COORDINATOR] refresh (election_priority_impl.cpp:261) [969][T1004_L0_G2][T1004][YB427F000001-000613ACABFFFACB-0-0] [lt=26][errcode=-4018] refresh priority failed(ret=-4018, ret="OB_ENTRY_NOT_EXIST", MTL_ID()=1004, ls_id={id:1001}, *this={priority:{is_valid:false, is_observer_stopped:false, is_server_stopped:false, is_zone_stopped:false, fatal_failures:[], is_primary_region:false, serious_failures:[], is_in_blacklist:false, in_blacklist_reason:, scn:{val:0}, is_manual_leader:false, zone_priority:9223372036854775807}}) [2024-03-15 07:03:43.788891] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54e845a228, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f5420252550, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223692647, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486200007067, use_das=false, nested_level=0, session={this:0x7f54913f80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f5420252550}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]}) [2024-03-15 07:03:43.789037] INFO [SERVER] 
sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [138][RSAsyncTask2][T1][YB427F000001-000613ACAADF8382-0-0] [lt=1] will sleep(sleep_us=100000, remain_us=6218014, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=172, timeout_timestamp=1710486230007049) [2024-03-15 07:03:43.789198] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=1] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615}) [2024-03-15 07:03:43.789236] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [526][T1_FreInfoReloa][T1][YB427F000001-000613ACA85F8689-0-0] [lt=39][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4}) [2024-03-15 07:03:43.789451] INFO [COMMON] replace_fragment_node (ob_kvcache_map.cpp:697) [103][KVCacheRep][T0][Y0-0000000000000000-0-0] [lt=49] Cache replace map node details(ret=0, replace_node_count=0, replace_time=2353, replace_start_pos=408928, replace_num=15728) [2024-03-15 07:03:43.790631] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=27][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.790689] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=58][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.800007] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=1][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.800028] WDIAG [STORAGE.TRANS] post (ob_gts_rpc.cpp:226) [190][TsMgr][T1][Y0-0000000000000000-0-0] [lt=20][errcode=-4023] post local gts request failed(ret=-4023, ret="OB_EAGAIN", server="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223799994], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.800044] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=14][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1, srr:[mts=1710486223799994], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.800069] 
INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=8] gts nonblock renew success(ret=0, tenant_id=1, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223799994]}) [2024-03-15 07:03:43.800087] INFO [STORAGE.TRANS] handle_request (ob_timestamp_access.cpp:32) [190][TsMgr][T1003][Y0-0000000000000000-0-0] [lt=9] ObTimestampAccess service type is FOLLOWER(ret=-4038, service_type=0) [2024-03-15 07:03:43.800094] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=6][errcode=-4038] post gts request failed(ret=-4038, ret="OB_NOT_MASTER", leader="127.0.0.1:2882", msg={tenant_id:1003, srr:[mts=1710486223800084], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.800108] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=7] gts nonblock renew success(ret=0, tenant_id=1003, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223800084]}) [2024-03-15 07:03:43.800299] WDIAG [STORAGE.TRANS] handle_local_request_ (ob_timestamp_service.cpp:126) [190][TsMgr][T1004][Y0-0000000000000000-0-0] [lt=0][errcode=-4023] get timestamp failed(ret=-4023, ret="OB_EAGAIN") [2024-03-15 07:03:43.800307] WDIAG [STORAGE.TRANS] query_gts_ (ob_gts_source.cpp:562) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=6][errcode=-4023] post gts request failed(ret=-4023, ret="OB_EAGAIN", leader="127.0.0.1:2882", msg={tenant_id:1004, srr:[mts=1710486223800294], range_size:1, sender:"127.0.0.1:2882"}) [2024-03-15 07:03:43.800325] INFO [STORAGE.TRANS] refresh_gts_location_ (ob_gts_source.cpp:580) [190][TsMgr][T0][Y0-0000000000000000-0-0] [lt=7] gts nonblock renew success(ret=0, tenant_id=1004, gts_local_cache={srr:[mts=0], gts:0, latest_srr:[mts=1710486223800294]}) [2024-03-15 07:03:43.800871] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=35][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.800932] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=60][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.810472] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.810497] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] 
[lt=25][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1", warnings:[]}, tenant_id_=1, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.810529] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, local_server_version={val:1710506547196065859}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223810460) [2024-03-15 07:03:43.810547] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=17][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1, last_post_cluster_heartbeat_tstamp_=1710486223709894, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.810567] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=12] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1, serve_leader_epoch=0, cur_leader_epoch=420, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.810591] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=15] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:43.810751] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=156] start TenantWeakReadClusterService(tenant_id=1) [2024-03-15 07:03:43.811120] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=80][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}}) [2024-03-15 07:03:43.811172] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=52][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777}) [2024-03-15 07:03:43.811281] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=23][errcode=-4076] get cluster service 
master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.811296] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=15][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1003", warnings:[]}, tenant_id_=1003, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226}) [2024-03-15 07:03:43.811309] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=13][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, local_server_version={val:1710482141336457000}, valid_part_count=1, total_part_count=1, generate_timestamp=1710486223811272) [2024-03-15 07:03:43.811320] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=11][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1003, last_post_cluster_heartbeat_tstamp_=1710486223611304, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0") [2024-03-15 07:03:43.811335] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:755) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=9] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] current server is WRS leader, need start CLUSTER weak read service(tenant_id=1003, serve_leader_epoch=0, cur_leader_epoch=1984, cluster_service_tablet_id_={id:226}, in_service=false, can_update_version=false, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.811354] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:347) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] begin start service(tenant_id=1003, is_in_service()=false, can_update_version=false) [2024-03-15 07:03:43.811361] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:349) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=7] start TenantWeakReadClusterService(tenant_id=1003) [2024-03-15 07:03:43.811803] INFO [SQL.RESV] check_table_exist_or_not (ob_dml_resolver.cpp:7564) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F9801-0-0] [lt=22] table not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) [2024-03-15 07:03:43.811834] WDIAG [SQL.RESV] resolve_table_relation_recursively (ob_dml_resolver.cpp:7522) [553][T1_TenantWeakRe][T1][YB427F000001-000613ACAA2F9801-0-0] [lt=28][errcode=-5019] synonym not exist(tenant_id=1, database_id=201001, table_name=__all_weak_read_service, ret=-5019) [2024-03-15 07:03:43.811938] WDIAG [SHARE.SCHEMA] get_tenant_schema_guard (ob_multi_version_schema_service.cpp:1195) [737][T1003_TenantWea][T1003][YB427F000001-000613ACABAF95EE-0-0] [lt=5][errcode=-5627] REACH SYSLOG RATE LIMIT [2024-03-15 07:03:43.811957] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, in_service=false, leader_epoch=0, 
current_version={val:0}, delta=1710486223811953, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=1372, wlock_time=191, check_leader_time=3, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:43.811981] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [553][T1_TenantWeakRe][T1][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5019, ret="OB_TABLE_NOT_EXIST", tenant_id=1, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=420, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.812004] INFO [STORAGE.TRANS] start_service (ob_tenant_weak_read_cluster_service.cpp:432) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] start service done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, in_service=false, leader_epoch=0, current_version={val:0}, delta=1710486223812002, min_version={val:0}, max_version={val:0}, max_stale_time=5000000000, all_valid_server_count=0, total_time=656, wlock_time=21, check_leader_time=0, query_version_time=0, persist_version_time=0) [2024-03-15 07:03:43.812034] INFO [STORAGE.TRANS] self_check (ob_tenant_weak_read_cluster_service.cpp:808) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=0] [WRS] [TENANT_WEAK_READ_SERVICE] [CLUSTER_SERVICE] [SELF_CHECK] done(ret=-5627, ret="OB_SCHEMA_EAGAIN", tenant_id=1003, need_start_service=true, need_stop_service=false, need_change_leader=false, is_in_service()=false, can_update_version=false, cur_leader_epoch=1984, start_service_tstamp_=0, error_count_for_change_leader_=0, last_error_tstamp_for_change_leader_=0) [2024-03-15 07:03:43.813171] INFO [STORAGE.TRANS] generate_weak_read_timestamp_ (ob_ls_wrs_handler.cpp:175) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=12] get wrs ts(ls_id={id:1}, delta=146268958723, timestamp={val:1710339954851689028}, min_tx_service_ts={val:4611686018427387903}) [2024-03-15 07:03:43.813201] INFO [STORAGE.TRANS] print_stat_info (ob_keep_alive_ls_handler.cpp:211) [737][T1003_TenantWea][T1003][Y0-0000000000000000-0-0] [lt=24] [Keep Alive Stat] LS Keep Alive Info(tenant_id=1003, LS_ID={id:1}, Not_Master_Cnt=0, Near_To_GTS_Cnt=0, Other_Error_Cnt=0, Submit_Succ_Cnt=0, last_scn="{val:1710339954825900947}", last_lsn={lsn:365766615140}, last_gts={val:0}, min_start_scn="{val:1710295204909211866}", min_start_status=2) [2024-03-15 07:03:43.816162] WDIAG [SHARE] refresh (ob_task_define.cpp:382) [79][LogLimiterRefre][T0][Y0-0000000000000000-0-0] [lt=63][errcode=0] Throttled WDIAG logs in last second(details {error code, dropped logs, earliest tid}=[{errcode:-5627, dropped:75, tid:801}]) [2024-03-15 07:03:43.821369] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=19][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, 
log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}})
[2024-03-15 07:03:43.821474] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=104][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777})
[2024-03-15 07:03:43.825896] INFO [SQL.EXE] start_stmt (ob_sql_trans_control.cpp:589) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] start stmt(ret=-4283, auto_commit=true, session_id=1, snapshot={this:0x7f54ea2d67f8, valid:false, source:0, core:{version:{val:18446744073709551615}, tx_id:{txid:0}, scn:-1}, uncertain_bound:0, snapshot_lsid:{id:-1}, parts:[]}, savepoint=0, tx_desc={this:0x7f54539d0ae0, tx_id:{txid:0}, state:1, addr:"127.0.0.1:2882", tenant_id:1, session_id:1, assoc_session_id:1, xid:NULL, xa_mode:"", xa_start_addr:"0.0.0.0:0", access_mode:-1, tx_consistency_type:0, isolation:-1, snapshot_version:{val:18446744073709551615}, snapshot_scn:0, active_scn:-1, op_sn:1, alloc_ts:1710486223754491, active_ts:-1, commit_ts:-1, finish_ts:-1, timeout_us:-1, lock_timeout_us:-1, expire_ts:9223372036854775807, coord_id:{id:-1}, parts:[], exec_info_reap_ts:0, commit_version:{val:18446744073709551615}, commit_cb:null, cluster_id:-1, cluster_version:0, flags_.SHADOW:true, flags_.INTERRUPTED:false, flags_.BLOCK:false, flags_.REPLICA:false, can_elr:false, cflict_txs:[], abort_cause:0, commit_expire_ts:-1, commit_task_.is_registered():false, ref:1}, plan_type=1, stmt_type=1, has_for_update=false, query_start_time=1710486222606956, use_das=false, nested_level=0, session={this:0x7f54b5ff80d8, id:1, tenant:"sys", tenant_id:1, effective_tenant:"sys", effective_tenant_id:1, database:"oceanbase", user:"root@%", consistency_level:3, session_state:0, autocommit:true, tx:0x7f54539d0ae0}, plan=0x7f5455a0a050, consistency_level_in_plan_ctx=3, trans_result={incomplete:false, parts:[], touched_ls_list:[], cflict_txs:[]})
[2024-03-15 07:03:43.825997] INFO [SERVER] sleep_before_local_retry (ob_query_retry_ctrl.cpp:87) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=0] will sleep(sleep_us=15000, remain_us=780200, base_sleep_us=1000, retry_sleep_type=1, v.stmt_retry_times_=15, timeout_timestamp=1710486224606195)
[2024-03-15 07:03:43.828393] WDIAG [STORAGE.TRANS] check_gts_ (ob_keep_alive_ls_handler.cpp:237) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=8][errcode=-4023] get gts error(ret=-4023)
[2024-03-15 07:03:43.828394] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=33] [IO SENDER STATUS](send_index=1, req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807)
[2024-03-15 07:03:43.828417] INFO [COMMON] print_sender_status (ob_io_struct.cpp:716) [80][IO_TUNING0][T0][Y0-0000000000000000-0-0] [lt=24] [IO SENDER STATUS](send_index=2, req_count=0, reservation_ts=9223372036854775807, group_limitation_ts=9223372036854775807, tenant_limitation_ts=9223372036854775807, proportion_ts=9223372036854775807)
[2024-03-15 07:03:43.828491] INFO [PALF] handle_next_submit_log_ (log_sliding_window.cpp:1000) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=26] [PALF STAT GROUP LOG INFO](palf_id=1, self="127.0.0.1:2882", role="LEADER", total_group_log_cnt=1, avg_log_batch_cnt=1, total_group_log_size=122, avg_group_log_size=122)
[2024-03-15 07:03:43.828516] INFO [PALF] submit_log (palf_handle_impl.cpp:403) [914][T1004_TxLoopWor][T1004][Y0-0000000000000000-0-0] [lt=21] [PALF STAT APPEND DATA SIZE](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, append size=122)
[2024-03-15 07:03:43.831639] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=38][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}})
[2024-03-15 07:03:43.831668] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=29][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777})
[2024-03-15 07:03:43.832210] INFO [LIB] stat (utility.h:1140) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=17] [PALF STAT WRITE LOG](cur_stat_count=1, stat_interval=1000000, avg cost=3707, this=0x7f5474395ad8)
[2024-03-15 07:03:43.832231] INFO [PALF] inner_append_log (palf_handle_impl.cpp:1660) [792][T1004_IOWorker][T1004][Y0-0000000000000000-0-0] [lt=20] [PALF STAT INNER APPEND LOG](this={palf_id:1, self:"127.0.0.1:2882", has_set_deleted:false}, accum_size=122)
[2024-03-15 07:03:43.832291] WDIAG [PALF] try_update_match_lsn_map_ (log_sliding_window.cpp:3790) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6][errcode=0] [MATCH LSN ADVANCE DELAY]match_lsn advance delay too much time(ret=0, palf_id=1, self="127.0.0.1:2882", server="127.0.0.1:2882", update_func={old_end_lsn:{lsn:87401266262}, new_end_lsn:{lsn:87401266384}, old_advance_time_us:1710486221832724, new_ack_time_us:1710486223832284, advance delay(us):1999560})
[2024-03-15 07:03:43.832322] INFO [PALF] try_advance_committed_lsn_ (log_sliding_window.cpp:1572) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=29] [PALF STAT COMMITTED LOG SIZE](palf_id=1, self="127.0.0.1:2882", committed size=122)
[2024-03-15 07:03:43.832349] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=9] [PALF STAT FS CB](cur_stat_count=1, stat_interval=1000000, avg cost=10, this=0x7f547438d3a8)
[2024-03-15 07:03:43.832356] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=7] [PALF STAT LOG LIFETIME](cur_stat_count=1, stat_interval=1000000, avg cost=3889, this=0x7f547438d3d8)
[2024-03-15 07:03:43.832363] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6] [PALF STAT LOG SUBMIT WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=18, this=0x7f547438d408)
[2024-03-15 07:03:43.832369] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6] [PALF STAT LOG SLIDE WAIT](cur_stat_count=1, stat_interval=1000000, avg cost=3871, this=0x7f547438d438)
[2024-03-15 07:03:43.832378] INFO [LIB] stat (utility.h:1140) [791][T1004_LogIOCb0][T1004][Y0-0000000000000000-0-0] [lt=6] [PALF STAT FLUSH CB](cur_stat_count=1, stat_interval=1000000, avg cost=117, this=0x7f5474395b08)
[2024-03-15 07:03:43.837654] INFO [ARCHIVE] gc_stale_ls_task_ (ob_ls_mgr.cpp:537) [559][T1_LSArchiveMgr][T1][YB427F000001-000613ACA7FF7BAA-0-0] [lt=66] gc stale ls task succ
[2024-03-15 07:03:43.841253] WDIAG [STORAGE.TRANS] run1 (ob_standby_timestamp_service.cpp:145) [896][T1004_STSWorker][T1004][Y0-0000000000000000-0-0] [lt=16][errcode=-4076] query and update last id fail(ret=-4076, ret="OB_NEED_WAIT")
[2024-03-15 07:03:43.841631] INFO [STORAGE.TRANS] try_wait_gts_and_inc_max_commit_ts_ (ob_trans_ctx_mgr_v4.cpp:739) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=2] try wait gts(ret=-4038, ret="OB_NOT_MASTER", ls_id={id:1}, tenant_id=1, max_replay_commit_version={val:1710506547196058421}, gts={val:18446744073709551615})
[2024-03-15 07:03:43.841715] WDIAG [STORAGE.TRANS] in_leader_serving_state (ob_trans_ctx_mgr_v4.cpp:880) [127][SerScheQueue1][T1][YB427F000001-000613ACABEF8028-0-0] [lt=83][errcode=-4038] try_wait_gts_and_inc_max_commit_ts_ failed(ret=-4038, this={this:0x7f549d404030, ls_id:{id:1}, tenant_id:1, state:"L_WORKING", total_tx_ctx_count:1, ls_retain_ctx_mgr:{retain_ctx_list_.size():1, max_wait_ckpt_ts_:{val:1710295204634496822}, last_push_gc_task_ts_:1710485733058358, skip_remove_cnt_:0}, aggre_rec_scn:{val:1710234134355157495}, prev_aggre_rec_scn:{val:1710230200691799540}, uref:4})
[2024-03-15 07:03:43.841795] WDIAG [PALF] recycle_blocks_ (palf_env_impl.cpp:1012) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=16][errcode=0] there is not any block can be recycled, need verify the baselsn of PalfHandleImpl whether has been advanced(ret=0, this={IPalfEnvImpl:{IPalfEnvImpl:"Dummy"}, self:"127.0.0.1:2882", log_dir:"/root/ob/store/clog/tenant_1003", disk_options_wrapper:{disk_opts_for_stopping_writing:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, disk_opts_for_recycling_blocks:{log_disk_size(MB):921, log_disk_utilization_threshold(%):80, log_disk_utilization_limit_threshold(%):95}, status:1}, log_alloc_mgr_:{flying_log_task:0, flying_meta_task:0}})
[2024-03-15 07:03:43.841857] ERROR try_recycle_blocks (palf_env_impl.cpp:688) [616][T1003_PalfGC][T1003][Y0-0000000000000000-0-0] [lt=53][errcode=-4264] Log out of disk space(msg="log disk space is almost full", ret=-4264, total_size(MB)=921, used_size(MB)=875, used_percent(%)=95, warn_size(MB)=737, warn_percent(%)=80, limit_size(MB)=875, limit_percent(%)=95, maximum_used_size(MB)=875, maximum_log_stream=1, oldest_log_stream=1, oldest_scn={val:1710274190378573777})
[2024-03-15 07:03:43.852937] WDIAG [STORAGE.TRANS] post_cluster_heartbeat_rpc_ (ob_tenant_weak_read_service.cpp:800) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=14][errcode=-4076] get cluster service master fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, cluster_service_tablet_id={id:226})
[2024-03-15 07:03:43.852963] WDIAG [STORAGE.TRANS] process_cluster_heartbeat_rpc_cb (ob_tenant_weak_read_service.cpp:438) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=26][errcode=-4016] tenant weak read service cluster heartbeat RPC fail(rcode={code:-4076, msg:"post cluster heartbeat rpc failed, tenant_id=1004", warnings:[]}, tenant_id_=1004, dst="127.0.0.1:2882", cluster_service_tablet_id={id:226})
[2024-03-15 07:03:43.852980] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:759) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=15][errcode=-4076] post cluster heartbeat rpc fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, local_server_version={val:1710506547039047539}, valid_part_count=2, total_part_count=2, generate_timestamp=1710486223852923)
[2024-03-15 07:03:43.852993] WDIAG [STORAGE.TRANS] do_cluster_heartbeat_ (ob_tenant_weak_read_service.cpp:769) [898][T1004_TenantWea][T1004][Y0-0000000000000000-0-0] [lt=13][errcode=-4076] tenant weak read service do cluster heartbeat fail(ret=-4076, ret="OB_NEED_WAIT", tenant_id_=1004, last_post_cluster_heartbeat_tstamp_=1710486223652886, cluster_heartbeat_interval_=1000000, cluster_service_tablet_id={id:226}, cluster_service_master="0.0.0.0:0")
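
Editor's note: the repeating errcode=-4264 entries from try_recycle_blocks (tenant 1003) report that used_size has reached limit_size. The logged fields are internally consistent if warn_size and limit_size are taken as warn_percent and limit_percent of log_disk_size, rounded to the nearest MB (737 MB and 875 MB of 921 MB). The following is a minimal illustrative sketch of that arithmetic only; it is not OceanBase source code, and the rounding rule is an assumption inferred from the logged values.

// threshold_sketch.cpp -- reproduce the values reported in the ERROR lines above.
#include <cstdint>
#include <cstdio>

int main() {
  const int64_t total_mb = 921;        // log_disk_size(MB) from the log
  const int64_t used_mb = 875;         // used_size(MB) from the log
  const int64_t warn_percent = 80;     // log_disk_utilization_threshold(%)
  const int64_t limit_percent = 95;    // log_disk_utilization_limit_threshold(%)

  // Assumed round-to-nearest; matches the logged 737 MB / 875 MB.
  const int64_t warn_mb = (total_mb * warn_percent + 50) / 100;    // 737
  const int64_t limit_mb = (total_mb * limit_percent + 50) / 100;  // 875

  std::printf("warn_size=%lldMB limit_size=%lldMB used=%lldMB (%lld%%)\n",
              (long long)warn_mb, (long long)limit_mb,
              (long long)used_mb, (long long)(used_mb * 100 / total_mb));

  if (used_mb >= limit_mb) {
    // Corresponds to the repeated "Log out of disk space" ERROR entries:
    // they persist until old log blocks can be recycled (checkpoint advances)
    // or the tenant's log disk size is enlarged.
    std::printf("log disk space is almost full\n");
  }
  return 0;
}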