obproxy login failure

【Environment】Production or test environment
【OB or other components】
obproxy, observer
【Version】
obproxy compiled from the master branch
【Problem description】
Logging in to obproxy directly fails. The log shows that the connection to the observer was disconnected, and I don't know why; the configured parameters all look fine to me.
【Reproduction steps】
【Symptoms and impact】

【Attachments】
obproxy log
obproxy.log (86.8 KB)

I can't confirm everything, but from the log you provided:

[2023-07-03 15:22:02.791765] WARN  [PROXY] get_idc_url (ob_config_server_processor.cpp:522) [103082][Y0-00007FFFE03C8A10] [lt=6] [dc=0] rs_url_ is invalid(cluster_info={rs_url:{url:""}, cluster_name:{config_string:"obcluster"}, master_cluster_id:0, sub_cluster_info:{cluster_id:0, is_used:false, role:1, web_rs_list:[[0]{server:"10.186.60.55:2881", is_dup_replica:false, role:"LEADER", type:"FULL"}], rs_list_hash:0, create_failure_count:0, idc_list:[]}}, ret=-4002)
[2023-07-03 15:22:02.791804] WARN  [PROXY] refresh_idc_list (ob_config_server_processor.cpp:343) [103082][Y0-00007FFFE03C8A10] [lt=38] [dc=0] fail to get cluster url(cluster_name=obcluster, cluster_id=0, ret=-4002)

The cluster configured in obproxy doesn't appear to be reachable. Does connecting directly to the observer work?
Could you remove the sensitive information and post your configuration so we can take a look?
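
For example, a direct connection check might look like this (a sketch only: 10.186.60.55:2881 is the observer address taken from your log, and the user/password are placeholders to adjust for your environment):

mysql -h10.186.60.55 -P2881 -uroot@sys -p

If this works but logging in through obproxy still fails, the issue is more likely on the obproxy side.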

Direct connection to the observer works.
obproxy configuration:

binlog_service_ip=
weak_read_user_list=
enable_transaction_split=False
enable_read_write_split=False
enable_global_ps_cache=True
obproxy_read_consistency=0
obproxy_read_only=1
digest_sql_length=1024
enable_stat=True
enable_trace=True
enable_performance_mode=False
observer_sys_password1=
observer_sys_password=
obproxy_sys_password=
inspector_password=
citadel_agent_ip=
bt_mode=
bt_retry_times=3
bt_server_addr=
domain_name=
session_pool_stat_log_interval=1m
session_pool_stat_log_ratio=9000
session_pool_default_prefill=False
session_pool_default_blocking_timeout=500ms
session_pool_default_idle_timeout=1800s
session_pool_default_max_conn=20
session_pool_default_min_conn=0
use_local_session_prop=False
session_pool_cont_delay_interval=5ms
max_pending_num=100000
create_conn_cont_num=10
refresh_server_cont_num=5
session_pool_retry_interval=1ms
need_release_after_tx=False
enable_no_sharding_skip_real_conn=False
enable_session_pool_for_no_sharding=False
enable_conn_precreate=False
is_pool_mode=False
auto_scan_all=False
cloud_instance_id=
mng_url=
runtime_env=
instance_ip=
pod_namespace=
pod_name=
server_zone=
workspace_name=
env_tenant_name=
grpc_timeout=30m
enable_shard_authority=False
use_local_dbconfig=False
dataplane_host=
sidecar_node_id=
enable_sharding=False
enable_mysql_proxy_pool=True
sequence_prefetch_threshold=7000
enable_sequence_prefetch=True
sequence_fail_retry_count=2
sequence_entry_expire_time=1d
enable_async_pull_location_cache=True
check_tenant_locality_change=True
ldg_info_refresh_interval=20s
enable_ldg=False
enable_primary_zone=True
proxy_primary_zone_name=
enable_standby=True
qos_stat_item_limit=3000
qos_stat_item_max_idle_period=30m
qos_stat_clean_interval=1m
enable_qos=False
enable_server_ssl=False
enable_client_ssl=False
skip_proxy_sys_private_check=True
skip_proxyro_check=False
enable_full_username=False
enable_cloud_full_username=False
sql_table_cache_mem_limited=128MB
sql_table_cache_expire_relative_time=0
enable_index_route=False
mysql_version=5.6.25
enable_cpu_isolate=False
proxy_route_policy=
qa_mode_mock_public_cloud_vid=1
qa_mode_mock_public_cloud_slb_addr=127.0.0.1:33045
location_expire_period_time=0d
location_expire_period=0
enable_qa_mode=False
partition_location_expire_relative_time=0
enable_causal_order_read=True
enable_extra_prometheus_metric=False
enable_prometheus=True
prometheus_sync_interval=1m
prometheus_cost_ms_unit=False
prometheus_listen_port=2884
enable_monitor_stat=True
monitor_stat_high_threshold=500ms
monitor_stat_middle_threshold=100ms
monitor_stat_low_threshold=30ms
monitor_stat_dump_interval=1m
monitor_cost_ms_unit=False
monitor_item_max_idle_period=30m
monitor_item_limit=3000
tenant_location_valid_time=1d
delay_update_entry_interval=5s
client_max_memory_size=8MB
max_connections=60000
current_local_config_version=0
proxy_idc_name=
enable_async_log=True
xflush_log_level=INFO
monitor_log_level=INFO
syslog_level=INFO
server_routing_mode=oceanbase
test_server_addr=
target_db_server=
internal_cmd_mem_limited=16M
sqlaudit_mem_limited=0
limited_pl_update_threshold=10
normal_pl_update_threshold=100
enable_cached_server=True
enable_pl_route=True
enable_reroute=False
enable_transaction_internal_routing=True
enable_ob_protocol_v2_with_client=True
enable_ob_protocol_v2=True
enable_compression_protocol=True
enable_partition_table_route=True
enable_bad_route_reject=False
detect_server_timeout=5s
server_detect_refresh_interval=1s
server_detect_fail_threshold=3
server_detect_mode=1
enable_congestion=True
min_congested_connect_timeout=100ms
congestion_retry_interval=20s
congestion_fail_window=120s
min_keep_congestion_interval=20s
congestion_failure_threshold=5
proxy_tenant_name=
rootservice_list=xx:2881
rootservice_cluster_name=obcluster
enable_get_rslist_remote=False
enable_metadb_used=True
app_name=undefined
proxy_id=0
proxy_service_mode=client
obproxy_config_server_url=
local_bound_ipv6_ip=::
ip_listen_mode=1
listen_port=2885
local_bound_ip=0.0.0.0
ignore_local_config=True
slow_query_time_threshold=500ms
query_digest_time_threshold=100ms
slow_proxy_process_time_threshold=2ms
slow_transaction_time_threshold=1s
enable_trace_stats=False
enable_cpu_topology=False
enable_strict_stat_time=True
enable_report_session_stats=False
enable_mysqlsm_info=True
enable_sync_all_stats=True
enable_trans_detail_stats=True
enable_flow_control=True
flow_event_queue_threshold=5
flow_consumer_reenable_threshold=256
flow_low_water_mark=64K
flow_high_water_mark=64K
request_buffer_length=4KB
tunnel_request_size_threshold=8KB
default_buffer_water_mark=32KB
max_log_file_size=256MB
enable_strict_kernel_release=False
automatic_match_work_thread=True
shard_scan_thread_num=0
grpc_client_num=9
grpc_thread_num=8
block_thread_num=1
task_thread_num=2
work_thread_num=128
routing_cache_mem_limited=128MB
stack_size=1MB
proxy_mem_limited=2G
client_tcp_user_timeout=0
client_tcp_keepcnt=5
client_tcp_keepintvl=5
client_tcp_keepidle=5
client_sock_option_flag_out=3
server_tcp_user_timeout=0
server_tcp_keepcnt=5
server_tcp_keepintvl=5
server_tcp_keepidle=5
server_tcp_init_cwnd=0
sock_packet_tos_out=0
sock_packet_mark_out=0
sock_option_flag_out=3
sock_send_buffer_size_out=0
sock_recv_buffer_size_out=0
default_inactivity_timeout=180000s
net_config_poll_timeout=1ms
net_accept_threads=2
frequent_accept=True
connect_observer_max_retries=3
enable_client_ip_checkout=True
enable_proxy_scramble=False
enable_cluster_checkout=False
observer_query_timeout_delta=20s
client_max_connections=8192
enable_client_connection_lru_disconnect=False
username_separator=:;-;.
short_async_task_timeout=5s
long_async_task_timeout=60s
need_convert_vip_to_tname=False
enable_syslog_file_compress=False
max_syslog_file_count=0
max_syslog_file_time=7d
log_dir_size_threshold=64GB
log_cleanup_interval=1m
log_file_percentage=80
delay_exit_time=100ms
hot_upgrade_exit_timeout=30000000
hot_upgrade_rollback_timeout=24h
hot_upgrade_failure_retries=5
fetch_proxy_bin_timeout=120s
fetch_proxy_bin_random_time=300s
cluster_expire_time=1d
cluster_count_high_water_mark=256
stat_dump_interval=6000s
stat_table_sync_interval=60s
idc_list_refresh_interval=2h
config_server_refresh_interval=60s
metadb_server_state_refresh_interval=60s
server_state_refresh_interval=20s
cache_cleaner_clean_interval=20s
proxy_hot_upgrade_check_interval=5s
proxy_info_check_interval=60s
enable_xa_route=True

observer configuration:

[root@localhost etc]# strings observer.config.bin
all_server_list=127.0.0.1:2882
__min_full_resource_pool_memory=1073741824
log_disk_size=15G
min_observer_version=4.1.0.1
enable_syslog_recycle=True
enable_syslog_wf=False
max_syslog_file_count=4
syslog_level=WDIAG
cluster_id=1
rootservice_list=127.0.0.1:2882:2881
cpu_count=16
system_memory=1G
memory_limit=6G
zone=zone1
devname=lo
mysql_port=2881
rpc_port=2882
datafile_size=20G
data_dir=/root/oceanbase-ce/store
compatible=4.1.0.1
cpu_quota_concurrency=10

Some of the information isn't clear from this.

  1. Was this cluster deployed with obd or ocp?
  2. If it was deployed with obd, you can refer to the example configuration file on the machine where obd is installed: /usr/obd/example/single-with-obproxy-example.yaml
     In that file, the observer cluster is configured with an appname, and obproxy is configured with a cluster_name that must correspond to it; the default name is obcluster (a quick cross-check is sketched below).
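
As a rough cross-check (a sketch only, assuming you can log into the sys tenant on the observer and into obproxy's admin account root@proxysys on its listen port; exact statement availability may vary slightly by version):

-- on the observer, sys tenant: the cluster name (appname) it was started with
SHOW PARAMETERS LIKE 'cluster';
-- on obproxy, logged in as root@proxysys: the cluster name obproxy expects
SHOW PROXYCONFIG LIKE 'rootservice_cluster_name';

In your case both should show obcluster.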

1. Yes.
2. Here is the corresponding configuration:

cat single-with-obproxy-example.yaml
## Only need to configure when remote login is required
# user:
#   username: your username
#   password: your password if need
#   key_file: your ssh-key file path if need
#   port: your ssh port, default 22
#   timeout: ssh connection timeout (second), default 30
oceanbase-ce:
  servers:
    # Please don't use hostname, only IP can be supported
    - 192.168.1.3
  global:
    #  The working directory for OceanBase Database. OceanBase Database is started under this directory. This is a required field.
    home_path: /root/observer
    # The directory for data storage. The default value is $home_path/store.
    # data_dir: /data
    # The directory for clog, ilog, and slog. The default value is the same as the data_dir value.
    # redo_dir: /redo
    # Please set devname as the network adaptor's name whose ip is in the setting of servers.
    # if servers is set to "127.0.0.1", please set devname as "lo"
    # if current ip is 192.168.1.10, and the ip's network adaptor's name is "eth0", please use "eth0"
    devname: eth0
    mysql_port: 2881 # External port for OceanBase Database. The default value is 2881. DO NOT change this value after the cluster is started.
    rpc_port: 2882 # Internal port for OceanBase Database. The default value is 2882. DO NOT change this value after the cluster is started.
    zone: zone1
    # if current hardware's memory capacity is smaller than 50G, please use the setting of "mini-single-example.yaml" and do a small adjustment.
    memory_limit: 64G # The maximum running memory for an observer
    # The reserved system memory. system_memory is reserved for general tenants. The default value is 30G.
    system_memory: 30G
    datafile_size: 192G # Size of the data file.
    log_disk_size: 192G # The size of disk space used by the clog files.
    enable_syslog_wf: false # Print system logs whose levels are higher than WARNING to a separate log file. The default value is true.
    enable_syslog_recycle: true # Enable auto system log recycling or not. The default value is false.
    max_syslog_file_count: 4 # The maximum number of reserved log files before enabling auto recycling. The default value is 0.
    # observer cluster name, consistent with obproxy's cluster_name
    appname: obcluster
    # root_password: # root user password, can be empty
    # proxyro_password: # proxyro user password, consistent with obproxy's observer_sys_password, can be empty
obproxy-ce:
  # Set dependent components for the component.
  # When the associated configurations are not done, OBD will automatically get these configurations from the dependent components.
  depends:
    - oceanbase-ce
  servers:
    - 192.168.1.2
  global:
    listen_port: 2883 # External port. The default value is 2883.
    prometheus_listen_port: 2884 # The Prometheus port. The default value is 2884.
    home_path: /root/obproxy
    # oceanbase root server list
    # format: ip:mysql_port;ip:mysql_port. When a depends exists, OBD gets this value from the oceanbase-ce of the depends.
    # rs_list: 192.168.1.2:2881;192.168.1.3:2881;192.168.1.4:2881
    enable_cluster_checkout: false
    # observer cluster name, consistent with oceanbase-ce's appname. When a depends exists, OBD gets this value from the oceanbase-ce of the depends.
    # cluster_name: obcluster
    skip_proxy_sys_private_check: true
    enable_strict_kernel_release: false
    # obproxy_sys_password: # obproxy sys user password, can be empty. When a depends exists, OBD gets this value from the oceanbase-ce of the depends.
    # observer_sys_password: # proxyro user password, consistent with oceanbase-ce's proxyro_password, can be empty. When a depends exists, OBD gets this value from the oceanbase-ce of the depends.

Sorry, what I meant was that you can use this example file as a reference for how the OB cluster name (appname) and the cluster name obproxy associates with the OB cluster should be configured. Please check your own configuration against it.

After checking, the configuration is correct.
Going through the obproxy log:

[2023-07-04 16:44:23.909577] INFO  [PROXY] ob_resource_pool_processor.cpp:2401 [102838][Y0-00007FFFD112A9E0] [lt=12] [dc=0] will create new cluster resource(cluster_name=obcluster, cluster_id=0, cr_version=7)
[2023-07-04 16:44:24.763396] INFO  [PROXY] ob_resource_pool_processor.cpp:1517 [102838][Y0-00007FFFD112A9E0] [lt=35] [dc=0] local rslist is unavailable many times, need get newest one(cluster_name=obcluster, cluster_id=0, new_failure_count=3, rs_list=[[0]"127.0.0.1:2881"])
[2023-07-04 16:44:24.763429] INFO  [PROXY] ob_resource_pool_processor.cpp:943 [102838][Y0-00007FFFD112A9E0] [lt=31] [dc=0] will add INIT_RS task(this=0x7fffd2622eb0, cluster_name=obcluster, cluster_id=0)
[2023-07-04 16:44:24.763496] INFO  [PROXY] ob_resource_pool_processor.cpp:1124 [102838][Y0-00007FFFD112A9E0] [lt=8] [dc=0] ObClusterResourceCreateCont::main_handler(event="CLUSTER_RESOURCE_CREATE_TIMEOUT_EVENT", init_status=1, cluster_name=obcluster, cluster_id=0, data=0x7fffe03cbde0)
[2023-07-04 16:44:24.763538] INFO  [PROXY] ob_proxy_json_config_info.cpp:1217 [102980][Y0-00007FFFE03C8A10] [lt=82] [dc=0] will update rslist(cluster_name=obcluster, cluster_id=0, real cluster_id=0, old rslist=[[0]{server:"127.0.0.1:2881", is_dup_replica:false, role:"LEADER", type:"FULL"}], new rslist=[[0]{server:"127.0.0.1:2881", is_dup_replica:false, role:"LEADER", type:"FULL"}])
[2023-07-04 16:44:24.763628] INFO  [PROXY] ob_route_utils.cpp:1262 [102980][Y0-00007FFFE03C8A10] [lt=52] [dc=0] update sys tenant __all_dummy succ((cluster_name=obcluster, tmp_entry={{this:0x7fffd13166c0, ref_count:2, cr_version:0, cr_id:0, create_time_us:1688460264763603, last_valid_time_us:1688460264763517, last_access_time_us:1688460264763517, last_update_time_us:0, schema_version:0, tenant_version:0, time_for_expired:0, state:"AVAIL"}, this:0x7fffd13166c0, is_inited:true, is_dummy_entry:true, is_entry_from_rslist:true, is_empty_entry_allowed:false, is_need_force_flush:false, has_dup_replica:false, cr_id:0, name:{cluster_name:"obcluster", tenant_name:"sys", database_name:"oceanbase", package_name:"", table_name:"__all_dummy"}, table_id:1099511627911, table_type:"UNKNOWN", part_num:1, replica_num:1, buf_len:32, buf_start:0x7fffd13167c8, tenant_servers:{this:0x7ffff0c81120, is_inited:true, server_count:1, replica_count:1, partition_count:1, next_partition_idx:0, server_array:0x7fffd1320850, server_array_:[[0]{server:"127.0.0.1:2881", is_dup_replica:false, role:"FOLLOWER", type:"FULL"}]}})
[2023-07-04 16:44:25.818021] INFO  [PROXY] ob_resource_pool_processor.cpp:906 [102838][Y0-00007FFFD112A9E0] [lt=15] [dc=0] handle timeout(created_cr=0x7fffcd22a080, is_rslist_from_local=false)
[2023-07-04 16:44:25.818075] INFO  [PROXY] ob_resource_pool_processor.cpp:1354 [102838][Y0-00007FFFD112A9E0] [lt=29] [dc=0] cluster resource create complete(created_cr={this:0x7fffcd22a080, ref_count:3, is_inited:true, cluster_info_key:{cluster_name:{config_string:"obcluster"}, cluster_id:0}, cr_state:"CR_INIT_FAILED", version:7, last_access_time_ns:0, deleting_completed_thread_num:0, fetch_rslist_task_count:1, fetch_idc_list_task_count:0, last_idc_list_refresh_time_ns:0, last_rslist_refresh_time_ns:1688460264762912244, server_state_version:0}, pending_list_count=0)
[2023-07-04 16:44:25.818124] INFO  [PROXY] ob_async_common_task.cpp:209 [102838][Y0-00007FFFE03C8A10] [lt=35] [dc=0] async task has been cancelled(ret=0)
[2023-07-04 16:44:25.818139] INFO  [PROXY] ob_resource_pool_processor.cpp:1826 [102838][Y0-00007FFFE03C8A10] [lt=12] [dc=0] ObClusterResource will destroy, and wait to be free(this={this:0x7fffcd22a080, ref_count:0, is_inited:true, cluster_info_key:{cluster_name:{config_string:"obcluster"}, cluster_id:0}, cr_state:"CR_INIT_FAILED", version:7, last_access_time_ns:0, deleting_completed_thread_num:0, fetch_rslist_task_count:0, fetch_idc_list_task_count:0, last_idc_list_refresh_time_ns:0, last_rslist_refresh_time_ns:1688460264762912244, server_state_version:0})
[2023-07-04 16:44:25.818210] INFO  [PROXY] ob_mysql_proxy.h:177 [102838][Y0-00007FFFE03C8A10] [lt=16] [dc=0] client pool will be destroyed(client_pool={this:0x7ffff0e517d0, ref_count:3, is_inited:true, stop:false, mc_count:2, cluster_resource:0x7ffff067d080})
[2023-07-04 16:44:25.818266] INFO  [PROXY] ob_congestion_manager.cpp:103 [102838][Y0-00007FFFE03C8A10] [lt=13] [dc=0] ObCongestionManager will destroy(this={this:0x7fffcd22e180, is_inited:true, is_base_servers_added:false, is_congestion_enabled:true, zone_count:0, config:{ref_count:1, this:0x7ffff0e67b60, conn_failure_threshold:5, alive_failure_threshold:5, fail_window_sec:120, retry_interval_sec:20, min_keep_congestion_interval_sec:20}})
[2023-07-04 16:44:25.818324] INFO  [PROXY] ob_resource_pool_processor.cpp:1784 [102838][Y0-00007FFFE03C8A10] [lt=16] [dc=0] the cluster resource will free(this={this:0x7fffcd22a080, ref_count:0, is_inited:false, cluster_info_key:{cluster_name:{config_string:"obcluster"}, cluster_id:0}, cr_state:"CR_DEAD", version:7, last_access_time_ns:0, deleting_completed_thread_num:0, fetch_rslist_task_count:0, fetch_idc_list_task_count:0, last_idc_list_refresh_time_ns:0, last_rslist_refresh_time_ns:1688460264762912244, server_state_version:0})
[2023-07-04 16:44:25.818368] INFO  [PROXY] ob_congestion_manager.cpp:103 [102838][Y0-00007FFFE03C8A10] [lt=13] [dc=0] ObCongestionManager will destroy(this={this:0x7fffcd22e180, is_inited:false, is_base_servers_added:false, is_congestion_enabled:true, zone_count:0, config:NULL})
[2023-07-04 16:44:25.818651] INFO  [PROXY] ob_resource_pool_processor.cpp:1124 [102838][Y0-00007FFFD112A9E0] [lt=12] [dc=0] ObClusterResourceCreateCont::main_handler(event="CLUSTER_RESOURCE_INFORM_OUT_EVENT", init_status=1, cluster_name=obcluster, cluster_id=0, data=0x7fffe03cc7f0)
[2023-07-04 16:44:27.363396] WARN  [PROXY.SM] state_get_cluster_resource (ob_mysql_sm.cpp:1678) [102838][Y0-00007FFFD112A9E0] [lt=14] [dc=0] data is NULL(sm_id=13, ret=-4152)
[2023-07-04 16:44:28.875617] WARN  [PROXY.TXN] handle_error_jump (ob_mysql_transact.cpp:123) [102838][Y0-00007FFFD112A9E0] [lt=39] [dc=0] [ObMysqlTransact::handle_error_jump]
[2023-07-04 16:44:28.875671] WARN  [PROXY.SM] setup_error_transfer (ob_mysql_sm.cpp:8587) [102838][Y0-00007FFFD112A9E0] [lt=42] [dc=0] [setup_error_transfer] Now closing connection(sm_id=13, request_cmd="Sleep", sql_cmd="Handshake", sql=OB_MYSQL_COM_LOGIN)
[2023-07-04 16:44:28.875709] WARN  [PROXY.SM] update_cmd_stats (ob_mysql_sm.cpp:9761) [102838][Y0-00007FFFD112A9E0] [lt=17] [dc=0] Slow Query: ((client_ip={192.168.21.18:55713}, server_ip={*Not IP address [0]*:0}, obproxy_client_port={*Not IP address [0]*:0}, server_trace_id=, route_type=ROUTE_TYPE_MAX, user_name=root, tenant_name=sys, cluster_name=obcluster, logic_database_name=, logic_tenant_name=, ob_proxy_protocol=0, cs_id=13, proxy_sessid=0, ss_id=0, server_sessid=0, sm_id=13, cmd_size_stats={client_request_bytes:211, server_request_bytes:0, server_response_bytes:0, client_response_bytes:0}, cmd_time_stats={client_transaction_idle_time_us=3271, client_request_read_time_us=457, client_request_analyze_time_us=426, cluster_resource_create_time_us=12605558, pl_lookup_time_us=0, pl_process_time_us=0, bl_lookup_time_us=0, bl_process_time_us=0, congestion_control_time_us=0, congestion_process_time_us=0, do_observer_open_time_us=0, server_connect_time_us=0, server_sync_session_variable_time_us=0, server_send_saved_login_time_us=0, server_send_use_database_time_us=0, server_send_session_variable_time_us=0, server_send_session_user_variable_time_us=0, server_send_all_session_variable_time_us=0, server_send_start_trans_time_us=0, server_send_xa_start_time_us=0, build_server_request_time_us=0, plugin_compress_request_time_us=0, prepare_send_request_to_server_time_us=0, server_request_write_time_us=0, server_process_request_time_us=0, server_response_read_time_us=0, plugin_decompress_response_time_us=0, server_response_analyze_time_us=0, ok_packet_trim_time_us=0, client_response_write_time_us=0, request_total_time_us=15663039}, sql=OB_MYSQL_COM_LOGIN, trans_internal_routing_state=not in trans)
[2023-07-04 16:44:28.875906] WARN  [PROXY.SM] update_stats (ob_mysql_sm.cpp:9888) [102838][Y0-00007FFFD112A9E0] [lt=5] [dc=0] Slow transaction: ((client_ip={192.168.21.18:55713}, server_ip={*Not IP address [0]*:0}, server_trace_id=, route_type=ROUTE_TYPE_MAX, ob_proxy_protocol=0, cs_id=13, proxy_sessid=0, ss_id=0, server_sessid=0, sm_id=13, state=9, trans_stats_={client_requests:1, server_responses:0, pl_lookup_retries:0, server_retries:0, client_request_bytes:211, server_request_bytes:0, server_response_bytes:0, client_response_bytes:78, client_transaction_idle_time_us=3271, client_process_request_time_us=1470, client_request_read_time_us=1470, client_request_analyze_time_us=433, cluster_resource_create_time_us=12605558, pl_lookup_time_us=0, pl_process_time_us=0, congestion_control_time_us=0, congestion_process_time_us=0, do_observer_open_time_us=0, server_connect_time_us=0, sync_session_variable_time_us=0, send_saved_login_time_us=0, send_use_database_time_us=0, send_session_vars_time_us=0, send_session_user_vars_time_us=0, send_all_session_vars_time_us=0, send_start_trans_time_us=0, send_xa_start_time_us=0, build_server_request_time_us=0, plugin_compress_request_time_us=0, prepare_send_request_to_server_time_us=0, server_request_write_time_us=0, server_process_request_time_us=0, server_response_read_time_us=0, plugin_decompress_response_time_us=0, server_response_analyze_time_us=0, ok_packet_trim_time_us=0, client_response_write_time_us=48, trans_time_us=15666631}, last_sql=OB_MYSQL_COM_LOGIN)
[2023-07-04 16:44:28.876011] INFO  [PROXY.SS] ob_mysql_client_session.cpp:652 [102838][Y0-00007FFFD112A9E0] [lt=39] [dc=0] client session do_io_close((*this={this:0x7fffe03ffcd0, is_proxy_mysql_client:false, is_waiting_trans_first_request:false, need_delete_cluster:false, is_first_dml_sql_got:false, vc_ready_killed:false, active:true, magic:19132429, conn_decrease:true, current_tid:102838, cs_id:13, proxy_sessid:0, session_info:{is_inited:true, priv_info:{has_all_privilege:false, cs_id:4294967295, user_priv_set:-1, cluster_name:"", tenant_name:"", user_name:""}, version:{common_hot_sys_var_version:0, common_sys_var_version:0, mysql_hot_sys_var_version:0, mysql_sys_var_version:0, hot_sys_var_version:0, sys_var_version:0, user_var_version:0, db_name_version:0, last_insert_id_version:0, sess_info_version:0}, hash_version:{common_hot_sys_var_version:0, common_sys_var_version:0, mysql_hot_sys_var_version:0, mysql_sys_var_version:0, hot_sys_var_version:0, sys_var_version:0, user_var_version:0, db_name_version:0, last_insert_id_version:0, sess_info_version:0}, val_hash:{common_hot_sys_var_hash:0, common_cold_sys_var_hash:0, mysql_hot_sys_var_hash:0, mysql_cold_sys_var_hash:0, hot_sys_var_hash:0, cold_sys_var_hash:0, user_var_hash:0}, global_vars_version:-1, is_global_vars_changed:false, is_trans_specified:false, is_user_idc_name_set:false, is_read_consistency_set:false, idc_name:"", cluster_id:0, real_meta_cluster_name:"", safe_read_snapshot:0, syncing_safe_read_snapshot:0, route_policy:1, proxy_route_policy:3, user_identity:2, global_vars_version:-1, is_read_only_user:false, is_request_follower_user:false, ob20_request:{remain_payload_len:0, ob20_request_received_done:false, ob20_header:{ob 20 protocol header:{compressed_len:0, seq:0, non_compressed_len:0}, magic_num:0, header_checksum:0, connection_id:0, request_id:0, pkt_seq:0, payload_len:0, version:0, flag_.flags:0, reserved:0}}, client_cap:0, server_cap:0}, dummy_ldc:{use_ldc:false, idc_name:"", item_count:0, site_start_index_array:[[0]0, [1]0, [2]0, [3]0], item_array:null, pl:null, ts:null, readonly_exist_status:"READONLY_ZONE_UNKNOWN"}, dummy_entry:null, server_state_version:0, cur_ss:null, bound_ss:null, lii_ss:null, cluster_resource:NULL, client_vc:0x7fffd1112620, using_ldg:false, trace_stats:NULL}, client_vc_=0x7fffd1112620, this=0x7fffe03ffcd0)
[2023-07-04 16:44:28.876133] INFO  [PROXY.CS] ob_mysql_client_session.cpp:94 [102838][Y0-00007FFFD112A9E0] [lt=108] [dc=0] client session destroy(cs_id=13, proxy_sessid=0, client_vc=NULL)
[2023-07-04 16:44:28.876258] INFO  [PROXY.SM] ob_mysql_sm.cpp:9188 [102838][Y0-00007FFFD112A9E0] [lt=6] [dc=0] deallocating sm(sm_id=13)
[2023-07-04 16:44:28.876347] INFO  [PROXY] ob_mysql_client_pool.cpp:135 [102856][Y0-00007FFFE03C79D0] [lt=0] [dc=0] all mysql client has been scheduled to destroy self(deleted_count=2)
[2023-07-04 16:44:28.876463] INFO  [PROXY] ob_client_vc.cpp:676 [102856][Y0-00007FFFE03C8A50] [lt=28] [dc=0] mysql client active timeout(active_timeout_ms=0, next_action=1, info={user_name:"proxyro@sys#obcluster:0", database_name:"oceanbase", cluster_name:"obcluster", request_param:{sql:"", is_deep_copy:false, current_idc_name:"", is_user_idc_name_set:false, need_print_trace_stat:false, target_addr:, ob_client_flags_.flags_:0, is_detect_client:false}})
[2023-07-04 16:44:28.876501] INFO  [PROXY] ob_client_vc.cpp:1170 [102856][Y0-00007FFFE03C8A50] [lt=35] [dc=0] mysql client will kill self(this=0x7ffff0e60e70)
[2023-07-04 16:44:28.876524] INFO  [PROXY] ob_client_vc.cpp:676 [102856][Y0-00007FFFE03C8A90] [lt=9] [dc=0] mysql client active timeout(active_timeout_ms=0, next_action=1, info={user_name:"proxyro@sys#obcluster:0", database_name:"oceanbase", cluster_name:"obcluster", request_param:{sql:"", is_deep_copy:false, current_idc_name:"", is_user_idc_name_set:false, need_print_trace_stat:false, target_addr:, ob_client_flags_.flags_:0, is_detect_client:false}})
[2023-07-04 16:44:28.876541] INFO  [PROXY] ob_client_vc.cpp:1170 [102856][Y0-00007FFFE03C8A90] [lt=16] [dc=0] mysql client will kill self(this=0x7ffff0e611e0)
[2023-07-04 16:44:28.876547] INFO  [PROXY] ob_mysql_client_pool.cpp:205 [102856][Y0-00007FFFE03C8A90] [lt=5] [dc=0] client pool will be free(this={this:0x7ffff0e517d0, ref_count:0, is_inited:false, stop:true, mc_count:2, cluster_resource:0x7ffff067d080})
[2023-07-04 16:44:29.056731] INFO  [PROXY] ob_table_cache.cpp:705 [102838][Y0-00007FFFF0C3EEA0] [lt=7] [dc=0] this table entry will erase from tc map((*it={{this:0x7ffff0d181b0, ref_count:1, cr_version:0, cr_id:0, create_time_us:1688460084072626, last_valid_time_us:1688460084072171, last_access_time_us:1688460120024871, last_update_time_us:0, schema_version:0, tenant_version:0, time_for_expired:0, state:"DELETED"}, this:0x7ffff0d181b0, is_inited:true, is_dummy_entry:true, is_entry_from_rslist:false, is_empty_entry_allowed:false, is_need_force_flush:false, has_dup_replica:false, cr_id:0, name:{cluster_name:"obcluster", tenant_name:"sys", database_name:"oceanbase", package_name:"", table_name:"__all_dummy"}, table_id:1099511627911, table_type:"UNKNOWN", part_num:1, replica_num:1, buf_len:32, buf_start:0x7ffff0d182b8, tenant_servers:{this:0x7ffff0c830a0, is_inited:true, server_count:1, replica_count:1, partition_count:1, next_partition_idx:0, server_array:0x7fffe03d6380, server_array_:[[0]{server:"127.0.0.1:2881", is_dup_replica:false, role:"FOLLOWER", type:"FULL"}]}})

There is a "data is NULL" warning here that leads to the connection being closed, but I can't determine why data is NULL.
Also, I log in from the client as the root user, but in the log it becomes proxyro@sys#obcluster:0 — is that normal?

From the log output, obproxy is trying to connect to 127.0.0.1:2881, which suggests obproxy and the observer are on the same machine? Either way, the connection between obproxy and OceanBase does not look healthy.
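
As a basic check from the obproxy host (a hedged sketch; any TCP/MySQL reachability test will do, and the credentials are placeholders):

nc -zv 127.0.0.1 2881
mysql -h127.0.0.1 -P2881 -uroot@sys -p -e "SELECT 1"

If these succeed, the network path itself is fine and the problem is more likely at the authentication or configuration layer.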

Yes, I put them on the same machine. What can I do next to troubleshoot the abnormal connection? I have no direction at the moment.

I found the relevant information: the proxyro user. obproxy uses this user to connect to the observer, and if that connection fails, obproxy cannot work properly. See this document:
https://www.oceanbase.com/docs/community-odp-cn-10000000001715916
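
A rough checklist based on that document (a sketch only: '***' is a placeholder, and whether observer_sys_password takes the plaintext password or a hash may depend on the version, so follow the linked doc for the exact steps):

-- on the observer, sys tenant: confirm the proxyro account exists
SELECT user, host FROM mysql.user WHERE user = 'proxyro';
# from the obproxy host: verify proxyro can actually log in to the observer
mysql -h127.0.0.1 -P2881 -uproxyro@sys -p'***' -e "SELECT 1"
-- on obproxy, as root@proxysys: keep observer_sys_password consistent with the proxyro password
ALTER PROXYCONFIG SET observer_sys_password = '***';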

enable_cached_server=True
What does this parameter mean? I couldn't find an explanation for it anywhere.