BUG: workqueue lockup - pool cpus=0 node=0 flags=0x0 nice=0 stuck for 217s!
Showing busy workqueues and worker pools:
workqueue events: flags=0x100
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=17 refcnt=18
    in-flight: 5992:switchdev_deferred_process_work switchdev_deferred_process_work
    pending: 5*nsim_dev_hwstats_traffic_work, psi_avgs_work, vmpressure_work_fn, vmstat_shepherd, 5*ovs_dp_masks_rebalance, drain_local_obj_stock, rht_deferred_worker
  pwq 6: cpus=1 node=0 flags=0x0 nice=0 active=12 refcnt=13
    pending: 5*nsim_dev_hwstats_traffic_work, 2*psi_avgs_work, 5*ovs_dp_masks_rebalance
workqueue events_highpri: flags=0x110
  pwq 3: cpus=0 node=0 flags=0x0 nice=-20 active=1 refcnt=2
    pending: fill_page_cache_func
workqueue events_long: flags=0x100
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=5 refcnt=6
    pending: 2*defense_work_handler, 2*br_fdb_cleanup, br_multicast_gc_work
  pwq 6: cpus=1 node=0 flags=0x0 nice=0 active=9 refcnt=10
    pending: 8*defense_work_handler, br_fdb_cleanup
workqueue events_unbound: flags=0x2
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=2 refcnt=3
    pending: toggle_allocation_gate, flush_memcg_stats_dwork
workqueue events_unbound: flags=0x2
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=4 refcnt=5
    in-flight: 16401:linkwatch_event linkwatch_event
    pending: 2*cfg80211_wiphy_work, idle_cull_fn
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=22 refcnt=23
    in-flight: 15130:fsnotify_mark_destroy_workfn fsnotify_mark_destroy_workfn, 13:fsnotify_connector_destroy_workfn fsnotify_connector_destroy_workfn
    pending: 10*nsim_dev_trap_report_work, 2*idle_cull_fn, macvlan_process_broadcast, cfg80211_wiphy_work, crng_reseed, 2*macvlan_process_broadcast
workqueue events_power_efficient: flags=0x180
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=9 refcnt=10
    pending: neigh_managed_work, gc_worker, check_lifetime, neigh_periodic_work, do_cache_clean, check_lifetime, hash_ipmark6_gc, 2*check_lifetime
  pwq 6: cpus=1 node=0 flags=0x0 nice=0 active=4 refcnt=5
    pending: wg_ratelimiter_gc_entries, neigh_managed_work, check_lifetime, neigh_periodic_work
workqueue rcu_gp: flags=0x108
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: srcu_invoke_callbacks
workqueue kvfree_rcu_reclaim: flags=0xa
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: kfree_rcu_monitor
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: kfree_rcu_monitor
workqueue netns: flags=0x6000a
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=4
    in-flight: 12327:cleanup_net
workqueue mm_percpu_wq: flags=0x8
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: vmstat_update
  pwq 6: cpus=1 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: vmstat_update
workqueue writeback: flags=0x4a
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wb_update_bandwidth_workfn
workqueue iou_exit: flags=0x2
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=64 refcnt=3821
    in-flight: 12147:io_ring_exit_work, 16960:io_ring_exit_work, 13445:io_ring_exit_work, 16786:io_ring_exit_work, 17536:io_ring_exit_work, 12:io_ring_exit_work, 16957:io_ring_exit_work, 16787:io_ring_exit_work, 16405:io_ring_exit_work, 6118:io_ring_exit_work, 6136:io_ring_exit_work, 6504:io_ring_exit_work, 15131:io_ring_exit_work, 16785:io_ring_exit_work, 37:io_ring_exit_work, 12641:io_ring_exit_work, 15697:io_ring_exit_work, 12331:io_ring_exit_work, 16674:io_ring_exit_work, 12330:io_ring_exit_work, 16784:io_ring_exit_work, 16783:io_ring_exit_work, 16404:io_ring_exit_work, 11366:io_ring_exit_work, 4085:io_ring_exit_work, 15119:io_ring_exit_work, 16961:io_ring_exit_work, 6200:io_ring_exit_work, 17833:io_ring_exit_work, 50:io_ring_exit_work, 6000:io_ring_exit_work, 17386:io_ring_exit_work, 6047:io_ring_exit_work, 16781:io_ring_exit_work, 17387:io_ring_exit_work, 15698:io_ring_exit_work, 17388:io_ring_exit_work, 3514:io_ring_exit_work, 12324:io_ring_exit_work, 15129:io_ring_exit_work, 999:io_ring_exit_work, 16959:io_ring_exit_work, 17389:io_ring_exit_work, 12638:io_ring_exit_work, 16964:io_ring_exit_work, 16402:io_ring_exit_work, 3485:io_ring_exit_work, 15133:io_ring_exit_work, 17385:io_ring_exit_work, 16782:io_ring_exit_work, 3868:io_ring_exit_work, 16963:io_ring_exit_work, 6040:io_ring_exit_work, 1316:io_ring_exit_work, 16780:io_ring_exit_work, 1139:io_ring_exit_work, 1310:io_ring_exit_work, 7283:io_ring_exit_work, 12326:io_ring_exit_work, 6014:io_ring_exit_work, 12332:io_ring_exit_work, 16863:io_ring_exit_work, 15132:io_ring_exit_work, 7025:io_ring_exit_work
    inactive: 3756*io_ring_exit_work
workqueue ipv6_addrconf: flags=0x6000a
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=41
    in-flight: 16403:addrconf_dad_work
    inactive: 27*addrconf_dad_work, 10*addrconf_verify_work
workqueue bat_events: flags=0x6000a
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=66
    in-flight: 17537:batadv_iv_send_outstanding_bat_ogm_packet
    inactive: batadv_tt_purge, batadv_dat_purge, 2*batadv_bla_periodic_work, batadv_tt_purge, batadv_bla_periodic_work, batadv_dat_purge, batadv_bla_periodic_work, 2*batadv_dat_purge, batadv_bla_periodic_work, 2*batadv_dat_purge, batadv_bla_periodic_work, 6*batadv_tt_purge, batadv_bla_periodic_work, batadv_dat_purge, batadv_bla_periodic_work, batadv_dat_purge, batadv_bla_periodic_work, batadv_dat_purge, batadv_iv_send_outstanding_bat_ogm_packet, 9*batadv_mcast_mla_update, 6*batadv_iv_send_outstanding_bat_ogm_packet, batadv_purge_orig, batadv_iv_send_outstanding_bat_ogm_packet, 2*batadv_purge_orig, 6*batadv_iv_send_outstanding_bat_ogm_packet, 6*batadv_purge_orig, 3*batadv_iv_send_outstanding_bat_ogm_packet, batadv_tt_purge
workqueue wg-kex-wg0: flags=0x124
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_receive_worker
workqueue wg-crypt-wg0: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=3 refcnt=4
    pending: wg_packet_tx_worker, wg_packet_encrypt_worker, wg_packet_decrypt_worker
workqueue wg-kex-wg1: flags=0x124
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_receive_worker
workqueue wg-kex-wg1: flags=0x6
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_send_worker
workqueue wg-crypt-wg1: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=2 refcnt=3
    pending: wg_packet_encrypt_worker, wg_packet_tx_worker
workqueue wg-kex-wg2: flags=0x124
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_receive_worker
workqueue wg-kex-wg2: flags=0x6
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_send_worker
workqueue wg-crypt-wg2: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=2 refcnt=3
    pending: wg_packet_encrypt_worker, wg_packet_tx_worker
workqueue wg-kex-wg0: flags=0x124
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_receive_worker
workqueue wg-kex-wg0: flags=0x6
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_send_worker
workqueue wg-crypt-wg0: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=3 refcnt=4
    pending: wg_packet_tx_worker, wg_packet_encrypt_worker, wg_packet_decrypt_worker
workqueue wg-kex-wg1: flags=0x6
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_send_worker
workqueue wg-crypt-wg1: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=3 refcnt=4
    pending: wg_packet_encrypt_worker, wg_packet_tx_worker, wg_packet_decrypt_worker
workqueue wg-kex-wg2: flags=0x6
  pwq 8: cpus=0-1 flags=0x4 nice=0 active=1 refcnt=2
    pending: wg_packet_handshake_send_worker
workqueue wg-crypt-wg2: flags=0x128
  pwq 2: cpus=0 node=0 flags=0x0 nice=0 active=2 refcnt=3
    pending: wg_packet_encrypt_worker, wg_packet_tx_worker
  pwq 6: cpus=1 node=0 flags=0x0 nice=0 active=1 refcnt=2
    pending: wg_packet_encrypt_worker
workqueue hci3: flags=0x20012
  pwq 9: cpus=0-1 node=0 flags=0x6 nice=-20 active=1 refcnt=4
    in-flight: 14298:hci_cmd_sync_work
workqueue hci4: flags=0x20012
  pwq 9: cpus=0-1 node=0 flags=0x6 nice=-20 active=1 refcnt=4
    pending: hci_cmd_work
workqueue hci4: flags=0x20012
  pwq 9: cpus=0-1 node=0 flags=0x6 nice=-20 active=1 refcnt=4
    in-flight: 16048:hci_cmd_sync_work
workqueue hci5: flags=0x20012
  pwq 9: cpus=0-1 node=0 flags=0x6 nice=-20 active=1 refcnt=4
    pending: hci_conn_timeout
workqueue hci7: flags=0x20012
  pwq 9: cpus=0-1 node=0 flags=0x6 nice=-20 active=1 refcnt=4
    pending: hci_conn_timeout
pool 2: cpus=0 node=0 flags=0x0 nice=0 hung=218s workers=9 idle: 5996 10 5909 114 13259 6015 5916 12359
pool 8: cpus=0-1 flags=0x4 nice=0 hung=110s workers=76 idle: 57 18020 18010 18017 18013 18014
pool 9: cpus=0-1 node=0 flags=0x6 nice=-20 hung=14s workers=3 manager: 12231
Showing backtraces of running workers in stalled CPU-bound worker pools: