RHEL-54496

lvremove of shared exclusive vdo results in failure to unlock [lvmlock] volume

    • Type: Bug
    • Resolution: Unresolved
    • Priority: Minor
    • rhel-9.5
    • lvm2
    • rhel-sst-logical-storage
    • ssg_filesystems_storage_and_HA
    • x86_64

      kernel-5.14.0-490.el9    BUILT: Fri Aug  2 10:42:23 PM CEST 2024
      lvm2-2.03.24-2.el9    BUILT: Wed Aug  7 09:41:45 PM CEST 2024
      lvm2-libs-2.03.24-2.el9    BUILT: Wed Aug  7 09:41:45 PM CEST 2024
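
      The transcript below starts from an already-created shared (sanlock-protected) VG named vdo_2_282. For context, such a VG would typically have been prepared along the following lines; this is a reconstruction based on the PVs visible later in the debug log (/dev/sda1, /dev/sdb1, /dev/sdd1, /dev/sdf1), not a verbatim part of the report:

      # Assumed prerequisites: use_lvmlockd = 1 in lvm.conf and a unique
      # host_id configured for this host, then the lock manager daemons running.
      systemctl start wdmd sanlock lvmlockd

      # Create the shared VG; lvm allocates the internal [lvmlock] LV for sanlock.
      vgcreate --shared vdo_2_282 /dev/sda1 /dev/sdb1 /dev/sdd1 /dev/sdf1
      vgs -o +locktype,lockargs vdo_2_282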
       
       
      [root@virt-522 ~]# vgchange --lock-start vdo_2_282
        VG vdo_2_282 starting sanlock lockspace
        Starting locking.  Waiting for sanlock may take 20 sec to 3 min...
       
      [root@virt-522 ~]# lvcreate --yes --activate ey --type vdo -L 5G -n vdo_2_2820 vdo_2_282
        Wiping vdo signature on /dev/vdo_2_282/vpool0.
          Logical blocks defaulted to 523108 blocks.
          The VDO volume can address 2 GB in 1 data slab.
          It can grow to address at most 16 TB of physical storage in 8192 slabs.
          If a larger maximum size might be needed, use bigger slabs.
        Logical volume "vdo_2_2820" created.
       
      [root@virt-522 ~]# lvremove -f vdo_2_282
        WARNING: Failed to unlock vdo_2_282/vdo_2_2820.
        Logical volume "vdo_2_2820" successfully removed.
      [root@virt-522 ~]# echo $?
      0
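
      The warning is not reflected in the exit status, so the output alone does not show whether the LV lock was actually left held. A possible way to inspect the lock state on the host (a suggested check, not part of the original reproduction):

      # Lock state as seen by lvmlockd (lockspaces and any LV locks still held)
      lvmlockctl --info

      # Resources currently held in sanlock; lvmlockd lockspaces are named lvm_<vgname>
      sanlock client status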
       
      [root@virt-522 ~]# lvs -a -o +devices
        LV        VG            Attr       LSize   Pool Origin Data%  Meta%  Move Log Cpy%Sync Convert Devices       
        [lvmlock] global        -wi-ao---- 256.00m                                                     /dev/sdh1(0)  
        root      rhel_virt-522 -wi-ao----  <8.00g                                                     /dev/vda2(256)
        swap      rhel_virt-522 -wi-ao----   1.00g                                                     /dev/vda2(0)  
        [lvmlock] vdo_2_282     -wi-ao---- 256.00m                                                     /dev/sdf1(0)  
       
      [root@virt-522 ~]# lvremove -f vdo_2_282
      [root@virt-522 ~]# 
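
      After this second lvremove returns silently, only the internal [lvmlock] LV should be left in the VG. A quick way to confirm that no stale LVs or device-mapper nodes remain (again a suggested check, not from the report):

      # Only the hidden sanlock lock LV should remain in the shared VG
      lvs -a vdo_2_282

      # Check for leftover device-mapper nodes from the removed VDO stack;
      # only vdo_2_282-lvmlock is expected while the lockspace is started
      dmsetup ls | grep vdo_2_282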
       
       
       
       
       
      [root@virt-522 ~]# lvremove -vvvv -f vdo_2_282 > /tmp/lvremove 2>&1
      [...]
      22:08:37.073107 lvremove[74422] device_mapper/libdm-deptree.c:1027  Removing vdo_2_282-vpool0-vpool (253:5)
      22:08:37.073126 lvremove[74422] device_mapper/libdm-common.c:2452  Udev cookie 0xd4da658 (semid 33) incremented to 3
      22:08:37.073137 lvremove[74422] device_mapper/libdm-common.c:2685  Udev cookie 0xd4da658 (semid 33) assigned to REMOVE task(2) with flags DISABLE_SUBSYSTEM_RULES DISABLE_DISK_RULES DISABLE_OTHER_RULES DISABLE_LIBRARY_FALLBACK         (0x2e)
      22:08:37.073147 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm remove   (253:5) [ noopencount flush retryremove ]   [2048] (*1)
      22:08:37.354350 lvremove[74422] device_mapper/libdm-common.c:1491  vdo_2_282-vpool0-vpool: Stacking NODE_DEL [trust_udev]
      22:08:37.354391 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info   (253:4) [ opencount flush ]   [2048] (*1)
      22:08:37.354413 lvremove[74422] device_mapper/libdm-deptree.c:1027  Removing vdo_2_282-vpool0_vdata (253:4)
      22:08:37.354434 lvremove[74422] device_mapper/libdm-common.c:2452  Udev cookie 0xd4da658 (semid 33) incremented to 2
      22:08:37.354444 lvremove[74422] device_mapper/libdm-common.c:2685  Udev cookie 0xd4da658 (semid 33) assigned to REMOVE task(2) with flags DISABLE_SUBSYSTEM_RULES DISABLE_DISK_RULES DISABLE_OTHER_RULES DISABLE_LIBRARY_FALLBACK         (0x2e)
      22:08:37.354479 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm remove   (253:4) [ noopencount flush retryremove ]   [2048] (*1)
      22:08:37.357986 lvremove[74422] device_mapper/libdm-common.c:1491  vdo_2_282-vpool0_vdata: Stacking NODE_DEL [trust_udev]
      22:08:37.358021 lvremove[74422] mm/memlock.c:645  Leaving section (deactivated).
      22:08:37.358035 lvremove[74422] activate/dev_manager.c:919  Getting device info for vdo_2_282-vdo_2_2820 [LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0cydl31ykhoyHKqxD6ADPna6yQ5FFwLXC].
      22:08:37.358047 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0cydl31ykhoyHKqxD6ADPna6yQ5FFwLXC [ noopencount flush ]   [2048] (*1)
      22:08:37.358085 lvremove[74422] activate/activate.c:2475  Deactivating vdo_2_282/vpool0.
      22:08:37.358097 lvremove[74422] activate/dev_manager.c:919  Getting device info for vdo_2_282-vpool0 [LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z-pool].
      22:08:37.358107 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z-pool [ opencount flush ]   [2048] (*1)
      22:08:37.358121 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z [ opencount flush ]   [2048] (*1)
      22:08:37.358134 lvremove[74422] activate/dev_manager.c:919  Getting device info for vdo_2_282-vpool0-vpool [LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z-vpool].
      22:08:37.358144 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z-vpool [ opencount flush ]   [2048] (*1)
      22:08:37.358156 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0ua3ZwmJCPDvrgv9Q0NWCstFJdBOe1Q0Z [ opencount flush ]   [2048] (*1)
      22:08:37.358169 lvremove[74422] activate/activate.c:2475  Deactivating vdo_2_282/vpool0_vdata.
      22:08:37.358179 lvremove[74422] activate/dev_manager.c:919  Getting device info for vdo_2_282-vpool0_vdata [LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi-vdata].
      22:08:37.358188 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi-vdata [ noopencount flush ]   [2048] (*1)
      22:08:37.358201 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi [ noopencount flush ]   [2048] (*1)
      22:08:37.358213 lvremove[74422] metadata/lv_manip.c:7738  Releasing logical volume "vdo_2_2820"
      22:08:37.358240 lvremove[74422] metadata/lv_manip.c:1193  Remove vdo_2_282/vdo_2_2820:0[0] from the top of LV vdo_2_282/vpool0:0.
      22:08:37.358250 lvremove[74422] metadata/lv_manip.c:896  vdo_2_282/vdo_2_2820:0 is no longer a user of vdo_2_282/vpool0.
      22:08:37.358265 lvremove[74422] activate/dev_manager.c:919  Getting device info for vdo_2_282-vpool0_vdata [LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi-vdata].
      22:08:37.358275 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi-vdata [ noopencount flush ]   [2048] (*1)
      22:08:37.358287 lvremove[74422] device_mapper/ioctl/libdm-iface.c:2100  dm info  LVM-GwPDM80Hg1C8hVi4nnZZI0WcRcIXjyi0Qekm1zdptmv8Fawpe4L8lUq6aRVt5IOi [ noopencount flush ]   [2048] (*1)
      22:08:37.358314 lvremove[74422] device_mapper/libdm-config.c:1084  devices/issue_discards not found in config: defaulting to 0
      22:08:37.358336 lvremove[74422] locking/lvmlockd.c:2474  lockd LV vdo_2_282/vpool0 mode un uuid ua3Zwm-JCPD-vrgv-9Q0N-WCst-FJdB-Oe1Q0Z
      22:08:37.360029 lvremove[74422] locking/lvmlockd.c:181  lockd_result 0 flags none lm sanlock
      22:08:37.360068 lvremove[74422] locking/lvmlockd.c:435  lvmlockd lock_lv un vg vdo_2_282 lv vpool0 result 0 0
      22:08:37.360091 lvremove[74422] locking/lvmlockd.c:2925  lockd free LV vdo_2_282/vpool0 ua3Zwm-JCPD-vrgv-9Q0N-WCst-FJdB-Oe1Q0Z lock_args 1.0.0:70254592
      22:08:37.364754 lvremove[74422] locking/lvmlockd.c:181  lockd_result 0 flags none lm sanlock
      22:08:37.364785 lvremove[74422] metadata/lv_manip.c:7751  Postponing write and commit.
      22:08:37.364795 lvremove[74422] locking/lvmlockd.c:2638  No vdo pool for vdo_2_282/vdo_2_2820
      22:08:37.364820 lvremove[74422] metadata/lv_manip.c:7767  WARNING: Failed to unlock vdo_2_282/vdo_2_2820.
      22:08:37.364840 lvremove[74422] metadata/metadata.c:2927  Writing metadata for VG vdo_2_282.
      22:08:37.364853 lvremove[74422] metadata/pv_manip.c:413  /dev/sdf1 0:      0     64: lvmlock(0:0)
      22:08:37.364890 lvremove[74422] metadata/pv_manip.c:413  /dev/sdf1 1:     64  17854: NULL(0:0)
      22:08:37.364900 lvremove[74422] metadata/pv_manip.c:413  /dev/sdd1 0:      0  17918: NULL(0:0)
      22:08:37.364910 lvremove[74422] metadata/pv_manip.c:413  /dev/sdb1 0:      0  17918: NULL(0:0)
      22:08:37.364919 lvremove[74422] metadata/pv_manip.c:413  /dev/sda1 0:      0  17918: NULL(0:0)
      22:08:37.365089 lvremove[74422] format_text/archiver.c:139  Archiving volume group "vdo_2_282" metadata (seqno 14).
      22:08:37.365917 lvremove[74422] mm/memlock.c:601  Unlock: Memlock counters: prioritized:1 locked:0 critical:0 daemon:0 suspended:0
      22:08:37.365944 lvremove[74422] mm/memlock.c:509  Restoring original task priority 0.
      22:08:37.365958 lvremove[74422] format_text/format-text.c:196  Reading mda header sector from /dev/sda1 at 4096
      22:08:37.366049 lvremove[74422] metadata/vg.c:65  Allocated VG vdo_2_282 at 0x55eab6ab0570.
      22:08:37.366078 lvremove[74422] format_text/import_vsn1.c:631  Importing logical volume vdo_2_282/lvmlock.
      22:08:37.366090 lvremove[74422] format_text/import_vsn1.c:755  Logical volume vdo_2_282/lvmlock is sanlock lv.
      22:08:37.366114 lvremove[74422] format_text/format-text.c:663  VG vdo_2_282 seqno 15 metadata write to /dev/sda1 mda_start 4096 mda_size 4190208 mda_last 4194303
      22:08:37.366124 lvremove[74422] format_text/format-text.c:372  VG vdo_2_282 15 new metadata start align from 39818 to 39936 (+118)
      22:08:37.366133 lvremove[74422] format_text/format-text.c:778  VG vdo_2_282 15 metadata area location old start 36352 last 39817 size 3466 wrap 0
      22:08:37.366143 lvremove[74422] format_text/format-text.c:785  VG vdo_2_282 15 metadata area location new start 39936 last 41991 size 2056 wrap 0
      22:08:37.366152 lvremove[74422] format_text/format-text.c:871  VG vdo_2_282 15 metadata disk location start 44032 size 2056 last 46087
      22:08:37.366161 lvremove[74422] format_text/format-text.c:910  VG vdo_2_282 15 metadata last align from 46087 to 46591 (+504)
      22:08:37.366220 lvremove[74422] format_text/format-text.c:958  VG vdo_2_282 15 metadata write at 44032 size 2560 (wrap 0)
      22:08:37.367015 lvremove[74422] format_text/format-text.c:196  Reading mda header sector from /dev/sdb1 at 4096
      22:08:37.367039 lvremove[74422] format_text/format-text.c:663  VG vdo_2_282 seqno 15 metadata write to /dev/sdb1 mda_start 4096 mda_size 4190208 mda_last 4194303
      22:08:37.367049 lvremove[74422] format_text/format-text.c:372  VG vdo_2_282 15 new metadata start align from 39818 to 39936 (+118)
      22:08:37.367058 lvremove[74422] format_text/format-text.c:778  VG vdo_2_282 15 metadata area location old start 36352 last 39817 size 3466 wrap 0
      22:08:37.367067 lvremove[74422] format_text/format-text.c:785  VG vdo_2_282 15 metadata area location new start 39936 last 41991 size 2056 wrap 0
      22:08:37.367076 lvremove[74422] format_text/format-text.c:871  VG vdo_2_282 15 metadata disk location start 44032 size 2056 last 46087
      22:08:37.367085 lvremove[74422] format_text/format-text.c:910  VG vdo_2_282 15 metadata last align from 46087 to 46591 (+504)
      22:08:37.367098 lvremove[74422] format_text/format-text.c:958  VG vdo_2_282 15 metadata write at 44032 size 2560 (wrap 0)
      22:08:37.367846 lvremove[74422] format_text/format-text.c:196  Reading mda header sector from /dev/sdd1 at 4096
      [...]
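
      For convenience, the manual steps above can be condensed into a small reproducer. This is a sketch based on the transcript, assuming lvmlockd/sanlock are running and the shared VG vdo_2_282 already exists; it is not taken verbatim from the report:

      #!/bin/bash
      # Reproducer sketch for the "Failed to unlock" warning on a shared VDO LV.
      set -x
      VG=vdo_2_282
      LV=vdo_2_2820

      # Join the sanlock lockspace for the shared VG (can take up to a few minutes).
      vgchange --lock-start "$VG"

      # Create and exclusively activate a VDO LV in the shared VG.
      lvcreate --yes --activate ey --type vdo -L 5G -n "$LV" "$VG"

      # Removing the LV succeeds and exits 0, but prints the unlock warning.
      out=$(lvremove -f "$VG" 2>&1)
      echo "$out"
      echo "$out" | grep -q "Failed to unlock" && echo "=== warning reproduced ==="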
      

              lvm-team
              Corey Marthaler (cmarthal@redhat.com)
              Cluster QE
              Votes: 0
              Watchers: 6