root@pve3:~# lsblk
NAME          MAJ:MIN RM   SIZE RO TYPE MOUNTPOINTS
sda             8:0    1     0B  0 disk
sdb             8:16   1     0B  0 disk
sdc             8:32   1     0B  0 disk
sdd             8:48   1     0B  0 disk
sr0            11:0    1  1024M  0 rom
sr1            11:1    1  1024M  0 rom
sr2            11:2    1  1024M  0 rom
sr3            11:3    1  1024M  0 rom
nvme10n1      259:0    0   2.9T  0 disk
nvme7n1       259:1    0   2.9T  0 disk
nvme8n1       259:2    0   2.9T  0 disk
nvme6n1       259:3    0   2.9T  0 disk
nvme11n1      259:4    0   2.9T  0 disk
nvme5n1       259:5    0   2.9T  0 disk
nvme2n1       259:6    0   2.9T  0 disk
nvme4n1       259:7    0   2.9T  0 disk
nvme0n1       259:8    0 894.3G  0 disk
├─nvme0n1p1   259:10   0  1007K  0 part
├─nvme0n1p2   259:11   0     1G  0 part
└─nvme0n1p3   259:12   0   879G  0 part
nvme3n1       259:9    0   2.9T  0 disk
nvme12n1      259:13   0   2.9T  0 disk
nvme9n1       259:14   0   2.9T  0 disk
nvme1n1       259:15   0 894.3G  0 disk
├─nvme1n1p1   259:16   0  1007K  0 part
├─nvme1n1p2   259:17   0     1G  0 part
└─nvme1n1p3   259:18   0   879G  0 part /
root@pve3:~# mdadm --detail
mdadm: No devices given.
root@pve3:~# nano /etc/mdadm/mdadm.conf
root@pve3:~# mdadm --create --verbose /dev/md0 -l 10 -n 10 /dev/nvme2n1 /dev/nvme3n1 /dev/nvme4n1 /dev/nvme5n1 /dev/nvme6n1 /dev/nvme7n1 /dev/nvme8n1 /dev/nvme9n1 /dev/nvme10n1 /dev/nvme11n1
mdadm: layout defaults to n2
mdadm: layout defaults to n2
mdadm: chunk size defaults to 512K
mdadm: partition table exists on /dev/nvme2n1
mdadm: partition table exists on /dev/nvme2n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme3n1
mdadm: partition table exists on /dev/nvme3n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme4n1
mdadm: partition table exists on /dev/nvme4n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme5n1
mdadm: partition table exists on /dev/nvme5n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme6n1
mdadm: partition table exists on /dev/nvme6n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme7n1
mdadm: partition table exists on /dev/nvme7n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme8n1
mdadm: partition table exists on /dev/nvme8n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme9n1
mdadm: partition table exists on /dev/nvme9n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme10n1
mdadm: partition table exists on /dev/nvme10n1 but will be lost or meaningless after creating array
mdadm: partition table exists on /dev/nvme11n1
mdadm: partition table exists on /dev/nvme11n1 but will be lost or meaningless after creating array
mdadm: size set to 3125484544K
mdadm: automatically enabling write-intent bitmap on large array
Continue creating array? y
mdadm: Defaulting to version 1.2 metadata
mdadm: array /dev/md0 started.
root@pve3:~# mdadm --detail --scan >> /etc/mdadm/mdadm.conf
root@pve3:~# update-initramfs -u
update-initramfs: Generating /boot/initrd.img-6.8.12-10-pve
Running hook script 'zz-proxmox-boot'..
Re-executing '/etc/kernel/postinst.d/zz-proxmox-boot' in new private mount namespace..
Copying and configuring kernels on /dev/disk/by-uuid/CDB6-63C3
	Copying kernel and creating boot-entry for 6.8.12-10-pve
	Copying kernel and creating boot-entry for 6.8.12-9-pve
Copying and configuring kernels on /dev/disk/by-uuid/CDC6-2548
	Copying kernel and creating boot-entry for 6.8.12-10-pve
	Copying kernel and creating boot-entry for 6.8.12-9-pve
root@pve3:~# cat /proc/mdstat
Personalities : [raid0] [raid1] [raid6] [raid5] [raid4] [raid10]
md0 : active raid10 nvme11n1[9] nvme10n1[8] nvme9n1[7] nvme8n1[6] nvme7n1[5] nvme6n1[4] nvme5n1[3] nvme4n1[2] nvme3n1[1] nvme2n1[0]
      15627422720 blocks super 1.2 512K chunks 2 near-copies [10/10] [UUUUUUUUUU]
      [>....................]  resync =  0.0% (14200448/15627422720) finish=1301.0min speed=200009K/sec
      bitmap: 117/117 pages [468KB], 65536KB chunk

unused devices: <none>
root@pve3:~# mdadm --detail
mdadm: No devices given.
root@pve3:~# mdadm --detail /dev/md0
/dev/md0:
           Version : 1.2
     Creation Time : Fri Apr 25 16:54:10 2025
        Raid Level : raid10
        Array Size : 15627422720 (14.55 TiB 16.00 TB)
     Used Dev Size : 3125484544 (2.91 TiB 3.20 TB)
      Raid Devices : 10
     Total Devices : 10
       Persistence : Superblock is persistent

     Intent Bitmap : Internal

       Update Time : Fri Apr 25 16:55:38 2025
             State : clean, resyncing
    Active Devices : 10
   Working Devices : 10
    Failed Devices : 0
     Spare Devices : 0

            Layout : near=2
        Chunk Size : 512K

Consistency Policy : bitmap

     Resync Status : 0% complete

              Name : pve3:0  (local to host pve3)
              UUID : d87cfe99:e3cefa6b:f0982d1c:b7740266
            Events : 16

    Number   Major   Minor   RaidDevice State
       0     259        6        0      active sync set-A   /dev/nvme2n1
       1     259        9        1      active sync set-B   /dev/nvme3n1
       2     259        7        2      active sync set-A   /dev/nvme4n1
       3     259        5        3      active sync set-B   /dev/nvme5n1
       4     259        3        4      active sync set-A   /dev/nvme6n1
       5     259        1        5      active sync set-B   /dev/nvme7n1
       6     259        2        6      active sync set-A   /dev/nvme8n1
       7     259       14        7      active sync set-B   /dev/nvme9n1
       8     259        0        8      active sync set-A   /dev/nvme10n1
       9     259        4        9      active sync set-B   /dev/nvme11n1
root@pve3:~#
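The session ends with the 10-disk RAID10 array created and still resyncing in the background. As a minimal sketch of possible follow-up steps (not part of the original session: the ext4 filesystem, mount point /mnt/md0, and storage ID md0-data are all assumed, illustrative names), the array could be formatted, mounted, and registered as Proxmox directory storage once it is usable:

    # watch resync progress; the array is usable while it runs
    watch -n 5 cat /proc/mdstat

    # put a filesystem on the array and mount it persistently
    # (ext4 and /mnt/md0 are assumptions for illustration)
    mkfs.ext4 /dev/md0
    mkdir -p /mnt/md0
    echo '/dev/md0 /mnt/md0 ext4 defaults,nofail 0 2' >> /etc/fstab
    mount /mnt/md0

    # register the mount as directory storage in Proxmox
    # ("md0-data" is a hypothetical storage ID)
    pvesm add dir md0-data --path /mnt/md0 --content images,rootdir

Alternatively, /dev/md0 could be used as an LVM physical volume and exposed through an LVM-thin pool instead of a directory; either way, the mdadm.conf entry and initramfs update done above are what let the array assemble automatically at boot.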