ceph-cluster map
The cluster topology is known through these 5 cluster maps: the monitor map, the OSD map, the PG map, the CRUSH map, and the MDS map.
======================================
Related commands (the ceph CLI has tab completion, just like a switch command line):

ceph mon dump
ceph osd dump
ceph fs dump
ceph pg dump

The CRUSH map is stored in binary form and has to be decompiled to get text:

ceph osd getcrushmap -o crush
crushtool -d crush -o crush1
======================================
[root@ali-1 dd]# ceph mon dump
dumped monmap epoch 5
epoch 5
fsid 69e6081b-075f-4f39-8cf3-f1e5bd68908b
last_changed 2019-03-12 13:58:31.228140
created 2019-03-12 12:14:21.704124
0: 192.168.3.51:6789/0 mon.ali-1
1: 192.168.3.52:6789/0 mon.ali-2
2: 192.168.3.53:6789/0 mon.ali-3
======================================
[root@ali-1 dd]# ceph fs dump
dumped fsmap epoch 1
e1
enable_multiple, ever_enabled_multiple: 0,0
compat: compat={},rocompat={},incompat={1=base v0.20,2=client writeable ranges,3=default file layouts on dirs,4=dir inode in separate object,5=mds uses versioned encoding,6=dirfrag is stored in omap,8=file layout v2}
legacy client fscid: -1

No filesystems configured
======================================
[root@ali-1 dd]# ceph pg dump
dumped all
version 544167
stamp 2019-03-20 10:28:24.077612
last_osdmap_epoch 816
last_pg_scan 816
full_ratio 0.9
nearfull_ratio 0.8

[root@ceph1 ~]# ceph pg ls
PG  OBJECTS DEGRADED MISPLACED UNFOUND BYTES LOG STATE        STATE_STAMP                VERSION REPORTED UP        ACTING    SCRUB_STAMP                DEEP_SCRUB_STAMP
1.0       0        0         0       0     0   2 active+clean 2019-03-28 02:42:54.430131 16'2    57:95    [1,2,0]p1 [1,2,0]p1 2019-03-28 02:42:54.430020 2019-03-28 02:42:54.430020
1.1       0        0         0       0     0   0 active+clean 2019-03-27 20:42:33.846731 0'0     57:78    [2,0,1]p2 [2,0,1]p2 2019-03-27 20:42:33.846600 2019-03-27 20:42:33.846600
1.2       0        0         0       0     0   0 active+clean 2019-03-27 20:02:31.853254 0'0     57:92    [1,0,2]p1 [1,0,2]p1 2019-03-27 20:02:31.853127 2019-03-21 18:53:07.286885
1.3       0        0         0       0     0   0 active+clean 2019-03-28 01:04:29.499574 0'0     57:94    [0,1,2]p0 [0,1,2]p0 2019-03-28 01:04:29.499476 2019-03-21 18:53:07.286885
1.4       0        0         0       0     0   0 active+clean 2019-03-28 10:17:42.694788 0'0     57:77    [2,1,0]p2 [2,1,0]p2 2019-03-28 10:17:42.694658 2019-03-21 18:53:07.286885
1.5       0        0         0       0     0   0 active+clean 2019-03-28 14:33:49.922515 0'0     57:78    [2,0,1]p2 [2,0,1]p2 2019-03-28 14:33:49.922414 2019-03-21 18:53:07.286885
1.6       0        0         0       0     0   0 active+clean 2019-03-28 08:33:08.897114 0'0     57:78    [2,1,0]p2 [2,1,0]p2 2019-03-28 08:33:08.897044 2019-03-25 19:51:32.716535
1.7       0        0         0       0     0   0 active+clean 2019-03-27 21:37:16.417698 0'0     57:92    [1,2,0]p1 [1,2,0]p1 2019-03-27 21:37:16.417553 2019-03-22 23:05:53.863908
2.0       1        0         0       0   337   1 active+clean 2019-03-27 15:07:09.127196 19'1    57:155   [1,2,0]p1 [1,2,0]p1 2019-03-27 15:07:09.127107 2019-03-22 15:05:32.211389
2.1       0        0         0       0     0   0 active+clean 2019-03-27 20:55:41.958378 0'0     57:89    [0,2,1]p0 [0,2,1]p0 2019-03-27 20:55:41.958328 2019-03-27 20:55:41.958328
2.2       0        0         0       0     0   0 active+clean 2019-03-28 03:09:45.117140 0'0     57:87    [1,0,2]p1 [1,0,2]p1 2019-03-28 03:09:45.117036 2019-03-28 03:09:45.117036
2.3       0        0         0       0     0   0 active+clean 2019-03-27 08:54:17.944907 0'0     57:87    [1,0,2]p1 [1,0,2]p1 2019-03-27 08:54:17.944792 2019-03-26 05:44:21.586541
2.4       0        0         0       0     0   0 active+clean 2019-03-27 23:42:52.040458 0'0     57:89    [0,2,1]p0 [0,2,1]p0 2019-03-27 23:42:52.040353 2019-03-22 15:05:32.211389
2.5       0        0         0       0     0   0 active+clean 2019-03-27 14:26:15.908085 0'0     57:73    [2,0,1]p2 [2,0,1]p2 2019-03-27 14:26:15.908022 2019-03-22 15:05:32.211389
2.6       1        0         0       0   736   2 active+clean 2019-03-28 15:00:22.282027 33'2    57:161   [0,2,1]p0 [0,2,1]p0 2019-03-28 15:00:22.281923 2019-03-26 05:39:41.395132
2.7       2        0         0       0    92   4 active+clean 2019-03-27 17:09:39.415262 41'4    57:253   [1,2,0]p1 [1,2,0]p1 2019-03-27 17:09:39.415167 2019-03-27 17:09:39.415167
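ceph pg dump is verbose; the pg listing can also be filtered instead of dumped wholesale. A small sketch of narrower queries (rbdpool is a hypothetical pool name here; the subcommands themselves are standard ceph CLI):

ceph pg ls-by-pool rbdpool     # pgs belonging to one pool
ceph pg ls-by-osd 0            # pgs with a replica on osd.0
ceph pg dump_stuck unclean     # only pgs stuck in a non-clean state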
[root@ceph1 rbdpool]# ceph pg map 8.13
osdmap e55 pg 8.13 (8.13) -> up [0,2,1] acting [0,2,1]

A pg id is made up of {pool-num}.{pg-id}; the pool numbers can be listed with:

ceph osd lspools

[root@ceph1 rbdpool]# ceph pg stat
124 pgs: 124 active+clean; 56 GiB data, 172 GiB used, 8.4 GiB / 180 GiB avail

[root@client mnt]# rm -rf a*

Only after the delete above does the pg cleanup actually start:

[root@ceph1 rbdpool]# ceph pg stat
124 pgs: 124 active+clean; 2.5 MiB data, 3.5 GiB used, 177 GiB / 180 GiB avail; 8.7 KiB/s rd, 85 B/s wr, 479 op/s
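Going one level further down, the placement of a single object can be queried without ever writing it. A hedged sketch (rbdpool and the object name foo are made up for illustration); the output shows the object's pg plus its up and acting OSD sets, similar to ceph pg map:

ceph osd map rbdpool foo    # pool name, then object name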
======================================
[root@ali-1 dd]# ceph osd getcrushmap -o crush
39
[root@ali-1 dd]# file crush
crush: MS Windows icon resource - 8 icons, 2-colors
[root@ali-1 dd]# crushtool -d crush -o crush1
[root@ali-1 dd]# file crush1
crush1: ASCII text
[root@ali-1 dd]# cat crush1
# begin crush map
tunable choose_local_tries 0
tunable choose_local_fallback_tries 0
tunable choose_total_tries 50
......
rule pool-d83c6154956b44aea7639c7bd4c45c65-rule {
        id 1
        type replicated
        min_size 1
        max_size 10
        step take pool-d83c6154956b44aea7639c7bd4c45c65-root
        step chooseleaf firstn 3 type rack
        step emit
}
# end crush map
[root@ali-1 dd]#

(The "MS Windows icon" guess from file is bogus; it just does not recognize the binary map format. The 39 echoed by getcrushmap matches crush_version 39 in the ceph osd dump below.)
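The round trip also works in reverse: edit crush1, recompile it, and inject it back into the cluster. A minimal sketch (crush.new is an arbitrary file name; the --test run is a dry check of rule 1 from the map above, worth doing because injecting a bad map can trigger mass data movement):

crushtool -c crush1 -o crush.new                                    # compile text back to binary
crushtool -i crush.new --test --rule 1 --num-rep 3 --show-mappings  # dry-run the resulting mappings
ceph osd setcrushmap -i crush.new                                   # inject into the cluster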
======================================
[root@ali-1 dd]# ceph osd dump
epoch 816
fsid 69e6081b-075f-4f39-8cf3-f1e5bd68908b
created 2019-03-12 12:14:22.409031
modified 2019-03-19 20:51:38.522821
flags nodeep-scrub,sortbitwise,recovery_deletes,purged_snapdirs
crush_version 39
full_ratio 0.9
backfillfull_ratio 0.85
nearfull_ratio 0.8
omap_full_ratio 0.9
omap_backfillfull_ratio 0.85
omap_nearfull_ratio 0.8
require_min_compat_client luminous
min_compat_client luminous
require_osd_release luminous
pool 1 'pool-d83c6154956b44aea7639c7bd4c45c65' replicated size 3 min_size 1 crush_rule 1 object_hash rjenkins pg_num 2048 pgp_num 2048 last_change 202 flags hashpspool stripe_width 0 async_recovery_max_updates 60 osd_backfillfull_ratio 0.85 osd_full_ratio 0.9 osd_nearfull_ratio 0.8 osd_omap_backfillfull_ratio 0.85 osd_omap_nearfull_ratio 0.8 removed_snaps [1~7]
max_osd 24
osd.0 down out weight 0 up_from 199 up_thru 199 down_at 207 last_clean_interval [20,196) 192.168.3.53:6811/171409 192.168.3.53:6813/171409 192.168.3.53:6814/171409 192.168.3.53:6815/171409 192.168.1.53:6803/171409 autoout,exists 54e32850-b1ef-44e1-8df9-d3c93bfe4807
osd.1 down out weight 0 up_from 199 up_thru 199 down_at 205 last_clean_interval [28,196) 192.168.3.53:6824/172426 192.168.3.53:6825/172426 192.168.3.53:6826/172426 192.168.3.53:6827/172426 192.168.1.53:6806/172426 autoout,exists 17af8207-2a25-405b-b87d-1c6d7806cc8d
osd.2 down out weight 0 up_from 199 up_thru 199 down_at 205 last_clean_interval [26,196) 192.168.3.53:6802/169882 192.168.3.53:6806/169882 192.168.3.53:6810/169882 192.168.3.53:6812/169882 192.168.1.53:6800/169882 autoout,exists 06cf6578-e516-4e4a-a494-10423b8999cd
osd.3 down out weight 0 up_from 199 up_thru 199 down_at 203 last_clean_interval [22,196) 192.168.3.53:6816/171705 192.168.3.53:6817/171705 192.168.3.53:6818/171705 192.168.3.53:6819/171705 192.168.1.53:6804/171705 autoout,exists bc31e4ab-a135-4782-81b3-e92969921ba7
osd.4 up in weight 1 up_from 199 up_thru 806 down_at 198 last_clean_interval [32,196) 192.168.3.53:6828/172791 192.168.3.53:6829/172791 192.168.3.53:6830/172791 192.168.3.53:6831/172791 192.168.1.53:6807/172791 exists,up 62edd341-50b8-4cca-852f-852a51f96760
osd.5 up in weight 1 up_from 198 up_thru 808 down_at 197 last_clean_interval [34,196) 192.168.3.53:6820/172091 192.168.3.53:6821/172091 192.168.3.53:6822/172091 192.168.3.53:6823/172091 192.168.1.53:6805/172091 exists,up 00d0cd89-2e74-4709-b4b4-6deaf465b97e
osd.6 up in weight 1 up_from 198 up_thru 791 down_at 197 last_clean_interval [30,196) 192.168.3.53:6805/171250 192.168.3.53:6807/171250 192.168.3.53:6808/171250 192.168.3.53:6809/171250 192.168.1.53:6802/171250 exists,up 8ed2597f-1a92-4b90-8036-43b7953cffea
osd.7 up in weight 1 up_from 199 up_thru 809 down_at 198 last_clean_interval [24,196) 192.168.3.53:6800/170832 192.168.3.53:6801/170832 192.168.3.53:6803/170832 192.168.3.53:6804/170832 192.168.1.53:6801/170832 exists,up f5723232-3f04-4c22-9394-bdc69d7bcff6
osd.8 up in weight 1 up_from 199 up_thru 741 down_at 198 last_clean_interval [21,196) 192.168.3.52:6804/451677 192.168.3.52:6805/451677 192.168.3.52:6806/451677 192.168.3.52:6807/451677 192.168.1.52:6801/451677 exists,up f75a6ee5-cd79-499c-9926-db400f0bed93
osd.9 up in weight 1 up_from 199 up_thru 767 down_at 198 last_clean_interval [25,196) 192.168.3.52:6816/452498 192.168.3.52:6817/452498 192.168.3.52:6818/452498 192.168.3.52:6819/452498 192.168.1.52:6804/452498 exists,up 30431fd9-306c-4037-a5bd-cf6b9bc77ca1
osd.10 up in weight 1 up_from 199 up_thru 811 down_at 198 last_clean_interval [33,196) 192.168.3.52:6828/453576 192.168.3.52:6829/453576 192.168.3.52:6830/453576 192.168.3.52:6831/453576 192.168.1.52:6807/453576 exists,up 6ed49e4d-d640-4466-957e-94d2f4ba055f
osd.11 up in weight 1 up_from 199 up_thru 749 down_at 198 last_clean_interval [27,196) 192.168.3.52:6812/452162 192.168.3.52:6813/452162 192.168.3.52:6814/452162 192.168.3.52:6815/452162 192.168.1.52:6803/452162 exists,up 46333226-5c5e-475c-8b41-d58980da3f43
osd.12 up in weight 1 up_from 199 up_thru 777 down_at 198 last_clean_interval [29,196) 192.168.3.52:6824/453249 192.168.3.52:6825/453249 192.168.3.52:6826/453249 192.168.3.52:6827/453249 192.168.1.52:6806/453249 exists,up 6168f2cd-de56-4529-8fe5-c80e93f134cd
osd.13 up in weight 1 up_from 199 up_thru 735 down_at 198 last_clean_interval [31,196) 192.168.3.52:6820/452905 192.168.3.52:6821/452905 192.168.3.52:6822/452905 192.168.3.52:6823/452905 192.168.1.52:6805/452905 exists,up 26e54a1c-601a-4f3b-afdc-a0c5b140affc
osd.14 up in weight 1 up_from 199 up_thru 726 down_at 198 last_clean_interval [23,196) 192.168.3.52:6808/451987 192.168.3.52:6809/451987 192.168.3.52:6810/451987 192.168.3.52:6811/451987 192.168.1.52:6802/451987 exists,up fa366bda-3ac8-4056-8114-b156acffb4aa
osd.15 up in weight 1 up_from 199 up_thru 733 down_at 198 last_clean_interval [35,196) 192.168.3.52:6800/451520 192.168.3.52:6801/451520 192.168.3.52:6802/451520 192.168.3.52:6803/451520 192.168.1.52:6800/451520 exists,up e9a16507-7121-465c-af80-9d371a9018ad
osd.16 down out weight 0 up_from 198 up_thru 389 down_at 519 last_clean_interval [45,196) 192.168.3.51:6804/357119 192.168.3.51:6805/357119 192.168.3.51:6806/357119 192.168.3.51:6807/357119 192.168.1.51:6801/357119 autoout,exists c39c2030-4ad2-49b2-a2bd-d6f26d9cc2c8
osd.17 down out weight 0 up_from 198 up_thru 433 down_at 521 last_clean_interval [51,196) 192.168.3.51:6824/358617 192.168.3.51:6825/358617 192.168.3.51:6826/358617 192.168.3.51:6827/358617 192.168.1.51:6806/358617 autoout,exists 9fa68652-dda8-485a-9363-92d109bc7283
osd.18 down out weight 0 up_from 199 up_thru 501 down_at 521 last_clean_interval [49,196) 192.168.3.51:6808/357481 192.168.3.51:6809/357481 192.168.3.51:6810/357481 192.168.3.51:6811/357481 192.168.1.51:6802/357481 autoout,exists f91dc889-379d-427a-8251-9525deb70603
osd.19 up in weight 1 up_from 199 up_thru 781 down_at 198 last_clean_interval [44,196) 192.168.3.51:6820/358465 192.168.3.51:6821/358465 192.168.3.51:6822/358465 192.168.3.51:6823/358465 192.168.1.51:6805/358465 exists,up 254c1dc1-c5aa-406d-a144-408c757f6b34
osd.20 down out weight 0 up_from 199 up_thru 486 down_at 521 last_clean_interval [50,196) 192.168.3.51:6800/356918 192.168.3.51:6801/356918 192.168.3.51:6802/356918 192.168.3.51:6803/356918 192.168.1.51:6800/356918 autoout,exists c13c44fd-397f-465d-bc14-917e8899e2fd
osd.21 up in weight 1 up_from 199 up_thru 805 down_at 198 last_clean_interval [47,196) 192.168.3.51:6812/357957 192.168.3.51:6813/357957 192.168.3.51:6814/357957 192.168.3.51:6815/357957 192.168.1.51:6803/357957 exists,up c5028149-28ec-4bd4-a5fe-3d13bdb82c6a
osd.22 up in weight 1 up_from 199 up_thru 801 down_at 198 last_clean_interval [46,196) 192.168.3.51:6816/358291 192.168.3.51:6817/358291 192.168.3.51:6818/358291 192.168.3.51:6819/358291 192.168.1.51:6804/358291 exists,up 27c2a32e-eef3-41c9-9650-15246fb20ac4
osd.23 up in weight 1 up_from 199 up_thru 809 down_at 198 last_clean_interval [48,196) 192.168.3.51:6828/358827 192.168.3.51:6829/358827 192.168.3.51:6830/358827 192.168.3.51:6831/358827 192.168.1.51:6807/358827 exists,up 4f877615-df0d-40d0-a351-a21dc518c3f4
pg_upmap_items 1.1 [0,2]
pg_upmap_items 1.2 [0,2,19,18]
pg_upmap_items 1.3 [4,2]
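The pg_upmap_items entries at the end are explicit exceptions to CRUSH placement, available since Luminous (this cluster's require_min_compat_client luminous permits them). Each [from,to] pair remaps one OSD of that pg, so 1.2 [0,2,19,18] moves that pg's copies from osd.0 to osd.2 and from osd.19 to osd.18. A sketch of how such entries are set and removed, using pg 1.1's values from this dump:

ceph osd pg-upmap-items 1.1 0 2    # remap pg 1.1's replica from osd.0 to osd.2
ceph osd rm-pg-upmap-items 1.1     # drop the exception; plain CRUSH placement applies again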