diff --git a/ocaml/tests/test_vm_helpers.ml b/ocaml/tests/test_vm_helpers.ml index bdd8dc061d8..4aa506c3ddf 100644 --- a/ocaml/tests/test_vm_helpers.ml +++ b/ocaml/tests/test_vm_helpers.ml @@ -159,7 +159,7 @@ let rec assert_equivalent expected_grouping actual_grouping = assert_host_groups_equal e g ; assert_equivalent es gs -let assert_host_groups_equal_for_vgpu g g' = +let assert_host_groups_equal g g' = match g' with | [] -> () @@ -170,7 +170,7 @@ let assert_host_groups_equal_for_vgpu g g' = Alcotest.(check (slist string String.compare)) "check host strings" (extract_host_strings g) (extract_host_strings g') -let rec assert_equivalent_for_vgpu expected_grouping actual_grouping = +let rec assert_equivalent_for_grouping expected_grouping actual_grouping = match (expected_grouping, actual_grouping) with | [], [] -> () @@ -181,19 +181,23 @@ let rec assert_equivalent_for_vgpu expected_grouping actual_grouping = Alcotest.fail (Printf.sprintf "%d fewer groups than expected." (List.length xx)) | e :: es, g :: gs -> - assert_host_groups_equal_for_vgpu e g ; - assert_equivalent_for_vgpu es gs + assert_host_groups_equal e g ; + assert_equivalent_for_grouping es gs let assert_grouping ~__context gpu_group ~visible_hosts vgpu_type g = let vgpu = VGPU_T.make_vgpu ~__context ~gPU_group:gpu_group vgpu_type in let host_lists = rank_hosts_by_best_vgpu ~__context vgpu visible_hosts in - assert_equivalent_for_vgpu g host_lists + assert_equivalent_for_grouping g host_lists let check_expectations ~__context gpu_group visible_hosts vgpu_type expected_grouping = assert_grouping ~__context gpu_group ~visible_hosts vgpu_type expected_grouping +let check_anti_affinity_grouping ~__context ~vm ~group expected_grouping = + let host_lists = rank_hosts_by_placement ~__context ~vm ~group in + assert_equivalent_for_grouping expected_grouping host_lists + let test_group_hosts_bf () = on_pool_of_k1s VGPU_T.( @@ -524,6 +528,28 @@ let test_group_hosts_netsriov_with_allocated () = "Test-failure: Unexpected number of sriov network in test" ) +let test_get_group_key_anti_affinity () = + let __context = T.make_test_database () in + let vm = T.make_vm ~__context () in + let group = T.make_vm_group ~__context ~placement:`anti_affinity () in + Db.VM.set_groups ~__context ~self:vm ~value:[group] ; + match Xapi_vm_helpers.get_group_key ~__context ~vm with + | `AntiAffinity _ -> + () + | _ -> + Alcotest.fail "Test-failure: Unexpected Group Key in test" + +let test_get_group_key_normal_group () = + let __context = T.make_test_database () in + let vm = T.make_vm ~__context () in + let normal_group = T.make_vm_group ~__context ~placement:`normal () in + Db.VM.set_groups ~__context ~self:vm ~value:[normal_group] ; + match Xapi_vm_helpers.get_group_key ~__context ~vm with + | `Other -> + () + | _ -> + Alcotest.fail "Test-failure: Unexpected Group Key in test" + let test_get_group_key_vgpu () = on_pool_of_intel_i350 (fun __context _ _ _ -> let group = List.hd (Db.GPU_group.get_all ~__context) in @@ -573,6 +599,352 @@ let test_get_group_key_vgpu_and_netsriov () = Alcotest.fail "Test-failure: Unexpected Group Key in test" ) +let test_get_group_key_anti_affinity_and_vgpu_and_netsriov () = + on_pool_of_intel_i350 (fun __context _ _ _ -> + let group = List.hd (Db.GPU_group.get_all ~__context) in + let vm = make_vm_with_vgpu_in_group ~__context VGPU_T.k100 group in + let sriov_network = + List.find + (fun network -> + Xapi_network_sriov_helpers.is_sriov_network ~__context ~self:network + ) + (Db.Network.get_all ~__context) + in + let (_ : 
API.ref_VIF) = + T.make_vif ~__context ~vM:vm ~network:sriov_network () + in + let anti_affinity_group = + T.make_vm_group ~__context ~placement:`anti_affinity () + in + Db.VM.set_groups ~__context ~self:vm ~value:[anti_affinity_group] ; + match Xapi_vm_helpers.get_group_key ~__context ~vm with + | `AntiAffinity _ -> + () + | _ -> + Alcotest.fail "Test-failure: Unexpected Group Key in test" + ) + +type vm_info = {name: string; host: string; group: string; state: string} + +type vm_anti_affinity_ranked_grp_test_case = { + description: string + ; vm_to_start: vm_info + ; other_vms: vm_info list + ; hosts: string list + ; affinity_host: string + ; expected: string list list +} + +let vm_anti_affinity_ranked_grp_test_cases = + [ + { + description= "No other VM" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= [] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "VMs not in group" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h2"; group= ""; state= "running"} + ; {name= "vm2"; host= "h3"; group= ""; state= "running"} + ; {name= "vm3"; host= "h3"; group= ""; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "VMs in other group" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h2"; group= "other-group"; state= "running"} + ; {name= "vm2"; host= "h3"; group= "other-group"; state= "running"} + ; {name= "vm3"; host= "h3"; group= "other-group"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 running VMs (h1(0) h2(1) h3(2))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"]; ["h2"]; ["h3"]] + } + ; { + description= "3 running VMs (h1(1) h2(1) h3(1))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h1"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 running VMs (h1(0) h2(0) h3(3))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"]; ["h3"]] + } + ; { + description= "3 starting VMs (h1(0) h2(1) h3(2))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h2"; group= "anti-affinity"; state= "starting"} + ; {name= "vm2"; host= "h3"; group= "anti-affinity"; state= "starting"} + ; {name= "vm3"; 
host= "h3"; group= "anti-affinity"; state= "starting"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"]; ["h2"]; ["h3"]] + } + ; { + description= "3 starting VMs (h1(1) h2(1) h3(1))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h1"; group= "anti-affinity"; state= "starting"} + ; {name= "vm2"; host= "h2"; group= "anti-affinity"; state= "starting"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "starting"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 starting VMs (h1(0) h2(0) h3(3))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h3"; group= "anti-affinity"; state= "starting"} + ; {name= "vm2"; host= "h3"; group= "anti-affinity"; state= "starting"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "starting"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"]; ["h3"]] + } + ; { + description= "3 stopped VMs" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= ""; group= "anti-affinity"; state= "halted"} + ; {name= "vm2"; host= ""; group= "anti-affinity"; state= "halted"} + ; {name= "vm3"; host= ""; group= "anti-affinity"; state= "halted"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 suspended VMs" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= ""; group= "anti-affinity"; state= "suspended"} + ; {name= "vm2"; host= ""; group= "anti-affinity"; state= "suspended"} + ; {name= "vm3"; host= ""; group= "anti-affinity"; state= "suspended"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 paused VMs (h1(0) h2(1) h3(2))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h2"; group= "anti-affinity"; state= "paused"} + ; {name= "vm2"; host= "h3"; group= "anti-affinity"; state= "paused"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "paused"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ; { + description= "3 running VMs with affinity-host" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h1"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "h1" + ; expected= [["h1"]; ["h2"; "h3"]] + } + ; { + description= "6 running VMs (h1(1) h2(2) h3(3))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h1"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm4"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm5"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm6"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; 
affinity_host= "" + ; expected= [["h1"]; ["h2"]; ["h3"]] + } + ; { + description= "6 running VMs (h1(2) h2(2) h3(2))" + ; vm_to_start= + {name= "vm"; host= ""; group= "anti-affinity"; state= "halted"} + ; other_vms= + [ + {name= "vm1"; host= "h1"; group= "anti-affinity"; state= "running"} + ; {name= "vm2"; host= "h1"; group= "anti-affinity"; state= "running"} + ; {name= "vm3"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm4"; host= "h2"; group= "anti-affinity"; state= "running"} + ; {name= "vm5"; host= "h3"; group= "anti-affinity"; state= "running"} + ; {name= "vm6"; host= "h3"; group= "anti-affinity"; state= "running"} + ] + ; hosts= ["h1"; "h2"; "h3"] + ; affinity_host= "" + ; expected= [["h1"; "h2"; "h3"]] + } + ] + +let make_hosts ~__context ~hosts = + match hosts with + | fst :: others -> + let host1 = List.hd (Db.Host.get_all ~__context) in + Db.Host.set_name_label ~__context ~self:host1 ~value:fst ; + List.iter + (fun h -> + let _ = T.make_host ~__context ~name_label:h () in + () + ) + others + | [] -> + () + +let make_vm_based_on_vm_info ~__context ~vm_info = + let vm = T.make_vm ~__context ~name_label:vm_info.name () in + ( match vm_info.group with + | "" -> + () + | group_name -> + let group = + match Db.VM_group.get_by_name_label ~__context ~label:group_name with + | g :: _ -> + g + | [] -> + T.make_vm_group ~__context ~placement:`anti_affinity + ~name_label:group_name () + in + Db.VM.set_groups ~__context ~self:vm ~value:[group] + ) ; + ( match vm_info.host with + | "" -> + () + | host_name -> ( + let host = + List.hd (Db.Host.get_by_name_label ~__context ~label:host_name) + in + match vm_info.state with + | "running" -> + Db.VM.set_power_state ~__context ~self:vm ~value:`Running ; + Db.VM.set_resident_on ~__context ~self:vm ~value:host + | "starting" -> + Db.VM.set_power_state ~__context ~self:vm ~value:`Halted ; + Db.VM.set_scheduled_to_be_resident_on ~__context ~self:vm ~value:host + | "suspended" -> + Db.VM.set_power_state ~__context ~self:vm ~value:`Suspended + | "paused" -> + Db.VM.set_power_state ~__context ~self:vm ~value:`Paused ; + Db.VM.set_resident_on ~__context ~self:vm ~value:host + | "halted" -> + Db.VM.set_power_state ~__context ~self:vm ~value:`Halted + | _ -> + () + ) + ) ; + vm + +let test_vm_anti_affinity_ranked_grp + {vm_to_start; other_vms; hosts; affinity_host; expected; _} () = + let __context = T.make_test_database () in + make_hosts ~__context ~hosts ; + let vm = make_vm_based_on_vm_info ~__context ~vm_info:vm_to_start in + let _ = + List.map + (fun vm -> make_vm_based_on_vm_info ~__context ~vm_info:vm) + other_vms + in + Db.VM.set_affinity ~__context ~self:vm + ~value: + ( match affinity_host with + | "" -> + Ref.null + | host_name -> + List.hd (Db.Host.get_by_name_label ~__context ~label:host_name) + ) ; + let group = Db.VM.get_groups ~__context ~self:vm |> List.hd in + check_anti_affinity_grouping ~__context ~vm ~group + (List.map + (fun list -> + List.map + (fun host_name -> + List.hd (Db.Host.get_by_name_label ~__context ~label:host_name) + ) + list + ) + expected + ) + +let generate_vm_anti_affinity_ranked_grp_tests case = + (case.description, `Quick, test_vm_anti_affinity_ranked_grp case) + +let anti_affinity_tests = + List.map generate_vm_anti_affinity_ranked_grp_tests + vm_anti_affinity_ranked_grp_test_cases + let test = [ ("test_gpus_available_succeeds", `Quick, test_gpus_available_succeeds) @@ -612,14 +984,24 @@ let test = , `Quick , test_group_hosts_netsriov_with_allocated ) + ; ( "test_get_group_key_anti_affinity" + , 
`Quick + , test_get_group_key_anti_affinity + ) + ; ("test_get_group_key_normal_group", `Quick, test_get_group_key_normal_group) ; ("test_get_group_key_vgpu", `Quick, test_get_group_key_vgpu) ; ("test_get_group_key_netsriov", `Quick, test_get_group_key_netsriov) ; ( "test_get_group_key_vgpu_and_netsriov" , `Quick , test_get_group_key_vgpu_and_netsriov ) + ; ( "test_get_group_key_anti_affinity_and_vgpu_and_netsriov" + , `Quick + , test_get_group_key_anti_affinity_and_vgpu_and_netsriov + ) ] let () = Suite_init.harness_init () ; - Alcotest.run "Test VM Helpers suite" [("Test_vm_helpers", test)] + Alcotest.run "Test VM Helpers suite" + [("Test_vm_helpers", test @ anti_affinity_tests)] diff --git a/quality-gate.sh b/quality-gate.sh index 6659ae24def..483033b9a8a 100755 --- a/quality-gate.sh +++ b/quality-gate.sh @@ -3,7 +3,7 @@ set -e list-hd () { - N=318 + N=324 LIST_HD=$(git grep -r --count 'List.hd' -- **/*.ml | cut -d ':' -f 2 | paste -sd+ - | bc) if [ "$LIST_HD" -eq "$N" ]; then echo "OK counted $LIST_HD List.hd usages"
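
A note on the ranking these test cases encode: the `expected` lists assert that hosts come back in buckets, best candidates first, where a host ranks better when fewer VMs of the anti-affinity group are running or starting on it (halted, suspended, and paused VMs do not count), and a VM's affinity host, when set, ranks ahead of everything else. The standalone sketch below reproduces that bucketing so the expectations are easier to read; the `host` record, `rank_hosts`, `group_vm_count`, and `is_affinity_host` are illustrative names of my own, and the model is inferred from the `expected` lists, not taken from the actual `rank_hosts_by_placement` implementation in `xapi_vm_helpers`.

    (* A minimal, standalone model of the grouping the [expected] lists encode.
       [group_vm_count] stands in for the number of anti-affinity group VMs
       running or starting on the host; halted, suspended and paused VMs are
       assumed not to count. Illustration only, not xapi's implementation. *)
    type host = {name: string; group_vm_count: int; is_affinity_host: bool}

    let rank_hosts hosts =
      (* Lower key = better candidate: the affinity host first, then hosts
         carrying fewer group VMs. Hosts with equal keys share a bucket. *)
      let key h = ((if h.is_affinity_host then 0 else 1), h.group_vm_count) in
      let sorted = List.sort (fun a b -> compare (key a) (key b)) hosts in
      let rec bucket = function
        | [] ->
            []
        | (h :: _) as l ->
            let same, rest = List.partition (fun x -> key x = key h) l in
            same :: bucket rest
      in
      bucket sorted

    let () =
      (* The "3 running VMs (h1(0) h2(0) h3(3))" case: prints [h1; h2] then
         [h3], matching expected= [["h1"; "h2"]; ["h3"]] above. *)
      rank_hosts
        [
          {name= "h1"; group_vm_count= 0; is_affinity_host= false}
        ; {name= "h2"; group_vm_count= 0; is_affinity_host= false}
        ; {name= "h3"; group_vm_count= 3; is_affinity_host= false}
        ]
      |> List.iter (fun grp ->
             grp
             |> List.map (fun h -> h.name)
             |> String.concat "; "
             |> Printf.printf "[%s]\n"
         )

The same key also explains the affinity-host case: with one running group VM on each of h1, h2, and h3 but h1 set as the affinity host, h1 lands alone in the first bucket, giving [["h1"]; ["h2"; "h3"]].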
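
On the quality-gate.sh side, the counter moves from 318 to 324 because this change introduces six new `List.hd` call sites in test_vm_helpers.ml. After touching .ml files, the expected total can be recomputed with the same pipeline the gate itself runs:

    git grep -r --count 'List.hd' -- **/*.ml | cut -d ':' -f 2 | paste -sd+ - | bc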