Hi, I'd like to balance freedisk and cores across eight nodes. Here are my cluster-preferences and cluster-policy:
{ "responseHeader":{ "status":0, "QTime":0}, "cluster-preferences":[{ "precision":10, "maximize":"freedisk"} ,{ "minimize":"cores", "precision":10} ,{ "minimize":"sysLoadAvg", "precision":3}], "cluster-policy":[{ "freedisk":"<10", "replica":"0", "strict":"true"}], "triggers":{".auto_add_replicas":{ "name":".auto_add_replicas", "event":"nodeLost", "waitFor":120, "actions":[{ "name":"auto_add_replicas_plan", "class":"solr.AutoAddReplicasPlanAction"}, { "name":"execute_plan", "class":"solr.ExecutePlanAction"}], "enabled":true}}, "listeners":{".auto_add_replicas.system":{ "trigger":".auto_add_replicas", "afterAction":[], "stage":["STARTED", "ABORTED", "SUCCEEDED", "FAILED", "BEFORE_ACTION", "AFTER_ACTION", "IGNORED"], "class":"org.apache.solr.cloud.autoscaling.SystemLogListener", "beforeAction":[]}}, "properties":{}, "WARNING":"This response format is experimental. It is likely to change in the future."} Can you help me understand why the least loaded node is test-54 in this case? { "responseHeader":{ "status":0, "QTime":1294}, "diagnostics":{ "sortedNodes":[{ "node":"test-52:8983_solr", "cores":99, "freedisk":1136.8754272460938, "sysLoadAvg":0.0}, { "node":"test-56:8983_solr", "cores":99, "freedisk":1045.345874786377, "sysLoadAvg":6.0}, { "node":"test-51:8983_solr", "cores":94, "freedisk":1029.996826171875, "sysLoadAvg":17.0}, { "node":"test-55:8983_solr", "cores":98, "freedisk":876.639045715332, "sysLoadAvg":2.0}, { "node":"test-53:8983_solr", "cores":91, "freedisk":715.8955001831055, "sysLoadAvg":17.0}, { "node":"test-58:8983_solr", "cores":104, "freedisk":927.1832389831543, "sysLoadAvg":0.0}, { "node":"test-57:8983_solr", "cores":120, "freedisk":934.3348655700684, "sysLoadAvg":0.0}, { "node":"test-54:8983_solr", "cores":165, "freedisk":580.5822525024414, "sysLoadAvg":0.0}], "violations":[]}, "WARNING":"This response format is experimental. It is likely to change in the future."} Solr 7.3.1 is running. Thank you.